diff --git a/go.mod b/go.mod
index 2c5ecfd8ce..7de33c60fc 100644
--- a/go.mod
+++ b/go.mod
@@ -12,14 +12,14 @@ require (
github.com/checkpoint-restore/go-criu/v7 v7.2.0
github.com/containernetworking/plugins v1.7.1
github.com/containers/buildah v1.40.1-0.20250604193037-b8d8cc375f30
- github.com/containers/common v0.63.2-0.20250624163146-1bc9d1737003
+ github.com/containers/common v0.63.2-0.20250627125909-bed7a8b142a3
github.com/containers/conmon v2.0.20+incompatible
github.com/containers/gvisor-tap-vsock v0.8.6
- github.com/containers/image/v5 v5.35.1-0.20250603145948-347a6e7283ef
+ github.com/containers/image/v5 v5.35.1-0.20250627132650-84bfe041dbcc
github.com/containers/libhvee v0.10.0
github.com/containers/ocicrypt v1.2.1
github.com/containers/psgo v1.9.0
- github.com/containers/storage v1.58.1-0.20250515004000-78f4258b2bd9
+ github.com/containers/storage v1.58.1-0.20250625164029-83650abfa8de
github.com/containers/winquit v1.1.0
github.com/coreos/go-systemd/v22 v22.5.1-0.20231103132048-7d375ecc2b09
github.com/crc-org/vfkit v0.6.1
@@ -67,7 +67,7 @@ require (
github.com/stretchr/testify v1.10.0
github.com/vbauerster/mpb/v8 v8.10.2
github.com/vishvananda/netlink v1.3.1
- go.etcd.io/bbolt v1.4.1
+ go.etcd.io/bbolt v1.4.2
golang.org/x/crypto v0.39.0
golang.org/x/net v0.41.0
golang.org/x/sync v0.15.0
@@ -77,7 +77,7 @@ require (
google.golang.org/protobuf v1.36.6
gopkg.in/inf.v0 v0.9.1
gopkg.in/yaml.v3 v3.0.1
- sigs.k8s.io/yaml v1.4.0
+ sigs.k8s.io/yaml v1.5.0
tags.cncf.io/container-device-interface v1.0.1
)
@@ -89,7 +89,6 @@ require (
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/aead/serpent v0.0.0-20160714141033-fba169763ea6 // indirect
- github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/chzyer/readline v1.5.1 // indirect
github.com/containerd/cgroups/v3 v3.0.5 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
@@ -117,18 +116,7 @@ require (
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
- github.com/go-openapi/analysis v0.23.0 // indirect
- github.com/go-openapi/errors v0.22.1 // indirect
- github.com/go-openapi/jsonpointer v0.21.1 // indirect
- github.com/go-openapi/jsonreference v0.21.0 // indirect
- github.com/go-openapi/loads v0.22.0 // indirect
- github.com/go-openapi/runtime v0.28.0 // indirect
- github.com/go-openapi/spec v0.21.0 // indirect
- github.com/go-openapi/strfmt v0.23.1-0.20250509134642-64a09ef0e084 // indirect
- github.com/go-openapi/swag v0.23.1 // indirect
- github.com/go-openapi/validate v0.24.0 // indirect
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
- github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/golang/protobuf v1.5.4 // indirect
@@ -138,15 +126,13 @@ require (
github.com/google/pprof v0.0.0-20250403155104-27863c87afa6 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
- github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
+ github.com/hashicorp/go-retryablehttp v0.7.8 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jinzhu/copier v0.4.0 // indirect
- github.com/josharian/intern v1.0.0 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/kr/fs v0.1.0 // indirect
github.com/letsencrypt/boulder v0.0.0-20240620165639-de9c06129bec // indirect
github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 // indirect
- github.com/mailru/easyjson v0.9.0 // indirect
github.com/manifoldco/promptui v0.9.0 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mdlayher/socket v0.5.1 // indirect
@@ -161,9 +147,7 @@ require (
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/morikuni/aec v1.0.0 // indirect
- github.com/oklog/ulid v1.3.1 // indirect
github.com/opencontainers/runc v1.3.0 // indirect
- github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pkg/sftp v1.13.9 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
@@ -176,8 +160,7 @@ require (
github.com/segmentio/ksuid v1.0.4 // indirect
github.com/sigstore/fulcio v1.6.6 // indirect
github.com/sigstore/protobuf-specs v0.4.1 // indirect
- github.com/sigstore/rekor v1.3.10 // indirect
- github.com/sigstore/sigstore v1.9.4 // indirect
+ github.com/sigstore/sigstore v1.9.5 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect
github.com/smallstep/pkcs7 v0.1.1 // indirect
@@ -192,21 +175,21 @@ require (
github.com/vbatts/tar-split v0.12.1 // indirect
github.com/vishvananda/netns v0.0.5 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
- go.mongodb.org/mongo-driver v1.17.3 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
- go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 // indirect
- go.opentelemetry.io/otel v1.34.0 // indirect
- go.opentelemetry.io/otel/metric v1.34.0 // indirect
- go.opentelemetry.io/otel/trace v1.34.0 // indirect
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect
+ go.opentelemetry.io/otel v1.35.0 // indirect
+ go.opentelemetry.io/otel/metric v1.35.0 // indirect
+ go.opentelemetry.io/otel/trace v1.35.0 // indirect
go.uber.org/automaxprocs v1.6.0 // indirect
+ go.yaml.in/yaml/v2 v2.4.2 // indirect
golang.org/x/mod v0.25.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.33.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 // indirect
- google.golang.org/grpc v1.71.0 // indirect
+ google.golang.org/grpc v1.72.2 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
tags.cncf.io/container-device-interface/specs-go v1.0.0 // indirect
)
diff --git a/go.sum b/go.sum
index 8e591fee15..4bbd23e8b8 100644
--- a/go.sum
+++ b/go.sum
@@ -20,8 +20,6 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/aead/serpent v0.0.0-20160714141033-fba169763ea6 h1:5L8Mj9Co9sJVgW3TpYk2gxGJnDjsYuboNTcRmbtGKGs=
github.com/aead/serpent v0.0.0-20160714141033-fba169763ea6/go.mod h1:3HgLJ9d18kXMLQlJvIY3+FszZYMxCz8WfE2MQ7hDY0w=
-github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
-github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
@@ -66,14 +64,14 @@ github.com/containernetworking/plugins v1.7.1 h1:CNAR0jviDj6FS5Vg85NTgKWLDzZPfi/
github.com/containernetworking/plugins v1.7.1/go.mod h1:xuMdjuio+a1oVQsHKjr/mgzuZ24leAsqUYRnzGoXHy0=
github.com/containers/buildah v1.40.1-0.20250604193037-b8d8cc375f30 h1:kCt0fnVBvXY9J98pUDeUc0gHKrhRwaBTWWD3otLutCE=
github.com/containers/buildah v1.40.1-0.20250604193037-b8d8cc375f30/go.mod h1:QDecwvjrr+e0VD5GYv2dw7tsiqrz673r8B4rIYFP11Y=
-github.com/containers/common v0.63.2-0.20250624163146-1bc9d1737003 h1:Nk8VZ9Ht7/HnYveikzd8RqNSPphbh358Chmt/GyPeWI=
-github.com/containers/common v0.63.2-0.20250624163146-1bc9d1737003/go.mod h1:mQkSk7VxbvgOo3vLE7yy6spgWNg8Ni0Zytt8HpmRKRw=
+github.com/containers/common v0.63.2-0.20250627125909-bed7a8b142a3 h1:rdAIp2BR1tzfLhgf+CW6ERkUj1TDghUoBTf79+mYRS8=
+github.com/containers/common v0.63.2-0.20250627125909-bed7a8b142a3/go.mod h1:gjzev1MLeaf3myYCfkeptujct/QKi1mJuzebRdwIul0=
github.com/containers/conmon v2.0.20+incompatible h1:YbCVSFSCqFjjVwHTPINGdMX1F6JXHGTUje2ZYobNrkg=
github.com/containers/conmon v2.0.20+incompatible/go.mod h1:hgwZ2mtuDrppv78a/cOBNiCm6O0UMWGx1mu7P00nu5I=
github.com/containers/gvisor-tap-vsock v0.8.6 h1:9SeAXK+K2o36CtrgYk6zRXbU3zrayjvkrI8b7/O6u5A=
github.com/containers/gvisor-tap-vsock v0.8.6/go.mod h1:+0mtKmm4STeSDnZe+DGnIwN4EH2f7AcWir7PwT28Ti0=
-github.com/containers/image/v5 v5.35.1-0.20250603145948-347a6e7283ef h1:sXXyXq3r6nJtwAPx+vnzhakShOM1KJBUpT5e/tZ3zto=
-github.com/containers/image/v5 v5.35.1-0.20250603145948-347a6e7283ef/go.mod h1:tOeAv2LI5fS7gsLlBMhIx46WeiBvvBOwjM4kadtziGQ=
+github.com/containers/image/v5 v5.35.1-0.20250627132650-84bfe041dbcc h1:mJ/fftOAdKuRp8hPlToXYXBDZBejK6kpIS45EoSDHEk=
+github.com/containers/image/v5 v5.35.1-0.20250627132650-84bfe041dbcc/go.mod h1:XwvhNxeeqenT547u5y3haPiLr9wuRCH2TCcjkETRtBI=
github.com/containers/libhvee v0.10.0 h1:7VLv8keWZpHuGmWvyY4c1mVH5V1JYb1G78VC+8AlrM0=
github.com/containers/libhvee v0.10.0/go.mod h1:at0h8lRcK5jCKfQgU/e6Io0Mw12F36zRLjXVOXRoDTM=
github.com/containers/libtrust v0.0.0-20230121012942-c1716e8a8d01 h1:Qzk5C6cYglewc+UyGf6lc8Mj2UaPTHy/iF2De0/77CA=
@@ -84,8 +82,8 @@ github.com/containers/ocicrypt v1.2.1 h1:0qIOTT9DoYwcKmxSt8QJt+VzMY18onl9jUXsxpV
github.com/containers/ocicrypt v1.2.1/go.mod h1:aD0AAqfMp0MtwqWgHM1bUwe1anx0VazI108CRrSKINQ=
github.com/containers/psgo v1.9.0 h1:eJ74jzSaCHnWt26OlKZROSyUyRcGDf+gYBdXnxrMW4g=
github.com/containers/psgo v1.9.0/go.mod h1:0YoluUm43Mz2UnBIh1P+6V6NWcbpTL5uRtXyOcH0B5A=
-github.com/containers/storage v1.58.1-0.20250515004000-78f4258b2bd9 h1:7pLGfniIOBvFoe4hzLpV+DWt1hHWaQPiBQR71ftOpCU=
-github.com/containers/storage v1.58.1-0.20250515004000-78f4258b2bd9/go.mod h1:exWY15dYuRjIG2nfv2/Z1TinvEYub582shEGGr6uawY=
+github.com/containers/storage v1.58.1-0.20250625164029-83650abfa8de h1:Mlc8/KWeMJwmZ1ZXczZttOu3DvR9tyZxXxkPk796mFI=
+github.com/containers/storage v1.58.1-0.20250625164029-83650abfa8de/go.mod h1:bgjPVDYMdJGvghV2oBHeAjha5QkgldLuV3qqUu6fpmk=
github.com/containers/winquit v1.1.0 h1:jArun04BNDQvt2W0Y78kh9TazN2EIEMG5Im6/JY7+pE=
github.com/containers/winquit v1.1.0/go.mod h1:PsPeZlnbkmGGIToMPHF1zhWjBUkd8aHjMOr/vFcPxw8=
github.com/coreos/go-oidc/v3 v3.14.1 h1:9ePWwfdwC4QKRlCXsJGou56adA/owXczOzwKdOumLqk=
@@ -115,8 +113,8 @@ github.com/disiqueira/gotree/v3 v3.0.2 h1:ik5iuLQQoufZBNPY518dXhiO5056hyNBIK9lWh
github.com/disiqueira/gotree/v3 v3.0.2/go.mod h1:ZuyjE4+mUQZlbpkI24AmruZKhg3VHEgPLDY8Qk+uUu8=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
-github.com/docker/cli v28.2.2+incompatible h1:qzx5BNUDFqlvyq4AHzdNB7gSyVTmU4cgsyN9SdInc1A=
-github.com/docker/cli v28.2.2+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
+github.com/docker/cli v28.3.0+incompatible h1:s+ttruVLhB5ayeuf2BciwDVxYdKi+RoUlxmwNHV3Vfo=
+github.com/docker/cli v28.3.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v28.3.0+incompatible h1:ffS62aKWupCWdvcee7nBU9fhnmknOqDPaJAMtfK0ImQ=
@@ -158,34 +156,12 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
-github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU=
-github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo=
-github.com/go-openapi/errors v0.22.1 h1:kslMRRnK7NCb/CvR1q1VWuEQCEIsBGn5GgKD9e+HYhU=
-github.com/go-openapi/errors v0.22.1/go.mod h1:+n/5UdIqdVnLIJ6Q9Se8HNGUXYaY6CN8ImWzfi/Gzp0=
-github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic=
-github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk=
-github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
-github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
-github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco=
-github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs=
-github.com/go-openapi/runtime v0.28.0 h1:gpPPmWSNGo214l6n8hzdXYhPuJcGtziTOgUpvsFWGIQ=
-github.com/go-openapi/runtime v0.28.0/go.mod h1:QN7OzcS+XuYmkQLw05akXk0jRH/eZ3kb18+1KwW9gyc=
-github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
-github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
-github.com/go-openapi/strfmt v0.23.1-0.20250509134642-64a09ef0e084 h1:PNIpnlKt8VYiQuxzI48nNqM3M0ZW+PPBMv/LTEQlNDo=
-github.com/go-openapi/strfmt v0.23.1-0.20250509134642-64a09ef0e084/go.mod h1:WHBPDONkZMEwENrJXFU37tIde3N8Q1lrlHSlXbF49LE=
-github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU=
-github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0=
-github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
-github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
github.com/go-rod/rod v0.116.2 h1:A5t2Ky2A+5eD/ZJQr1EfsQSe5rms5Xof/qj296e+ZqA=
github.com/go-rod/rod v0.116.2/go.mod h1:H+CMO9SCNc2TJ2WfrG+pKhITz57uGNYU43qYHh438Mg=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U=
github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
-github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
-github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/godbus/dbus/v5 v5.1.1-0.20241109141217-c266b19b28e9 h1:Kzr9J0S0V2PRxiX6B6xw1kWjzsIyjLO2Ibi4fNTaYBM=
github.com/godbus/dbus/v5 v5.1.1-0.20241109141217-c266b19b28e9/go.mod h1:3AAv2+hPq5rdnr5txxxRwiGjPXamgoIHgz9FPBfOp3c=
@@ -213,7 +189,6 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
@@ -249,8 +224,8 @@ github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB1
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
-github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
-github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
+github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48=
+github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw=
github.com/hugelgupf/p9 v0.3.1-0.20250420164440-abc96d20b308 h1:lw9bsUGBW4HRaDQA4h4d90DHhcdlMoNxx3bi4gYPSPw=
github.com/hugelgupf/p9 v0.3.1-0.20250420164440-abc96d20b308/go.mod h1:LoNwfBWP+QlCkjS1GFNylCthRIk/TkMZd6ICTbC+hrI=
github.com/hugelgupf/socketpair v0.0.0-20230822150718-707395b1939a h1:Nq7wDsqsVBUBfGn8yB1M028ShWTKTtZBcafaTJ35N0s=
@@ -265,8 +240,6 @@ github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jmhodges/clock v1.2.0 h1:eq4kys+NI0PLngzaHEe7AmPT90XMGIEySD1JfV1PDIs=
github.com/jmhodges/clock v1.2.0/go.mod h1:qKjhA7x7u/lQpPB1XAqX1b1lCI/w3/fNuYpI/ZjLynI=
-github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
-github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/josharian/native v1.1.0 h1:uuaP0hAbW7Y4l0ZRQ6C9zfb7Mg1mbFKry/xzDAfmtLA=
github.com/josharian/native v1.1.0/go.mod h1:7X/raswPFr05uY3HiLlYeyQntB6OO7E/d2Cu7qoaN2w=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
@@ -291,8 +264,6 @@ github.com/linuxkit/virtsock v0.0.0-20241009230534-cb6a20cc0422 h1:XvRuyDDRvi+UD
github.com/linuxkit/virtsock v0.0.0-20241009230534-cb6a20cc0422/go.mod h1:JLgfq4XMVbvfNlAXla/41lZnp21O72a/wWHGJefAvgQ=
github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 h1:7UMa6KCCMjZEMDtTVdcGu0B1GmmC7QJKiCCjyTAWQy0=
github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k=
-github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
-github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA=
github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
@@ -348,8 +319,6 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY=
github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc=
-github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
-github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo/v2 v2.23.4 h1:ktYTpKJAVZnDT4VjxSbiBenUjmlL/5QkBEocaWXiQus=
github.com/onsi/ginkgo/v2 v2.23.4/go.mod h1:Bt66ApGPBFzHyR+JO10Zbt0Gsp4uWxu5mIOTusL46e8=
github.com/onsi/gomega v1.37.0 h1:CdEG8g0S133B4OswTDC/5XPSzE1OeP29QOioj2PID2Y=
@@ -370,8 +339,6 @@ github.com/opencontainers/selinux v1.12.0 h1:6n5JV4Cf+4y0KNXW48TLj5DwfXpvWlxXplU
github.com/opencontainers/selinux v1.12.0/go.mod h1:BTPX+bjVbWGXw7ZZWUbdENt8w0htPSrlgOOysQaU62U=
github.com/openshift/imagebuilder v1.2.16 h1:Vqjy5uPoVDJiX5JUKHo0Cf440ih5cKI7lVe2ZJ2X+RA=
github.com/openshift/imagebuilder v1.2.16/go.mod h1:gASl6jikVG3bCFnLjG6Ow5TeKwKVvrqUUj8C7EUmqc8=
-github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
-github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@@ -389,8 +356,8 @@ github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
github.com/proglottis/gpgme v0.1.4 h1:3nE7YNA70o2aLjcg63tXMOhPD7bplfE5CBdV+hLAm2M=
github.com/proglottis/gpgme v0.1.4/go.mod h1:5LoXMgpE4bttgwwdv9bLs/vwqv3qV7F4glEEZ7mRKrM=
-github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk=
-github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg=
+github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
+github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
@@ -424,10 +391,8 @@ github.com/sigstore/fulcio v1.6.6 h1:XaMYX6TNT+8n7Npe8D94nyZ7/ERjEsNGFC+REdi/wzw
github.com/sigstore/fulcio v1.6.6/go.mod h1:BhQ22lwaebDgIxVBEYOOqLRcN5+xOV+C9bh/GUXRhOk=
github.com/sigstore/protobuf-specs v0.4.1 h1:5SsMqZbdkcO/DNHudaxuCUEjj6x29tS2Xby1BxGU7Zc=
github.com/sigstore/protobuf-specs v0.4.1/go.mod h1:+gXR+38nIa2oEupqDdzg4qSBT0Os+sP7oYv6alWewWc=
-github.com/sigstore/rekor v1.3.10 h1:/mSvRo4MZ/59ECIlARhyykAlQlkmeAQpvBPlmJtZOCU=
-github.com/sigstore/rekor v1.3.10/go.mod h1:JvryKJ40O0XA48MdzYUPu0y4fyvqt0C4iSY7ri9iu3A=
-github.com/sigstore/sigstore v1.9.4 h1:64+OGed80+A4mRlNzRd055vFcgBeDghjZw24rPLZgDU=
-github.com/sigstore/sigstore v1.9.4/go.mod h1:Q7tGTC3gbtK7c3jcxEmGc2MmK4rRpIRzi3bxRFWKvEY=
+github.com/sigstore/sigstore v1.9.5 h1:Wm1LT9yF4LhQdEMy5A2JeGRHTrAWGjT3ubE5JUSrGVU=
+github.com/sigstore/sigstore v1.9.5/go.mod h1:VtxgvGqCmEZN9X2zhFSOkfXxvKUjpy8RpUW39oCtoII=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
@@ -501,36 +466,36 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
-go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI=
-go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8=
-go.mongodb.org/mongo-driver v1.17.3 h1:TQyXhnsWfWtgAhMtOgtYHMTkZIfBTpMTsMnd9ZBeHxQ=
-go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
+go.etcd.io/bbolt v1.4.2 h1:IrUHp260R8c+zYx/Tm8QZr04CX+qWS5PGfPdevhdm1I=
+go.etcd.io/bbolt v1.4.2/go.mod h1:Is8rSHO/b4f3XigBC0lL0+4FwAQv3HXEEIgFMuKHceM=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I=
-go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
-go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfgF8E1rkrT1Z6meFh1NDtownE9Ii3n3X2GJYjsaU=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0=
-go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
-go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
-go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A=
-go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU=
-go.opentelemetry.io/otel/sdk/metric v1.34.0 h1:5CeK9ujjbFVL5c1PhLuStg1wxA7vQv7ce1EK0Gyvahk=
-go.opentelemetry.io/otel/sdk/metric v1.34.0/go.mod h1:jQ/r8Ze28zRKoNRdkjCZxfs6YvBTG1+YIqyFVFYec5w=
-go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
-go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
-go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg=
-go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ=
+go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
+go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0 h1:IJFEoHiytixx8cMiVAO+GmHR6Frwu+u5Ur8njpFO6Ac=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0/go.mod h1:3rHrKNtLIoS0oZwkY2vxi+oJcwFRWdtUyRII+so45p8=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0 h1:xJ2qHD0C1BeYVTLLR9sX12+Qb95kfeD/byKj6Ky1pXg=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0/go.mod h1:u5BF1xyjstDowA1R5QAO9JHzqK+ublenEW/dyqTjBVk=
+go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
+go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
+go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY=
+go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg=
+go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o=
+go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w=
+go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
+go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
+go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0=
+go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8=
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
-go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
-go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
+go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
+go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
+go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE=
+go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
@@ -679,8 +644,8 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
-google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
-google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
+google.golang.org/grpc v1.72.2 h1:TdbGzwb82ty4OusHWepvFWGLgIbNo1/SUynEN0ssqv8=
+google.golang.org/grpc v1.72.2/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -706,8 +671,8 @@ gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
-sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
+sigs.k8s.io/yaml v1.5.0 h1:M10b2U7aEUY6hRtU870n2VTPgR5RZiL/I6Lcc2F4NUQ=
+sigs.k8s.io/yaml v1.5.0/go.mod h1:wZs27Rbxoai4C0f8/9urLZtZtF3avA3gKvGyPdDqTO4=
src.elv.sh v0.16.0-rc1.0.20220116211855-fda62502ad7f h1:pjVeIo9Ba6K1Wy+rlwX91zT7A+xGEmxiNRBdN04gDTQ=
src.elv.sh v0.16.0-rc1.0.20220116211855-fda62502ad7f/go.mod h1:kPbhv5+fBeUh85nET3wWhHGUaUQ64nZMJ8FwA5v5Olg=
tags.cncf.io/container-device-interface v1.0.1 h1:KqQDr4vIlxwfYh0Ed/uJGVgX+CHAkahrgabg6Q8GYxc=
diff --git a/vendor/github.com/asaskevich/govalidator/.gitignore b/vendor/github.com/asaskevich/govalidator/.gitignore
deleted file mode 100644
index 8d69a9418a..0000000000
--- a/vendor/github.com/asaskevich/govalidator/.gitignore
+++ /dev/null
@@ -1,15 +0,0 @@
-bin/
-.idea/
-# Binaries for programs and plugins
-*.exe
-*.exe~
-*.dll
-*.so
-*.dylib
-
-# Test binary, built with `go test -c`
-*.test
-
-# Output of the go coverage tool, specifically when used with LiteIDE
-*.out
-
diff --git a/vendor/github.com/asaskevich/govalidator/.travis.yml b/vendor/github.com/asaskevich/govalidator/.travis.yml
deleted file mode 100644
index bb83c6670d..0000000000
--- a/vendor/github.com/asaskevich/govalidator/.travis.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-language: go
-dist: xenial
-go:
- - '1.10'
- - '1.11'
- - '1.12'
- - '1.13'
- - 'tip'
-
-script:
- - go test -coverpkg=./... -coverprofile=coverage.info -timeout=5s
- - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md b/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md
deleted file mode 100644
index 4b462b0d81..0000000000
--- a/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,43 +0,0 @@
-# Contributor Code of Conduct
-
-This project adheres to [The Code Manifesto](http://codemanifesto.com)
-as its guidelines for contributor interactions.
-
-## The Code Manifesto
-
-We want to work in an ecosystem that empowers developers to reach their
-potential — one that encourages growth and effective collaboration. A space
-that is safe for all.
-
-A space such as this benefits everyone that participates in it. It encourages
-new developers to enter our field. It is through discussion and collaboration
-that we grow, and through growth that we improve.
-
-In the effort to create such a place, we hold to these values:
-
-1. **Discrimination limits us.** This includes discrimination on the basis of
- race, gender, sexual orientation, gender identity, age, nationality,
- technology and any other arbitrary exclusion of a group of people.
-2. **Boundaries honor us.** Your comfort levels are not everyone’s comfort
- levels. Remember that, and if brought to your attention, heed it.
-3. **We are our biggest assets.** None of us were born masters of our trade.
- Each of us has been helped along the way. Return that favor, when and where
- you can.
-4. **We are resources for the future.** As an extension of #3, share what you
- know. Make yourself a resource to help those that come after you.
-5. **Respect defines us.** Treat others as you wish to be treated. Make your
- discussions, criticisms and debates from a position of respectfulness. Ask
- yourself, is it true? Is it necessary? Is it constructive? Anything less is
- unacceptable.
-6. **Reactions require grace.** Angry responses are valid, but abusive language
- and vindictive actions are toxic. When something happens that offends you,
- handle it assertively, but be respectful. Escalate reasonably, and try to
- allow the offender an opportunity to explain themselves, and possibly
- correct the issue.
-7. **Opinions are just that: opinions.** Each and every one of us, due to our
- background and upbringing, have varying opinions. That is perfectly
- acceptable. Remember this: if you respect your own opinions, you should
- respect the opinions of others.
-8. **To err is human.** You might not intend it, but mistakes do happen and
- contribute to build experience. Tolerate honest mistakes, and don't
- hesitate to apologize if you make one yourself.
diff --git a/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md
deleted file mode 100644
index 7ed268a1ed..0000000000
--- a/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md
+++ /dev/null
@@ -1,63 +0,0 @@
-#### Support
-If you do have a contribution to the package, feel free to create a Pull Request or an Issue.
-
-#### What to contribute
-If you don't know what to do, there are some features and functions that need to be done
-
-- [ ] Refactor code
-- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
-- [ ] Create actual list of contributors and projects that currently using this package
-- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
-- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
-- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
-- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
-- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
-- [ ] Implement fuzzing testing
-- [ ] Implement some struct/map/array utilities
-- [ ] Implement map/array validation
-- [ ] Implement benchmarking
-- [ ] Implement batch of examples
-- [ ] Look at forks for new features and fixes
-
-#### Advice
-Feel free to create what you want, but keep in mind when you implement new features:
-- Code must be clear and readable, names of variables/constants clearly describes what they are doing
-- Public functions must be documented and described in source file and added to README.md to the list of available functions
-- There are must be unit-tests for any new functions and improvements
-
-## Financial contributions
-
-We also welcome financial contributions in full transparency on our [open collective](https://opencollective.com/govalidator).
-Anyone can file an expense. If the expense makes sense for the development of the community, it will be "merged" in the ledger of our open collective by the core contributors and the person who filed the expense will be reimbursed.
-
-
-## Credits
-
-
-### Contributors
-
-Thank you to all the people who have already contributed to govalidator!
-
-
-
-### Backers
-
-Thank you to all our backers! [[Become a backer](https://opencollective.com/govalidator#backer)]
-
-
-
-
-### Sponsors
-
-Thank you to all our sponsors! (please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/govalidator#sponsor))
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/vendor/github.com/asaskevich/govalidator/LICENSE b/vendor/github.com/asaskevich/govalidator/LICENSE
deleted file mode 100644
index cacba91024..0000000000
--- a/vendor/github.com/asaskevich/govalidator/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014-2020 Alex Saskevich
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/asaskevich/govalidator/README.md b/vendor/github.com/asaskevich/govalidator/README.md
deleted file mode 100644
index 2c3fc35eb6..0000000000
--- a/vendor/github.com/asaskevich/govalidator/README.md
+++ /dev/null
@@ -1,622 +0,0 @@
-govalidator
-===========
-[](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [](https://godoc.org/github.com/asaskevich/govalidator)
-[](https://travis-ci.org/asaskevich/govalidator)
-[](https://codecov.io/gh/asaskevich/govalidator) [](https://goreportcard.com/report/github.com/asaskevich/govalidator) [](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator) [](#backers) [](#sponsors) [](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_shield)
-
-A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js).
-
-#### Installation
-Make sure that Go is installed on your computer.
-Type the following command in your terminal:
-
- go get github.com/asaskevich/govalidator
-
-or you can get specified release of the package with `gopkg.in`:
-
- go get gopkg.in/asaskevich/govalidator.v10
-
-After it the package is ready to use.
-
-
-#### Import package in your project
-Add following line in your `*.go` file:
-```go
-import "github.com/asaskevich/govalidator"
-```
-If you are unhappy to use long `govalidator`, you can do something like this:
-```go
-import (
- valid "github.com/asaskevich/govalidator"
-)
-```
-
-#### Activate behavior to require all fields have a validation tag by default
-`SetFieldsRequiredByDefault` causes validation to fail when struct fields do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). A good place to activate this is a package init function or the main() function.
-
-`SetNilPtrAllowedByRequired` causes validation to pass when struct fields marked by `required` are set to nil. This is disabled by default for consistency, but some packages that need to be able to determine between `nil` and `zero value` state can use this. If disabled, both `nil` and `zero` values cause validation errors.
-
-```go
-import "github.com/asaskevich/govalidator"
-
-func init() {
- govalidator.SetFieldsRequiredByDefault(true)
-}
-```
-
-Here's some code to explain it:
-```go
-// this struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
-type exampleStruct struct {
- Name string ``
- Email string `valid:"email"`
-}
-
-// this, however, will only fail when Email is empty or an invalid email address:
-type exampleStruct2 struct {
- Name string `valid:"-"`
- Email string `valid:"email"`
-}
-
-// lastly, this will only fail when Email is an invalid email address but not when it's empty:
-type exampleStruct2 struct {
- Name string `valid:"-"`
- Email string `valid:"email,optional"`
-}
-```
-
-#### Recent breaking changes (see [#123](https://github.com/asaskevich/govalidator/pull/123))
-##### Custom validator function signature
-A context was added as the second parameter, for structs this is the object being validated – this makes dependent validation possible.
-```go
-import "github.com/asaskevich/govalidator"
-
-// old signature
-func(i interface{}) bool
-
-// new signature
-func(i interface{}, o interface{}) bool
-```
-
-##### Adding a custom validator
-This was changed to prevent data races when accessing custom validators.
-```go
-import "github.com/asaskevich/govalidator"
-
-// before
-govalidator.CustomTypeTagMap["customByteArrayValidator"] = func(i interface{}, o interface{}) bool {
- // ...
-}
-
-// after
-govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, o interface{}) bool {
- // ...
-})
-```
-
-#### List of functions:
-```go
-func Abs(value float64) float64
-func BlackList(str, chars string) string
-func ByteLength(str string, params ...string) bool
-func CamelCaseToUnderscore(str string) string
-func Contains(str, substring string) bool
-func Count(array []interface{}, iterator ConditionIterator) int
-func Each(array []interface{}, iterator Iterator)
-func ErrorByField(e error, field string) string
-func ErrorsByField(e error) map[string]string
-func Filter(array []interface{}, iterator ConditionIterator) []interface{}
-func Find(array []interface{}, iterator ConditionIterator) interface{}
-func GetLine(s string, index int) (string, error)
-func GetLines(s string) []string
-func HasLowerCase(str string) bool
-func HasUpperCase(str string) bool
-func HasWhitespace(str string) bool
-func HasWhitespaceOnly(str string) bool
-func InRange(value interface{}, left interface{}, right interface{}) bool
-func InRangeFloat32(value, left, right float32) bool
-func InRangeFloat64(value, left, right float64) bool
-func InRangeInt(value, left, right interface{}) bool
-func IsASCII(str string) bool
-func IsAlpha(str string) bool
-func IsAlphanumeric(str string) bool
-func IsBase64(str string) bool
-func IsByteLength(str string, min, max int) bool
-func IsCIDR(str string) bool
-func IsCRC32(str string) bool
-func IsCRC32b(str string) bool
-func IsCreditCard(str string) bool
-func IsDNSName(str string) bool
-func IsDataURI(str string) bool
-func IsDialString(str string) bool
-func IsDivisibleBy(str, num string) bool
-func IsEmail(str string) bool
-func IsExistingEmail(email string) bool
-func IsFilePath(str string) (bool, int)
-func IsFloat(str string) bool
-func IsFullWidth(str string) bool
-func IsHalfWidth(str string) bool
-func IsHash(str string, algorithm string) bool
-func IsHexadecimal(str string) bool
-func IsHexcolor(str string) bool
-func IsHost(str string) bool
-func IsIP(str string) bool
-func IsIPv4(str string) bool
-func IsIPv6(str string) bool
-func IsISBN(str string, version int) bool
-func IsISBN10(str string) bool
-func IsISBN13(str string) bool
-func IsISO3166Alpha2(str string) bool
-func IsISO3166Alpha3(str string) bool
-func IsISO4217(str string) bool
-func IsISO693Alpha2(str string) bool
-func IsISO693Alpha3b(str string) bool
-func IsIn(str string, params ...string) bool
-func IsInRaw(str string, params ...string) bool
-func IsInt(str string) bool
-func IsJSON(str string) bool
-func IsLatitude(str string) bool
-func IsLongitude(str string) bool
-func IsLowerCase(str string) bool
-func IsMAC(str string) bool
-func IsMD4(str string) bool
-func IsMD5(str string) bool
-func IsMagnetURI(str string) bool
-func IsMongoID(str string) bool
-func IsMultibyte(str string) bool
-func IsNatural(value float64) bool
-func IsNegative(value float64) bool
-func IsNonNegative(value float64) bool
-func IsNonPositive(value float64) bool
-func IsNotNull(str string) bool
-func IsNull(str string) bool
-func IsNumeric(str string) bool
-func IsPort(str string) bool
-func IsPositive(value float64) bool
-func IsPrintableASCII(str string) bool
-func IsRFC3339(str string) bool
-func IsRFC3339WithoutZone(str string) bool
-func IsRGBcolor(str string) bool
-func IsRegex(str string) bool
-func IsRequestURI(rawurl string) bool
-func IsRequestURL(rawurl string) bool
-func IsRipeMD128(str string) bool
-func IsRipeMD160(str string) bool
-func IsRsaPub(str string, params ...string) bool
-func IsRsaPublicKey(str string, keylen int) bool
-func IsSHA1(str string) bool
-func IsSHA256(str string) bool
-func IsSHA384(str string) bool
-func IsSHA512(str string) bool
-func IsSSN(str string) bool
-func IsSemver(str string) bool
-func IsTiger128(str string) bool
-func IsTiger160(str string) bool
-func IsTiger192(str string) bool
-func IsTime(str string, format string) bool
-func IsType(v interface{}, params ...string) bool
-func IsURL(str string) bool
-func IsUTFDigit(str string) bool
-func IsUTFLetter(str string) bool
-func IsUTFLetterNumeric(str string) bool
-func IsUTFNumeric(str string) bool
-func IsUUID(str string) bool
-func IsUUIDv3(str string) bool
-func IsUUIDv4(str string) bool
-func IsUUIDv5(str string) bool
-func IsULID(str string) bool
-func IsUnixTime(str string) bool
-func IsUpperCase(str string) bool
-func IsVariableWidth(str string) bool
-func IsWhole(value float64) bool
-func LeftTrim(str, chars string) string
-func Map(array []interface{}, iterator ResultIterator) []interface{}
-func Matches(str, pattern string) bool
-func MaxStringLength(str string, params ...string) bool
-func MinStringLength(str string, params ...string) bool
-func NormalizeEmail(str string) (string, error)
-func PadBoth(str string, padStr string, padLen int) string
-func PadLeft(str string, padStr string, padLen int) string
-func PadRight(str string, padStr string, padLen int) string
-func PrependPathToErrors(err error, path string) error
-func Range(str string, params ...string) bool
-func RemoveTags(s string) string
-func ReplacePattern(str, pattern, replace string) string
-func Reverse(s string) string
-func RightTrim(str, chars string) string
-func RuneLength(str string, params ...string) bool
-func SafeFileName(str string) string
-func SetFieldsRequiredByDefault(value bool)
-func SetNilPtrAllowedByRequired(value bool)
-func Sign(value float64) float64
-func StringLength(str string, params ...string) bool
-func StringMatches(s string, params ...string) bool
-func StripLow(str string, keepNewLines bool) string
-func ToBoolean(str string) (bool, error)
-func ToFloat(str string) (float64, error)
-func ToInt(value interface{}) (res int64, err error)
-func ToJSON(obj interface{}) (string, error)
-func ToString(obj interface{}) string
-func Trim(str, chars string) string
-func Truncate(str string, length int, ending string) string
-func TruncatingErrorf(str string, args ...interface{}) error
-func UnderscoreToCamelCase(s string) string
-func ValidateMap(inputMap map[string]interface{}, validationMap map[string]interface{}) (bool, error)
-func ValidateStruct(s interface{}) (bool, error)
-func WhiteList(str, chars string) string
-type ConditionIterator
-type CustomTypeValidator
-type Error
-func (e Error) Error() string
-type Errors
-func (es Errors) Error() string
-func (es Errors) Errors() []error
-type ISO3166Entry
-type ISO693Entry
-type InterfaceParamValidator
-type Iterator
-type ParamValidator
-type ResultIterator
-type UnsupportedTypeError
-func (e *UnsupportedTypeError) Error() string
-type Validator
-```
-
-#### Examples
-###### IsURL
-```go
-println(govalidator.IsURL(`http://user@pass:domain.com/path/page`))
-```
-###### IsType
-```go
-println(govalidator.IsType("Bob", "string"))
-println(govalidator.IsType(1, "int"))
-i := 1
-println(govalidator.IsType(&i, "*int"))
-```
-
-IsType can be used through the tag `type` which is essential for map validation:
-```go
-type User struct {
- Name string `valid:"type(string)"`
- Age int `valid:"type(int)"`
- Meta interface{} `valid:"type(string)"`
-}
-result, err := govalidator.ValidateStruct(User{"Bob", 20, "meta"})
-if err != nil {
- println("error: " + err.Error())
-}
-println(result)
-```
-###### ToString
-```go
-type User struct {
- FirstName string
- LastName string
-}
-
-str := govalidator.ToString(&User{"John", "Juan"})
-println(str)
-```
-###### Each, Map, Filter, Count for slices
-Each iterates over the slice/array and calls Iterator for every item
-```go
-data := []interface{}{1, 2, 3, 4, 5}
-var fn govalidator.Iterator = func(value interface{}, index int) {
- println(value.(int))
-}
-govalidator.Each(data, fn)
-```
-```go
-data := []interface{}{1, 2, 3, 4, 5}
-var fn govalidator.ResultIterator = func(value interface{}, index int) interface{} {
- return value.(int) * 3
-}
-_ = govalidator.Map(data, fn) // result = []interface{}{1, 6, 9, 12, 15}
-```
-```go
-data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
-var fn govalidator.ConditionIterator = func(value interface{}, index int) bool {
- return value.(int)%2 == 0
-}
-_ = govalidator.Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10}
-_ = govalidator.Count(data, fn) // result = 5
-```
-###### ValidateStruct [#2](https://github.com/asaskevich/govalidator/pull/2)
-If you want to validate structs, you can use tag `valid` for any field in your structure. All validators used with this field in one tag are separated by comma. If you want to skip validation, place `-` in your tag. If you need a validator that is not on the list below, you can add it like this:
-```go
-govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
- return str == "duck"
-})
-```
-For completely custom validators (interface-based), see below.
-
-Here is a list of available validators for struct fields (validator - used function):
-```go
-"email": IsEmail,
-"url": IsURL,
-"dialstring": IsDialString,
-"requrl": IsRequestURL,
-"requri": IsRequestURI,
-"alpha": IsAlpha,
-"utfletter": IsUTFLetter,
-"alphanum": IsAlphanumeric,
-"utfletternum": IsUTFLetterNumeric,
-"numeric": IsNumeric,
-"utfnumeric": IsUTFNumeric,
-"utfdigit": IsUTFDigit,
-"hexadecimal": IsHexadecimal,
-"hexcolor": IsHexcolor,
-"rgbcolor": IsRGBcolor,
-"lowercase": IsLowerCase,
-"uppercase": IsUpperCase,
-"int": IsInt,
-"float": IsFloat,
-"null": IsNull,
-"uuid": IsUUID,
-"uuidv3": IsUUIDv3,
-"uuidv4": IsUUIDv4,
-"uuidv5": IsUUIDv5,
-"creditcard": IsCreditCard,
-"isbn10": IsISBN10,
-"isbn13": IsISBN13,
-"json": IsJSON,
-"multibyte": IsMultibyte,
-"ascii": IsASCII,
-"printableascii": IsPrintableASCII,
-"fullwidth": IsFullWidth,
-"halfwidth": IsHalfWidth,
-"variablewidth": IsVariableWidth,
-"base64": IsBase64,
-"datauri": IsDataURI,
-"ip": IsIP,
-"port": IsPort,
-"ipv4": IsIPv4,
-"ipv6": IsIPv6,
-"dns": IsDNSName,
-"host": IsHost,
-"mac": IsMAC,
-"latitude": IsLatitude,
-"longitude": IsLongitude,
-"ssn": IsSSN,
-"semver": IsSemver,
-"rfc3339": IsRFC3339,
-"rfc3339WithoutZone": IsRFC3339WithoutZone,
-"ISO3166Alpha2": IsISO3166Alpha2,
-"ISO3166Alpha3": IsISO3166Alpha3,
-"ulid": IsULID,
-```
-Validators with parameters
-
-```go
-"range(min|max)": Range,
-"length(min|max)": ByteLength,
-"runelength(min|max)": RuneLength,
-"stringlength(min|max)": StringLength,
-"matches(pattern)": StringMatches,
-"in(string1|string2|...|stringN)": IsIn,
-"rsapub(keylength)" : IsRsaPub,
-"minstringlength(int): MinStringLength,
-"maxstringlength(int): MaxStringLength,
-```
-Validators with parameters for any type
-
-```go
-"type(type)": IsType,
-```
-
-And here is small example of usage:
-```go
-type Post struct {
- Title string `valid:"alphanum,required"`
- Message string `valid:"duck,ascii"`
- Message2 string `valid:"animal(dog)"`
- AuthorIP string `valid:"ipv4"`
- Date string `valid:"-"`
-}
-post := &Post{
- Title: "My Example Post",
- Message: "duck",
- Message2: "dog",
- AuthorIP: "123.234.54.3",
-}
-
-// Add your own struct validation tags
-govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
- return str == "duck"
-})
-
-// Add your own struct validation tags with parameter
-govalidator.ParamTagMap["animal"] = govalidator.ParamValidator(func(str string, params ...string) bool {
- species := params[0]
- return str == species
-})
-govalidator.ParamTagRegexMap["animal"] = regexp.MustCompile("^animal\\((\\w+)\\)$")
-
-result, err := govalidator.ValidateStruct(post)
-if err != nil {
- println("error: " + err.Error())
-}
-println(result)
-```
-###### ValidateMap [#2](https://github.com/asaskevich/govalidator/pull/338)
-If you want to validate maps, you can use the map to be validated and a validation map that contain the same tags used in ValidateStruct, both maps have to be in the form `map[string]interface{}`
-
-So here is small example of usage:
-```go
-var mapTemplate = map[string]interface{}{
- "name":"required,alpha",
- "family":"required,alpha",
- "email":"required,email",
- "cell-phone":"numeric",
- "address":map[string]interface{}{
- "line1":"required,alphanum",
- "line2":"alphanum",
- "postal-code":"numeric",
- },
-}
-
-var inputMap = map[string]interface{}{
- "name":"Bob",
- "family":"Smith",
- "email":"foo@bar.baz",
- "address":map[string]interface{}{
- "line1":"",
- "line2":"",
- "postal-code":"",
- },
-}
-
-result, err := govalidator.ValidateMap(inputMap, mapTemplate)
-if err != nil {
- println("error: " + err.Error())
-}
-println(result)
-```
-
-###### WhiteList
-```go
-// Remove all characters from string ignoring characters between "a" and "z"
-println(govalidator.WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa")
-```
-
-###### Custom validation functions
-Custom validation using your own domain specific validators is also available - here's an example of how to use it:
-```go
-import "github.com/asaskevich/govalidator"
-
-type CustomByteArray [6]byte // custom types are supported and can be validated
-
-type StructWithCustomByteArray struct {
- ID CustomByteArray `valid:"customByteArrayValidator,customMinLengthValidator"` // multiple custom validators are possible as well and will be evaluated in sequence
- Email string `valid:"email"`
- CustomMinLength int `valid:"-"`
-}
-
-govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, context interface{}) bool {
- switch v := context.(type) { // you can type switch on the context interface being validated
- case StructWithCustomByteArray:
- // you can check and validate against some other field in the context,
- // return early or not validate against the context at all – your choice
- case SomeOtherType:
- // ...
- default:
- // expecting some other type? Throw/panic here or continue
- }
-
- switch v := i.(type) { // type switch on the struct field being validated
- case CustomByteArray:
- for _, e := range v { // this validator checks that the byte array is not empty, i.e. not all zeroes
- if e != 0 {
- return true
- }
- }
- }
- return false
-})
-govalidator.CustomTypeTagMap.Set("customMinLengthValidator", func(i interface{}, context interface{}) bool {
- switch v := context.(type) { // this validates a field against the value in another field, i.e. dependent validation
- case StructWithCustomByteArray:
- return len(v.ID) >= v.CustomMinLength
- }
- return false
-})
-```
-
-###### Loop over Error()
-By default .Error() returns all errors in a single String. To access each error you can do this:
-```go
- if err != nil {
- errs := err.(govalidator.Errors).Errors()
- for _, e := range errs {
- fmt.Println(e.Error())
- }
- }
-```
-
-###### Custom error messages
-Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it:
-```go
-type Ticket struct {
- Id int64 `json:"id"`
- FirstName string `json:"firstname" valid:"required~First name is blank"`
-}
-```
-
-#### Notes
-Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator).
-Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator).
-
-#### Support
-If you do have a contribution to the package, feel free to create a Pull Request or an Issue.
-
-#### What to contribute
-If you don't know what to do, there are some features and functions that need to be done
-
-- [ ] Refactor code
-- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar, and typo check
-- [ ] Create an up-to-date list of contributors and of projects that currently use this package
-- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
-- [ ] Update the current [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
-- [ ] Update the [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) available for `ValidateStruct` and add new ones
-- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC`, etc.
-- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
-- [ ] Implement fuzz testing
-- [ ] Implement some struct/map/array utilities
-- [ ] Implement map/array validation
-- [ ] Implement benchmarking
-- [ ] Implement a batch of examples
-- [ ] Look at forks for new features and fixes
-
-#### Advice
-Feel free to create what you want, but keep the following in mind when you implement new features:
-- Code must be clear and readable, and the names of variables/constants must clearly describe what they represent
-- Public functions must be documented in the source file and added to the list of available functions in README.md
-- There must be unit tests for any new functions and improvements
-
-## Credits
-### Contributors
-
-This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
-
-#### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors)
-* [Daniel Lohse](https://github.com/annismckenzie)
-* [Attila Oláh](https://github.com/attilaolah)
-* [Daniel Korner](https://github.com/Dadie)
-* [Steven Wilkin](https://github.com/stevenwilkin)
-* [Deiwin Sarjas](https://github.com/deiwin)
-* [Noah Shibley](https://github.com/slugmobile)
-* [Nathan Davies](https://github.com/nathj07)
-* [Matt Sanford](https://github.com/mzsanford)
-* [Simon ccl1115](https://github.com/ccl1115)
-
-### Backers
-
-Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/govalidator#backer)]
-
-### Sponsors
-
-Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/govalidator#sponsor)]
-
-## License
-[FOSSA Status](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large)
diff --git a/vendor/github.com/asaskevich/govalidator/arrays.go b/vendor/github.com/asaskevich/govalidator/arrays.go
deleted file mode 100644
index 3e1da7cb48..0000000000
--- a/vendor/github.com/asaskevich/govalidator/arrays.go
+++ /dev/null
@@ -1,87 +0,0 @@
-package govalidator
-
-// Iterator is the function that accepts an element of a slice/array and its index
-type Iterator func(interface{}, int)
-
-// ResultIterator is the function that accepts an element of a slice/array and its index and returns any result
-type ResultIterator func(interface{}, int) interface{}
-
-// ConditionIterator is the function that accepts an element of a slice/array and its index and returns a boolean
-type ConditionIterator func(interface{}, int) bool
-
-// ReduceIterator is the function that accepts two elements of a slice/array and returns the result of merging those values
-type ReduceIterator func(interface{}, interface{}) interface{}
-
-// Some validates that at least one item of the array satisfies the ConditionIterator. Returns a boolean.
-func Some(array []interface{}, iterator ConditionIterator) bool {
- res := false
- for index, data := range array {
- res = res || iterator(data, index)
- }
- return res
-}
-
-// Every validates that every item of the array satisfies the ConditionIterator. Returns a boolean.
-func Every(array []interface{}, iterator ConditionIterator) bool {
- res := true
- for index, data := range array {
- res = res && iterator(data, index)
- }
- return res
-}
-
-// Reduce boils down a list of values into a single value using the ReduceIterator
-func Reduce(array []interface{}, iterator ReduceIterator, initialValue interface{}) interface{} {
- for _, data := range array {
- initialValue = iterator(initialValue, data)
- }
- return initialValue
-}
-
-// Each iterates over the slice and applies the Iterator to every item
-func Each(array []interface{}, iterator Iterator) {
- for index, data := range array {
- iterator(data, index)
- }
-}
-
-// Map iterates over the slice and applies the ResultIterator to every item. Returns a new slice as a result.
-func Map(array []interface{}, iterator ResultIterator) []interface{} {
- var result = make([]interface{}, len(array))
- for index, data := range array {
- result[index] = iterator(data, index)
- }
- return result
-}
-
-// Find iterates over the slice and applies the ConditionIterator to every item. Returns the first item that meets the ConditionIterator, or nil otherwise.
-func Find(array []interface{}, iterator ConditionIterator) interface{} {
- for index, data := range array {
- if iterator(data, index) {
- return data
- }
- }
- return nil
-}
-
-// Filter iterates over the slice and applies the ConditionIterator to every item. Returns a new slice of the matching items.
-func Filter(array []interface{}, iterator ConditionIterator) []interface{} {
- var result = make([]interface{}, 0)
- for index, data := range array {
- if iterator(data, index) {
- result = append(result, data)
- }
- }
- return result
-}
-
-// Count iterates over the slice and applies the ConditionIterator to every item. Returns the count of items that meet the ConditionIterator.
-func Count(array []interface{}, iterator ConditionIterator) int {
- count := 0
- for index, data := range array {
- if iterator(data, index) {
- count = count + 1
- }
- }
- return count
-}
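-
-// Example (illustrative sketch): combining the iterators above to keep the
-// even numbers in a slice and double them.
-//
-//	evens := Filter([]interface{}{1, 2, 3, 4}, func(v interface{}, _ int) bool {
-//		return v.(int)%2 == 0
-//	})
-//	doubled := Map(evens, func(v interface{}, _ int) interface{} {
-//		return v.(int) * 2
-//	})
-//	// doubled == []interface{}{4, 8}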
diff --git a/vendor/github.com/asaskevich/govalidator/converter.go b/vendor/github.com/asaskevich/govalidator/converter.go
deleted file mode 100644
index d68e990fc2..0000000000
--- a/vendor/github.com/asaskevich/govalidator/converter.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package govalidator
-
-import (
- "encoding/json"
- "fmt"
- "reflect"
- "strconv"
-)
-
-// ToString converts the input to a string.
-func ToString(obj interface{}) string {
- res := fmt.Sprintf("%v", obj)
- return res
-}
-
-// ToJSON converts the input to a valid JSON string
-func ToJSON(obj interface{}) (string, error) {
- res, err := json.Marshal(obj)
- if err != nil {
- res = []byte("")
- }
- return string(res), err
-}
-
-// ToFloat converts the input to a float64, or 0.0 (with an error) if the input cannot be converted.
-func ToFloat(value interface{}) (res float64, err error) {
- val := reflect.ValueOf(value)
-
- switch value.(type) {
- case int, int8, int16, int32, int64:
- res = float64(val.Int())
- case uint, uint8, uint16, uint32, uint64:
- res = float64(val.Uint())
- case float32, float64:
- res = val.Float()
- case string:
- res, err = strconv.ParseFloat(val.String(), 64)
- if err != nil {
- res = 0
- }
- default:
-		err = fmt.Errorf("ToFloat: unknown interface type %T", value)
- res = 0
- }
-
- return
-}
-
-// ToInt converts the input string or any numeric type to an int64, or 0 (with an error) if the input cannot be converted.
-func ToInt(value interface{}) (res int64, err error) {
- val := reflect.ValueOf(value)
-
- switch value.(type) {
- case int, int8, int16, int32, int64:
- res = val.Int()
- case uint, uint8, uint16, uint32, uint64:
- res = int64(val.Uint())
- case float32, float64:
- res = int64(val.Float())
- case string:
- if IsInt(val.String()) {
- res, err = strconv.ParseInt(val.String(), 0, 64)
- if err != nil {
- res = 0
- }
- } else {
-			err = fmt.Errorf("ToInt: invalid numeric format %q", value)
- res = 0
- }
- default:
- err = fmt.Errorf("ToInt: unknown interface type %T", value)
- res = 0
- }
-
- return
-}
-
-// ToBoolean converts the input string to a boolean.
-func ToBoolean(str string) (bool, error) {
- return strconv.ParseBool(str)
-}
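-
-// Example (illustrative sketch): converting strings with the helpers above.
-//
-//	f, _ := ToFloat("3.14")   // f == 3.14
-//	n, _ := ToInt("42")       // n == int64(42)
-//	b, _ := ToBoolean("true") // b == true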
diff --git a/vendor/github.com/asaskevich/govalidator/doc.go b/vendor/github.com/asaskevich/govalidator/doc.go
deleted file mode 100644
index 55dce62dc8..0000000000
--- a/vendor/github.com/asaskevich/govalidator/doc.go
+++ /dev/null
@@ -1,3 +0,0 @@
-package govalidator
-
-// A package of validators and sanitizers for strings, structures and collections.
diff --git a/vendor/github.com/asaskevich/govalidator/error.go b/vendor/github.com/asaskevich/govalidator/error.go
deleted file mode 100644
index 1da2336f47..0000000000
--- a/vendor/github.com/asaskevich/govalidator/error.go
+++ /dev/null
@@ -1,47 +0,0 @@
-package govalidator
-
-import (
- "sort"
- "strings"
-)
-
-// Errors is an array of multiple errors and conforms to the error interface.
-type Errors []error
-
-// Errors returns itself.
-func (es Errors) Errors() []error {
- return es
-}
-
-func (es Errors) Error() string {
- var errs []string
- for _, e := range es {
- errs = append(errs, e.Error())
- }
- sort.Strings(errs)
- return strings.Join(errs, ";")
-}
-
-// Error encapsulates a name, an error and whether there's a custom error message or not.
-type Error struct {
- Name string
- Err error
- CustomErrorMessageExists bool
-
- // Validator indicates the name of the validator that failed
- Validator string
- Path []string
-}
-
-func (e Error) Error() string {
- if e.CustomErrorMessageExists {
- return e.Err.Error()
- }
-
- errName := e.Name
- if len(e.Path) > 0 {
- errName = strings.Join(append(e.Path, e.Name), ".")
- }
-
- return errName + ": " + e.Err.Error()
-}
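-
-// Example (illustrative sketch): a failure on a nested field is rendered with
-// its path joined by dots.
-//
-//	e := Error{Name: "Email", Err: errors.New("does not validate as email"), Path: []string{"User"}}
-//	_ = e.Error() // "User.Email: does not validate as email"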
diff --git a/vendor/github.com/asaskevich/govalidator/numerics.go b/vendor/github.com/asaskevich/govalidator/numerics.go
deleted file mode 100644
index 5041d9e868..0000000000
--- a/vendor/github.com/asaskevich/govalidator/numerics.go
+++ /dev/null
@@ -1,100 +0,0 @@
-package govalidator
-
-import (
- "math"
-)
-
-// Abs returns absolute value of number
-func Abs(value float64) float64 {
- return math.Abs(value)
-}
-
-// Sign returns signum of number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise
-func Sign(value float64) float64 {
- if value > 0 {
- return 1
- } else if value < 0 {
- return -1
- } else {
- return 0
- }
-}
-
-// IsNegative returns true if value < 0
-func IsNegative(value float64) bool {
- return value < 0
-}
-
-// IsPositive returns true if value > 0
-func IsPositive(value float64) bool {
- return value > 0
-}
-
-// IsNonNegative returns true if value >= 0
-func IsNonNegative(value float64) bool {
- return value >= 0
-}
-
-// IsNonPositive returns true if value <= 0
-func IsNonPositive(value float64) bool {
- return value <= 0
-}
-
-// InRangeInt returns true if value lies between left and right border
-func InRangeInt(value, left, right interface{}) bool {
- value64, _ := ToInt(value)
- left64, _ := ToInt(left)
- right64, _ := ToInt(right)
- if left64 > right64 {
- left64, right64 = right64, left64
- }
- return value64 >= left64 && value64 <= right64
-}
-
-// InRangeFloat32 returns true if value lies between left and right border
-func InRangeFloat32(value, left, right float32) bool {
- if left > right {
- left, right = right, left
- }
- return value >= left && value <= right
-}
-
-// InRangeFloat64 returns true if value lies between left and right border
-func InRangeFloat64(value, left, right float64) bool {
- if left > right {
- left, right = right, left
- }
- return value >= left && value <= right
-}
-
-// InRange returns true if value lies between the left and right borders; it is generic and handles int, float32, float64 and string.
-// All arguments must be of the same type.
-// It returns false if the value doesn't lie in the range, or if the values are incompatible or not comparable.
-func InRange(value interface{}, left interface{}, right interface{}) bool {
- switch value.(type) {
- case int:
- intValue, _ := ToInt(value)
- intLeft, _ := ToInt(left)
- intRight, _ := ToInt(right)
- return InRangeInt(intValue, intLeft, intRight)
- case float32, float64:
- intValue, _ := ToFloat(value)
- intLeft, _ := ToFloat(left)
- intRight, _ := ToFloat(right)
- return InRangeFloat64(intValue, intLeft, intRight)
- case string:
- return value.(string) >= left.(string) && value.(string) <= right.(string)
- default:
- return false
- }
-}
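-
-// Example (illustrative sketch): the borders are swapped when left > right,
-// and strings are compared lexicographically.
-//
-//	InRange(5, 1, 10)       // true
-//	InRange(2.5, 10.0, 1.0) // true
-//	InRange("b", "a", "c")  // true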
-
-// IsWhole returns true if value is whole number
-func IsWhole(value float64) bool {
- return math.Remainder(value, 1) == 0
-}
-
-// IsNatural returns true if value is natural number (positive and whole)
-func IsNatural(value float64) bool {
- return IsWhole(value) && IsPositive(value)
-}
diff --git a/vendor/github.com/asaskevich/govalidator/patterns.go b/vendor/github.com/asaskevich/govalidator/patterns.go
deleted file mode 100644
index bafc3765ea..0000000000
--- a/vendor/github.com/asaskevich/govalidator/patterns.go
+++ /dev/null
@@ -1,113 +0,0 @@
-package govalidator
-
-import "regexp"
-
-// Basic regular expressions for validating strings
-const (
- Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
- CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11}|6[27][0-9]{14})$"
- ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$"
- ISBN13 string = "^(?:[0-9]{13})$"
- UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$"
- UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
- UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
- UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
- Alpha string = "^[a-zA-Z]+$"
- Alphanumeric string = "^[a-zA-Z0-9]+$"
- Numeric string = "^[0-9]+$"
- Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
- Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
- Hexadecimal string = "^[0-9a-fA-F]+$"
- Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$"
- RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$"
- ASCII string = "^[\x00-\x7F]+$"
- Multibyte string = "[^\x00-\x7F]"
- FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
- HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
- Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$"
- PrintableASCII string = "^[\x20-\x7E]+$"
- DataURI string = "^data:.+\\/(.+);base64$"
- MagnetURI string = "^magnet:\\?xt=urn:[a-zA-Z0-9]+:[a-zA-Z0-9]{32,40}&dn=.+&tr=.+$"
- Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
- Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
- DNSName string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$`
- IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
- URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)`
- URLUsername string = `(\S+(:\S*)?@)`
- URLPath string = `((\/|\?|#)[^\s]*)`
- URLPort string = `(:(\d{1,5}))`
- URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3]|24\d|25[0-5])(\.(\d{1,2}|1\d\d|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-5]))`
- URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))`
- URL = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
- SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
- WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
- UnixPath string = `^(/[^/\x00]*)+/?$`
- WinARPath string = `^(?:(?:[a-zA-Z]:|\\\\[a-z0-9_.$●-]+\\[a-z0-9_.$●-]+)\\|\\?[^\\/:*?"<>|\r\n]+\\?)(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
- UnixARPath string = `^((\.{0,2}/)?([^/\x00]*))+/?$`
- Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
- tagName string = "valid"
- hasLowerCase string = ".*[[:lower:]]"
- hasUpperCase string = ".*[[:upper:]]"
- hasWhitespace string = ".*[[:space:]]"
- hasWhitespaceOnly string = "^[[:space:]]+$"
- IMEI string = "^[0-9a-f]{14}$|^\\d{15}$|^\\d{18}$"
- IMSI string = "^\\d{14,15}$"
- E164 string = `^\+?[1-9]\d{1,14}$`
-)
-
-// Used by IsFilePath func
-const (
- // Unknown is unresolved OS type
- Unknown = iota
- // Win is Windows type
- Win
- // Unix is *nix OS types
- Unix
-)
-
-var (
- userRegexp = regexp.MustCompile("^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~.-]+$")
- hostRegexp = regexp.MustCompile("^[^\\s]+\\.[^\\s]+$")
- userDotRegexp = regexp.MustCompile("(^[.]{1})|([.]{1}$)|([.]{2,})")
- rxEmail = regexp.MustCompile(Email)
- rxCreditCard = regexp.MustCompile(CreditCard)
- rxISBN10 = regexp.MustCompile(ISBN10)
- rxISBN13 = regexp.MustCompile(ISBN13)
- rxUUID3 = regexp.MustCompile(UUID3)
- rxUUID4 = regexp.MustCompile(UUID4)
- rxUUID5 = regexp.MustCompile(UUID5)
- rxUUID = regexp.MustCompile(UUID)
- rxAlpha = regexp.MustCompile(Alpha)
- rxAlphanumeric = regexp.MustCompile(Alphanumeric)
- rxNumeric = regexp.MustCompile(Numeric)
- rxInt = regexp.MustCompile(Int)
- rxFloat = regexp.MustCompile(Float)
- rxHexadecimal = regexp.MustCompile(Hexadecimal)
- rxHexcolor = regexp.MustCompile(Hexcolor)
- rxRGBcolor = regexp.MustCompile(RGBcolor)
- rxASCII = regexp.MustCompile(ASCII)
- rxPrintableASCII = regexp.MustCompile(PrintableASCII)
- rxMultibyte = regexp.MustCompile(Multibyte)
- rxFullWidth = regexp.MustCompile(FullWidth)
- rxHalfWidth = regexp.MustCompile(HalfWidth)
- rxBase64 = regexp.MustCompile(Base64)
- rxDataURI = regexp.MustCompile(DataURI)
- rxMagnetURI = regexp.MustCompile(MagnetURI)
- rxLatitude = regexp.MustCompile(Latitude)
- rxLongitude = regexp.MustCompile(Longitude)
- rxDNSName = regexp.MustCompile(DNSName)
- rxURL = regexp.MustCompile(URL)
- rxSSN = regexp.MustCompile(SSN)
- rxWinPath = regexp.MustCompile(WinPath)
- rxUnixPath = regexp.MustCompile(UnixPath)
- rxARWinPath = regexp.MustCompile(WinARPath)
- rxARUnixPath = regexp.MustCompile(UnixARPath)
- rxSemver = regexp.MustCompile(Semver)
- rxHasLowerCase = regexp.MustCompile(hasLowerCase)
- rxHasUpperCase = regexp.MustCompile(hasUpperCase)
- rxHasWhitespace = regexp.MustCompile(hasWhitespace)
- rxHasWhitespaceOnly = regexp.MustCompile(hasWhitespaceOnly)
- rxIMEI = regexp.MustCompile(IMEI)
- rxIMSI = regexp.MustCompile(IMSI)
- rxE164 = regexp.MustCompile(E164)
-)
diff --git a/vendor/github.com/asaskevich/govalidator/types.go b/vendor/github.com/asaskevich/govalidator/types.go
deleted file mode 100644
index c573abb51a..0000000000
--- a/vendor/github.com/asaskevich/govalidator/types.go
+++ /dev/null
@@ -1,656 +0,0 @@
-package govalidator
-
-import (
- "reflect"
- "regexp"
- "sort"
- "sync"
-)
-
-// Validator is a wrapper for a validator function that returns bool and accepts string.
-type Validator func(str string) bool
-
-// CustomTypeValidator is a wrapper for validator functions that returns bool and accepts any type.
-// The second parameter should be the context (in the case of validating a struct: the whole object being validated).
-type CustomTypeValidator func(i interface{}, o interface{}) bool
-
-// ParamValidator is a wrapper for validator functions that accept additional parameters.
-type ParamValidator func(str string, params ...string) bool
-
-// InterfaceParamValidator is a wrapper for functions that accept additional parameters for an interface value
-type InterfaceParamValidator func(in interface{}, params ...string) bool
-type tagOptionsMap map[string]tagOption
-
-func (t tagOptionsMap) orderedKeys() []string {
- var keys []string
- for k := range t {
- keys = append(keys, k)
- }
-
- sort.Slice(keys, func(a, b int) bool {
- return t[keys[a]].order < t[keys[b]].order
- })
-
- return keys
-}
-
-type tagOption struct {
- name string
- customErrorMessage string
- order int
-}
-
-// UnsupportedTypeError is a wrapper for reflect.Type
-type UnsupportedTypeError struct {
- Type reflect.Type
-}
-
-// stringValues is a slice of reflect.Value holding *reflect.StringValue.
-// It implements the methods to sort by string.
-type stringValues []reflect.Value
-
-// InterfaceParamTagMap is a map of functions that accept additional parameters for an interface value
-var InterfaceParamTagMap = map[string]InterfaceParamValidator{
- "type": IsType,
-}
-
-// InterfaceParamTagRegexMap maps interface param tags to their respective regexes.
-var InterfaceParamTagRegexMap = map[string]*regexp.Regexp{
- "type": regexp.MustCompile(`^type\((.*)\)$`),
-}
-
-// ParamTagMap is a map of functions that accept additional parameters
-var ParamTagMap = map[string]ParamValidator{
- "length": ByteLength,
- "range": Range,
- "runelength": RuneLength,
- "stringlength": StringLength,
- "matches": StringMatches,
- "in": IsInRaw,
- "rsapub": IsRsaPub,
- "minstringlength": MinStringLength,
- "maxstringlength": MaxStringLength,
-}
-
-// ParamTagRegexMap maps param tags to their respective regexes.
-var ParamTagRegexMap = map[string]*regexp.Regexp{
- "range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
- "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
- "runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
- "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
- "in": regexp.MustCompile(`^in\((.*)\)`),
- "matches": regexp.MustCompile(`^matches\((.+)\)$`),
- "rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"),
- "minstringlength": regexp.MustCompile("^minstringlength\\((\\d+)\\)$"),
- "maxstringlength": regexp.MustCompile("^maxstringlength\\((\\d+)\\)$"),
-}
-
-type customTypeTagMap struct {
- validators map[string]CustomTypeValidator
-
- sync.RWMutex
-}
-
-func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) {
- tm.RLock()
- defer tm.RUnlock()
- v, ok := tm.validators[name]
- return v, ok
-}
-
-func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) {
- tm.Lock()
- defer tm.Unlock()
- tm.validators[name] = ctv
-}
-
-// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function.
-// Use this to validate compound or custom types that need to be handled as a whole, e.g.
-// `type UUID [16]byte` (this would be handled as an array of bytes).
-var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)}
-
-// TagMap is a map of functions that can be used as tags for the ValidateStruct function.
-var TagMap = map[string]Validator{
- "email": IsEmail,
- "url": IsURL,
- "dialstring": IsDialString,
- "requrl": IsRequestURL,
- "requri": IsRequestURI,
- "alpha": IsAlpha,
- "utfletter": IsUTFLetter,
- "alphanum": IsAlphanumeric,
- "utfletternum": IsUTFLetterNumeric,
- "numeric": IsNumeric,
- "utfnumeric": IsUTFNumeric,
- "utfdigit": IsUTFDigit,
- "hexadecimal": IsHexadecimal,
- "hexcolor": IsHexcolor,
- "rgbcolor": IsRGBcolor,
- "lowercase": IsLowerCase,
- "uppercase": IsUpperCase,
- "int": IsInt,
- "float": IsFloat,
- "null": IsNull,
- "notnull": IsNotNull,
- "uuid": IsUUID,
- "uuidv3": IsUUIDv3,
- "uuidv4": IsUUIDv4,
- "uuidv5": IsUUIDv5,
- "creditcard": IsCreditCard,
- "isbn10": IsISBN10,
- "isbn13": IsISBN13,
- "json": IsJSON,
- "multibyte": IsMultibyte,
- "ascii": IsASCII,
- "printableascii": IsPrintableASCII,
- "fullwidth": IsFullWidth,
- "halfwidth": IsHalfWidth,
- "variablewidth": IsVariableWidth,
- "base64": IsBase64,
- "datauri": IsDataURI,
- "ip": IsIP,
- "port": IsPort,
- "ipv4": IsIPv4,
- "ipv6": IsIPv6,
- "dns": IsDNSName,
- "host": IsHost,
- "mac": IsMAC,
- "latitude": IsLatitude,
- "longitude": IsLongitude,
- "ssn": IsSSN,
- "semver": IsSemver,
- "rfc3339": IsRFC3339,
- "rfc3339WithoutZone": IsRFC3339WithoutZone,
- "ISO3166Alpha2": IsISO3166Alpha2,
- "ISO3166Alpha3": IsISO3166Alpha3,
- "ISO4217": IsISO4217,
- "IMEI": IsIMEI,
- "ulid": IsULID,
-}
-
-// ISO3166Entry stores country codes
-type ISO3166Entry struct {
- EnglishShortName string
- FrenchShortName string
- Alpha2Code string
- Alpha3Code string
- Numeric string
-}
-
-//ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes"
-var ISO3166List = []ISO3166Entry{
- {"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"},
- {"Albania", "Albanie (l')", "AL", "ALB", "008"},
- {"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"},
- {"Algeria", "Algérie (l')", "DZ", "DZA", "012"},
- {"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"},
- {"Andorra", "Andorre (l')", "AD", "AND", "020"},
- {"Angola", "Angola (l')", "AO", "AGO", "024"},
- {"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"},
- {"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"},
- {"Argentina", "Argentine (l')", "AR", "ARG", "032"},
- {"Australia", "Australie (l')", "AU", "AUS", "036"},
- {"Austria", "Autriche (l')", "AT", "AUT", "040"},
- {"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"},
- {"Bahrain", "Bahreïn", "BH", "BHR", "048"},
- {"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"},
- {"Armenia", "Arménie (l')", "AM", "ARM", "051"},
- {"Barbados", "Barbade (la)", "BB", "BRB", "052"},
- {"Belgium", "Belgique (la)", "BE", "BEL", "056"},
- {"Bermuda", "Bermudes (les)", "BM", "BMU", "060"},
- {"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"},
- {"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"},
- {"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"},
- {"Botswana", "Botswana (le)", "BW", "BWA", "072"},
- {"Bouvet Island", "Bouvet (l'Île)", "BV", "BVT", "074"},
- {"Brazil", "Brésil (le)", "BR", "BRA", "076"},
- {"Belize", "Belize (le)", "BZ", "BLZ", "084"},
- {"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"},
- {"Solomon Islands", "Salomon (Îles)", "SB", "SLB", "090"},
- {"Virgin Islands (British)", "Vierges britanniques (les Îles)", "VG", "VGB", "092"},
- {"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"},
- {"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"},
- {"Myanmar", "Myanmar (le)", "MM", "MMR", "104"},
- {"Burundi", "Burundi (le)", "BI", "BDI", "108"},
- {"Belarus", "Bélarus (le)", "BY", "BLR", "112"},
- {"Cambodia", "Cambodge (le)", "KH", "KHM", "116"},
- {"Cameroon", "Cameroun (le)", "CM", "CMR", "120"},
- {"Canada", "Canada (le)", "CA", "CAN", "124"},
- {"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"},
- {"Cayman Islands (the)", "Caïmans (les Îles)", "KY", "CYM", "136"},
- {"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"},
- {"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"},
- {"Chad", "Tchad (le)", "TD", "TCD", "148"},
- {"Chile", "Chili (le)", "CL", "CHL", "152"},
- {"China", "Chine (la)", "CN", "CHN", "156"},
- {"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"},
- {"Christmas Island", "Christmas (l'Île)", "CX", "CXR", "162"},
- {"Cocos (Keeling) Islands (the)", "Cocos (les Îles)/ Keeling (les Îles)", "CC", "CCK", "166"},
- {"Colombia", "Colombie (la)", "CO", "COL", "170"},
- {"Comoros (the)", "Comores (les)", "KM", "COM", "174"},
- {"Mayotte", "Mayotte", "YT", "MYT", "175"},
- {"Congo (the)", "Congo (le)", "CG", "COG", "178"},
- {"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"},
- {"Cook Islands (the)", "Cook (les Îles)", "CK", "COK", "184"},
- {"Costa Rica", "Costa Rica (le)", "CR", "CRI", "188"},
- {"Croatia", "Croatie (la)", "HR", "HRV", "191"},
- {"Cuba", "Cuba", "CU", "CUB", "192"},
- {"Cyprus", "Chypre", "CY", "CYP", "196"},
- {"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"},
- {"Benin", "Bénin (le)", "BJ", "BEN", "204"},
- {"Denmark", "Danemark (le)", "DK", "DNK", "208"},
- {"Dominica", "Dominique (la)", "DM", "DMA", "212"},
- {"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"},
- {"Ecuador", "Équateur (l')", "EC", "ECU", "218"},
- {"El Salvador", "El Salvador", "SV", "SLV", "222"},
- {"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"},
- {"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"},
- {"Eritrea", "Érythrée (l')", "ER", "ERI", "232"},
- {"Estonia", "Estonie (l')", "EE", "EST", "233"},
- {"Faroe Islands (the)", "Féroé (les Îles)", "FO", "FRO", "234"},
- {"Falkland Islands (the) [Malvinas]", "Falkland (les Îles)/Malouines (les Îles)", "FK", "FLK", "238"},
- {"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les Îles Sandwich du Sud (la)", "GS", "SGS", "239"},
- {"Fiji", "Fidji (les)", "FJ", "FJI", "242"},
- {"Finland", "Finlande (la)", "FI", "FIN", "246"},
- {"Åland Islands", "Åland(les Îles)", "AX", "ALA", "248"},
- {"France", "France (la)", "FR", "FRA", "250"},
- {"French Guiana", "Guyane française (la )", "GF", "GUF", "254"},
- {"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"},
- {"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"},
- {"Djibouti", "Djibouti", "DJ", "DJI", "262"},
- {"Gabon", "Gabon (le)", "GA", "GAB", "266"},
- {"Georgia", "Géorgie (la)", "GE", "GEO", "268"},
- {"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"},
- {"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"},
- {"Germany", "Allemagne (l')", "DE", "DEU", "276"},
- {"Ghana", "Ghana (le)", "GH", "GHA", "288"},
- {"Gibraltar", "Gibraltar", "GI", "GIB", "292"},
- {"Kiribati", "Kiribati", "KI", "KIR", "296"},
- {"Greece", "Grèce (la)", "GR", "GRC", "300"},
- {"Greenland", "Groenland (le)", "GL", "GRL", "304"},
- {"Grenada", "Grenade (la)", "GD", "GRD", "308"},
- {"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"},
- {"Guam", "Guam", "GU", "GUM", "316"},
- {"Guatemala", "Guatemala (le)", "GT", "GTM", "320"},
- {"Guinea", "Guinée (la)", "GN", "GIN", "324"},
- {"Guyana", "Guyana (le)", "GY", "GUY", "328"},
- {"Haiti", "Haïti", "HT", "HTI", "332"},
- {"Heard Island and McDonald Islands", "Heard-et-Îles MacDonald (l'Île)", "HM", "HMD", "334"},
- {"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"},
- {"Honduras", "Honduras (le)", "HN", "HND", "340"},
- {"Hong Kong", "Hong Kong", "HK", "HKG", "344"},
- {"Hungary", "Hongrie (la)", "HU", "HUN", "348"},
- {"Iceland", "Islande (l')", "IS", "ISL", "352"},
- {"India", "Inde (l')", "IN", "IND", "356"},
- {"Indonesia", "Indonésie (l')", "ID", "IDN", "360"},
- {"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"},
- {"Iraq", "Iraq (l')", "IQ", "IRQ", "368"},
- {"Ireland", "Irlande (l')", "IE", "IRL", "372"},
- {"Israel", "Israël", "IL", "ISR", "376"},
- {"Italy", "Italie (l')", "IT", "ITA", "380"},
- {"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"},
- {"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"},
- {"Japan", "Japon (le)", "JP", "JPN", "392"},
- {"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"},
- {"Jordan", "Jordanie (la)", "JO", "JOR", "400"},
- {"Kenya", "Kenya (le)", "KE", "KEN", "404"},
- {"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"},
- {"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"},
- {"Kuwait", "Koweït (le)", "KW", "KWT", "414"},
- {"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"},
- {"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"},
- {"Lebanon", "Liban (le)", "LB", "LBN", "422"},
- {"Lesotho", "Lesotho (le)", "LS", "LSO", "426"},
- {"Latvia", "Lettonie (la)", "LV", "LVA", "428"},
- {"Liberia", "Libéria (le)", "LR", "LBR", "430"},
- {"Libya", "Libye (la)", "LY", "LBY", "434"},
- {"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"},
- {"Lithuania", "Lituanie (la)", "LT", "LTU", "440"},
- {"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"},
- {"Macao", "Macao", "MO", "MAC", "446"},
- {"Madagascar", "Madagascar", "MG", "MDG", "450"},
- {"Malawi", "Malawi (le)", "MW", "MWI", "454"},
- {"Malaysia", "Malaisie (la)", "MY", "MYS", "458"},
- {"Maldives", "Maldives (les)", "MV", "MDV", "462"},
- {"Mali", "Mali (le)", "ML", "MLI", "466"},
- {"Malta", "Malte", "MT", "MLT", "470"},
- {"Martinique", "Martinique (la)", "MQ", "MTQ", "474"},
- {"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"},
- {"Mauritius", "Maurice", "MU", "MUS", "480"},
- {"Mexico", "Mexique (le)", "MX", "MEX", "484"},
- {"Monaco", "Monaco", "MC", "MCO", "492"},
- {"Mongolia", "Mongolie (la)", "MN", "MNG", "496"},
- {"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"},
- {"Montenegro", "Monténégro (le)", "ME", "MNE", "499"},
- {"Montserrat", "Montserrat", "MS", "MSR", "500"},
- {"Morocco", "Maroc (le)", "MA", "MAR", "504"},
- {"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"},
- {"Oman", "Oman", "OM", "OMN", "512"},
- {"Namibia", "Namibie (la)", "NA", "NAM", "516"},
- {"Nauru", "Nauru", "NR", "NRU", "520"},
- {"Nepal", "Népal (le)", "NP", "NPL", "524"},
- {"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"},
- {"Curaçao", "Curaçao", "CW", "CUW", "531"},
- {"Aruba", "Aruba", "AW", "ABW", "533"},
- {"Sint Maarten (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"},
- {"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"},
- {"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"},
- {"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"},
- {"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"},
- {"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"},
- {"Niger (the)", "Niger (le)", "NE", "NER", "562"},
- {"Nigeria", "Nigéria (le)", "NG", "NGA", "566"},
- {"Niue", "Niue", "NU", "NIU", "570"},
- {"Norfolk Island", "Norfolk (l'Île)", "NF", "NFK", "574"},
- {"Norway", "Norvège (la)", "NO", "NOR", "578"},
- {"Northern Mariana Islands (the)", "Mariannes du Nord (les Îles)", "MP", "MNP", "580"},
- {"United States Minor Outlying Islands (the)", "Îles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"},
- {"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"},
- {"Marshall Islands (the)", "Marshall (Îles)", "MH", "MHL", "584"},
- {"Palau", "Palaos (les)", "PW", "PLW", "585"},
- {"Pakistan", "Pakistan (le)", "PK", "PAK", "586"},
- {"Panama", "Panama (le)", "PA", "PAN", "591"},
- {"Papua New Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"},
- {"Paraguay", "Paraguay (le)", "PY", "PRY", "600"},
- {"Peru", "Pérou (le)", "PE", "PER", "604"},
- {"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"},
- {"Pitcairn", "Pitcairn", "PN", "PCN", "612"},
- {"Poland", "Pologne (la)", "PL", "POL", "616"},
- {"Portugal", "Portugal (le)", "PT", "PRT", "620"},
- {"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"},
- {"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"},
- {"Puerto Rico", "Porto Rico", "PR", "PRI", "630"},
- {"Qatar", "Qatar (le)", "QA", "QAT", "634"},
- {"Réunion", "Réunion (La)", "RE", "REU", "638"},
- {"Romania", "Roumanie (la)", "RO", "ROU", "642"},
- {"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"},
- {"Rwanda", "Rwanda (le)", "RW", "RWA", "646"},
- {"Saint Barthélemy", "Saint-Barthélemy", "BL", "BLM", "652"},
- {"Saint Helena, Ascension and Tristan da Cunha", "Sainte-Hélène, Ascension et Tristan da Cunha", "SH", "SHN", "654"},
- {"Saint Kitts and Nevis", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"},
- {"Anguilla", "Anguilla", "AI", "AIA", "660"},
- {"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"},
- {"Saint Martin (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"},
- {"Saint Pierre and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"},
- {"Saint Vincent and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"},
- {"San Marino", "Saint-Marin", "SM", "SMR", "674"},
- {"Sao Tome and Principe", "Sao Tomé-et-Principe", "ST", "STP", "678"},
- {"Saudi Arabia", "Arabie saoudite (l')", "SA", "SAU", "682"},
- {"Senegal", "Sénégal (le)", "SN", "SEN", "686"},
- {"Serbia", "Serbie (la)", "RS", "SRB", "688"},
- {"Seychelles", "Seychelles (les)", "SC", "SYC", "690"},
- {"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"},
- {"Singapore", "Singapour", "SG", "SGP", "702"},
- {"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"},
- {"Viet Nam", "Viet Nam (le)", "VN", "VNM", "704"},
- {"Slovenia", "Slovénie (la)", "SI", "SVN", "705"},
- {"Somalia", "Somalie (la)", "SO", "SOM", "706"},
- {"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"},
- {"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"},
- {"Spain", "Espagne (l')", "ES", "ESP", "724"},
- {"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"},
- {"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"},
- {"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"},
- {"Suriname", "Suriname (le)", "SR", "SUR", "740"},
- {"Svalbard and Jan Mayen", "Svalbard et l'Île Jan Mayen (le)", "SJ", "SJM", "744"},
- {"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"},
- {"Sweden", "Suède (la)", "SE", "SWE", "752"},
- {"Switzerland", "Suisse (la)", "CH", "CHE", "756"},
- {"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"},
- {"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"},
- {"Thailand", "Thaïlande (la)", "TH", "THA", "764"},
- {"Togo", "Togo (le)", "TG", "TGO", "768"},
- {"Tokelau", "Tokelau (les)", "TK", "TKL", "772"},
- {"Tonga", "Tonga (les)", "TO", "TON", "776"},
- {"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"},
- {"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"},
- {"Tunisia", "Tunisie (la)", "TN", "TUN", "788"},
- {"Turkey", "Turquie (la)", "TR", "TUR", "792"},
- {"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"},
- {"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les Îles)", "TC", "TCA", "796"},
- {"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"},
- {"Uganda", "Ouganda (l')", "UG", "UGA", "800"},
- {"Ukraine", "Ukraine (l')", "UA", "UKR", "804"},
- {"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"},
- {"Egypt", "Égypte (l')", "EG", "EGY", "818"},
- {"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"},
- {"Guernsey", "Guernesey", "GG", "GGY", "831"},
- {"Jersey", "Jersey", "JE", "JEY", "832"},
- {"Isle of Man", "Île de Man", "IM", "IMN", "833"},
- {"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"},
- {"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"},
- {"Virgin Islands (U.S.)", "Vierges des États-Unis (les Îles)", "VI", "VIR", "850"},
- {"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"},
- {"Uruguay", "Uruguay (l')", "UY", "URY", "858"},
- {"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"},
- {"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"},
- {"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"},
- {"Samoa", "Samoa (le)", "WS", "WSM", "882"},
- {"Yemen", "Yémen (le)", "YE", "YEM", "887"},
- {"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
-}
-
-// ISO4217List is the list of ISO currency codes
-var ISO4217List = []string{
- "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
- "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
- "CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
- "DJF", "DKK", "DOP", "DZD",
- "EGP", "ERN", "ETB", "EUR",
- "FJD", "FKP",
- "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
- "HKD", "HNL", "HRK", "HTG", "HUF",
- "IDR", "ILS", "INR", "IQD", "IRR", "ISK",
- "JMD", "JOD", "JPY",
- "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
- "LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
- "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
- "NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
- "OMR",
- "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
- "QAR",
- "RON", "RSD", "RUB", "RWF",
- "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "STN", "SVC", "SYP", "SZL",
- "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
- "UAH", "UGX", "USD", "USN", "UYI", "UYU", "UYW", "UZS",
- "VEF", "VES", "VND", "VUV",
- "WST",
- "XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
- "YER",
- "ZAR", "ZMW", "ZWL",
-}
-
-// ISO693Entry stores ISO language codes
-type ISO693Entry struct {
- Alpha3bCode string
- Alpha2Code string
- English string
-}
-
-//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json
-var ISO693List = []ISO693Entry{
- {Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"},
- {Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"},
- {Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"},
- {Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"},
- {Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"},
- {Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"},
- {Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"},
- {Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"},
- {Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"},
- {Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"},
- {Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"},
- {Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"},
- {Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"},
- {Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"},
- {Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"},
- {Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"},
- {Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"},
- {Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"},
- {Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"},
- {Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"},
- {Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"},
- {Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"},
- {Alpha3bCode: "bre", Alpha2Code: "br", English: "Breton"},
- {Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"},
- {Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"},
- {Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"},
- {Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"},
- {Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"},
- {Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"},
- {Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"},
- {Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"},
- {Alpha3bCode: "cor", Alpha2Code: "kw", English: "Cornish"},
- {Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"},
- {Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"},
- {Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"},
- {Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"},
- {Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"},
- {Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"},
- {Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"},
- {Alpha3bCode: "eng", Alpha2Code: "en", English: "English"},
- {Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"},
- {Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"},
- {Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"},
- {Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"},
- {Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"},
- {Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"},
- {Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"},
- {Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"},
- {Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"},
- {Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"},
- {Alpha3bCode: "ger", Alpha2Code: "de", English: "German"},
- {Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"},
- {Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"},
- {Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"},
- {Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"},
- {Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"},
- {Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"},
- {Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"},
- {Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"},
- {Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"},
- {Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"},
- {Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"},
- {Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"},
- {Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"},
- {Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"},
- {Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"},
- {Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"},
- {Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"},
- {Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"},
- {Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"},
- {Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"},
- {Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"},
- {Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"},
- {Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"},
- {Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"},
- {Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"},
- {Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"},
- {Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"},
- {Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"},
- {Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"},
- {Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"},
- {Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"},
- {Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"},
- {Alpha3bCode: "khm", Alpha2Code: "km", English: "Central Khmer"},
- {Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"},
- {Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"},
- {Alpha3bCode: "kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"},
- {Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"},
- {Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"},
- {Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"},
- {Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"},
- {Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"},
- {Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"},
- {Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"},
- {Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"},
- {Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"},
- {Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"},
- {Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"},
- {Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"},
- {Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"},
- {Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"},
- {Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"},
- {Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"},
- {Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"},
- {Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"},
- {Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"},
- {Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"},
- {Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"},
- {Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"},
- {Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"},
- {Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"},
- {Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"},
- {Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"},
- {Alpha3bCode: "nde", Alpha2Code: "nd", English: "Ndebele, North; North Ndebele"},
- {Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"},
- {Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"},
- {Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"},
- {Alpha3bCode: "nob", Alpha2Code: "nb", English: "Bokmål, Norwegian; Norwegian Bokmål"},
- {Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"},
- {Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"},
- {Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"},
- {Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"},
- {Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"},
- {Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"},
- {Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"},
- {Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"},
- {Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"},
- {Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"},
- {Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"},
- {Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"},
- {Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"},
- {Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"},
- {Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"},
- {Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"},
- {Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"},
- {Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"},
- {Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"},
- {Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"},
- {Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"},
- {Alpha3bCode: "slo", Alpha2Code: "sk", English: "Slovak"},
- {Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"},
- {Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"},
- {Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"},
- {Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"},
- {Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"},
- {Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"},
- {Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"},
- {Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"},
- {Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"},
- {Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"},
- {Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"},
- {Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"},
- {Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"},
- {Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"},
- {Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"},
- {Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"},
- {Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"},
- {Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"},
- {Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"},
- {Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"},
- {Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"},
- {Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"},
- {Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"},
- {Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"},
- {Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"},
- {Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"},
- {Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"},
- {Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"},
- {Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"},
- {Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"},
- {Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"},
- {Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"},
- {Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"},
- {Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"},
- {Alpha3bCode: "vie", Alpha2Code: "vi", English: "Vietnamese"},
- {Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"},
- {Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"},
- {Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"},
- {Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"},
- {Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"},
- {Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"},
- {Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"},
- {Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"},
- {Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"},
-}
diff --git a/vendor/github.com/asaskevich/govalidator/utils.go b/vendor/github.com/asaskevich/govalidator/utils.go
deleted file mode 100644
index f4c30f824a..0000000000
--- a/vendor/github.com/asaskevich/govalidator/utils.go
+++ /dev/null
@@ -1,270 +0,0 @@
-package govalidator
-
-import (
- "errors"
- "fmt"
- "html"
- "math"
- "path"
- "regexp"
- "strings"
- "unicode"
- "unicode/utf8"
-)
-
-// Contains checks if the string contains the substring.
-func Contains(str, substring string) bool {
- return strings.Contains(str, substring)
-}
-
-// Matches checks if the string matches the pattern (the pattern is a regular expression).
-// In case of error it returns false.
-func Matches(str, pattern string) bool {
- match, _ := regexp.MatchString(pattern, str)
- return match
-}
-
-// LeftTrim trims characters from the left side of the input.
-// If second argument is empty, it will remove leading spaces.
-func LeftTrim(str, chars string) string {
- if chars == "" {
- return strings.TrimLeftFunc(str, unicode.IsSpace)
- }
- r, _ := regexp.Compile("^[" + chars + "]+")
- return r.ReplaceAllString(str, "")
-}
-
-// RightTrim trims characters from the right side of the input.
-// If second argument is empty, it will remove trailing spaces.
-func RightTrim(str, chars string) string {
- if chars == "" {
- return strings.TrimRightFunc(str, unicode.IsSpace)
- }
- r, _ := regexp.Compile("[" + chars + "]+$")
- return r.ReplaceAllString(str, "")
-}
-
-// Trim trims characters from both sides of the input.
-// If second argument is empty, it will remove spaces.
-func Trim(str, chars string) string {
- return LeftTrim(RightTrim(str, chars), chars)
-}
-
-// WhiteList removes characters that do not appear in the whitelist.
-func WhiteList(str, chars string) string {
- pattern := "[^" + chars + "]+"
- r, _ := regexp.Compile(pattern)
- return r.ReplaceAllString(str, "")
-}
-
-// BlackList removes characters that appear in the blacklist.
-func BlackList(str, chars string) string {
- pattern := "[" + chars + "]+"
- r, _ := regexp.Compile(pattern)
- return r.ReplaceAllString(str, "")
-}
-
-// StripLow removes characters with a numerical value < 32 or equal to 127, mostly control characters.
-// If keepNewLines is true, newline characters are preserved (\n and \r, hex 0xA and 0xD).
-func StripLow(str string, keepNewLines bool) string {
- chars := ""
- if keepNewLines {
- chars = "\x00-\x09\x0B\x0C\x0E-\x1F\x7F"
- } else {
- chars = "\x00-\x1F\x7F"
- }
- return BlackList(str, chars)
-}
-
-// ReplacePattern replaces regular expression pattern in string
-func ReplacePattern(str, pattern, replace string) string {
- r, _ := regexp.Compile(pattern)
- return r.ReplaceAllString(str, replace)
-}
-
-// Escape replaces <, >, & and " with HTML entities.
-var Escape = html.EscapeString
-
-func addSegment(inrune, segment []rune) []rune {
- if len(segment) == 0 {
- return inrune
- }
- if len(inrune) != 0 {
- inrune = append(inrune, '_')
- }
- inrune = append(inrune, segment...)
- return inrune
-}
-
-// UnderscoreToCamelCase converts from underscore separated form to camel case form.
-// Ex.: my_func => MyFunc
-func UnderscoreToCamelCase(s string) string {
- return strings.Replace(strings.Title(strings.Replace(strings.ToLower(s), "_", " ", -1)), " ", "", -1)
-}
-
-// CamelCaseToUnderscore converts from camel case form to underscore separated form.
-// Ex.: MyFunc => my_func
-func CamelCaseToUnderscore(str string) string {
- var output []rune
- var segment []rune
- for _, r := range str {
-
- // not treat number as separate segment
- if !unicode.IsLower(r) && string(r) != "_" && !unicode.IsNumber(r) {
- output = addSegment(output, segment)
- segment = nil
- }
- segment = append(segment, unicode.ToLower(r))
- }
- output = addSegment(output, segment)
- return string(output)
-}
-
-// Reverse returns reversed string
-func Reverse(s string) string {
- r := []rune(s)
- for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 {
- r[i], r[j] = r[j], r[i]
- }
- return string(r)
-}
-
-// GetLines splits string by "\n" and return array of lines
-func GetLines(s string) []string {
- return strings.Split(s, "\n")
-}
-
-// GetLine returns specified line of multiline string
-func GetLine(s string, index int) (string, error) {
- lines := GetLines(s)
- if index < 0 || index >= len(lines) {
- return "", errors.New("line index out of bounds")
- }
- return lines[index], nil
-}
-
-// RemoveTags removes all tags from HTML string
-func RemoveTags(s string) string {
- return ReplacePattern(s, "<[^>]*>", "")
-}
-
-// SafeFileName returns safe string that can be used in file names
-func SafeFileName(str string) string {
- name := strings.ToLower(str)
- name = path.Clean(path.Base(name))
- name = strings.Trim(name, " ")
- separators, err := regexp.Compile(`[ &_=+:]`)
- if err == nil {
- name = separators.ReplaceAllString(name, "-")
- }
- legal, err := regexp.Compile(`[^[:alnum:]-.]`)
- if err == nil {
- name = legal.ReplaceAllString(name, "")
- }
- for strings.Contains(name, "--") {
- name = strings.Replace(name, "--", "-", -1)
- }
- return name
-}
-
-// NormalizeEmail canonicalize an email address.
-// The local part of the email address is lowercased for all domains; the hostname is always lowercased and
-// the local part of the email address is always lowercased for hosts that are known to be case-insensitive (currently only GMail).
-// Normalization follows special rules for known providers: currently, GMail addresses have dots removed in the local part and
-// are stripped of tags (e.g. some.one+tag@gmail.com becomes someone@gmail.com) and all @googlemail.com addresses are
-// normalized to @gmail.com.
-func NormalizeEmail(str string) (string, error) {
- if !IsEmail(str) {
- return "", fmt.Errorf("%s is not an email", str)
- }
- parts := strings.Split(str, "@")
- parts[0] = strings.ToLower(parts[0])
- parts[1] = strings.ToLower(parts[1])
- if parts[1] == "gmail.com" || parts[1] == "googlemail.com" {
- parts[1] = "gmail.com"
- parts[0] = strings.Split(ReplacePattern(parts[0], `\.`, ""), "+")[0]
- }
- return strings.Join(parts, "@"), nil
-}
-
-// Truncate a string to the closest length without breaking words.
-func Truncate(str string, length int, ending string) string {
- var aftstr, befstr string
- if len(str) > length {
- words := strings.Fields(str)
- before, present := 0, 0
- for i := range words {
- befstr = aftstr
- before = present
- aftstr = aftstr + words[i] + " "
- present = len(aftstr)
- if present > length && i != 0 {
- if (length - before) < (present - length) {
- return Trim(befstr, " /\\.,\"'#!?&@+-") + ending
- }
- return Trim(aftstr, " /\\.,\"'#!?&@+-") + ending
- }
- }
- }
-
- return str
-}
-
-// PadLeft pads left side of a string if size of string is less then indicated pad length
-func PadLeft(str string, padStr string, padLen int) string {
- return buildPadStr(str, padStr, padLen, true, false)
-}
-
-// PadRight pads right side of a string if size of string is less then indicated pad length
-func PadRight(str string, padStr string, padLen int) string {
- return buildPadStr(str, padStr, padLen, false, true)
-}
-
-// PadBoth pads both sides of a string if size of string is less then indicated pad length
-func PadBoth(str string, padStr string, padLen int) string {
- return buildPadStr(str, padStr, padLen, true, true)
-}
-
-// PadString either left, right or both sides.
-// Note that padding string can be unicode and more then one character
-func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {
-
- // When padded length is less then the current string size
- if padLen < utf8.RuneCountInString(str) {
- return str
- }
-
- padLen -= utf8.RuneCountInString(str)
-
- targetLen := padLen
-
- targetLenLeft := targetLen
- targetLenRight := targetLen
- if padLeft && padRight {
- targetLenLeft = padLen / 2
- targetLenRight = padLen - targetLenLeft
- }
-
- strToRepeatLen := utf8.RuneCountInString(padStr)
-
- repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen)))
- repeatedString := strings.Repeat(padStr, repeatTimes)
-
- leftSide := ""
- if padLeft {
- leftSide = repeatedString[0:targetLenLeft]
- }
-
- rightSide := ""
- if padRight {
- rightSide = repeatedString[0:targetLenRight]
- }
-
- return leftSide + str + rightSide
-}
-
-// TruncatingErrorf removes extra args from fmt.Errorf if not formatted in the str object
-func TruncatingErrorf(str string, args ...interface{}) error {
- n := strings.Count(str, "%s")
- return fmt.Errorf(str, args[:n]...)
-}
diff --git a/vendor/github.com/asaskevich/govalidator/validator.go b/vendor/github.com/asaskevich/govalidator/validator.go
deleted file mode 100644
index c9c4fac065..0000000000
--- a/vendor/github.com/asaskevich/govalidator/validator.go
+++ /dev/null
@@ -1,1768 +0,0 @@
-// Package govalidator is package of validators and sanitizers for strings, structs and collections.
-package govalidator
-
-import (
- "bytes"
- "crypto/rsa"
- "crypto/x509"
- "encoding/base64"
- "encoding/json"
- "encoding/pem"
- "fmt"
- "io/ioutil"
- "net"
- "net/url"
- "reflect"
- "regexp"
- "sort"
- "strconv"
- "strings"
- "time"
- "unicode"
- "unicode/utf8"
-)
-
-var (
- fieldsRequiredByDefault bool
- nilPtrAllowedByRequired = false
- notNumberRegexp = regexp.MustCompile("[^0-9]+")
- whiteSpacesAndMinus = regexp.MustCompile(`[\s-]+`)
- paramsRegexp = regexp.MustCompile(`\(.*\)$`)
-)
-
-const maxURLRuneCount = 2083
-const minURLRuneCount = 3
-const rfc3339WithoutZone = "2006-01-02T15:04:05"
-
-// SetFieldsRequiredByDefault causes validation to fail when struct fields
-// do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`).
-// This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
-// type exampleStruct struct {
-// Name string ``
-// Email string `valid:"email"`
-// This, however, will only fail when Email is empty or an invalid email address:
-// type exampleStruct2 struct {
-// Name string `valid:"-"`
-// Email string `valid:"email"`
-// Lastly, this will only fail when Email is an invalid email address but not when it's empty:
-// type exampleStruct2 struct {
-// Name string `valid:"-"`
-// Email string `valid:"email,optional"`
-func SetFieldsRequiredByDefault(value bool) {
- fieldsRequiredByDefault = value
-}
-
-// SetNilPtrAllowedByRequired causes validation to pass for nil ptrs when a field is set to required.
-// The validation will still reject ptr fields in their zero value state. Example with this enabled:
-// type exampleStruct struct {
-// Name *string `valid:"required"`
-// With `Name` set to "", this will be considered invalid input and will cause a validation error.
-// With `Name` set to nil, this will be considered valid by validation.
-// By default this is disabled.
-func SetNilPtrAllowedByRequired(value bool) {
- nilPtrAllowedByRequired = value
-}
-
-// IsEmail checks if the string is an email.
-func IsEmail(str string) bool {
- // TODO uppercase letters are not supported
- return rxEmail.MatchString(str)
-}
-
-// IsExistingEmail checks if the string is an email of existing domain
-func IsExistingEmail(email string) bool {
-
- if len(email) < 6 || len(email) > 254 {
- return false
- }
- at := strings.LastIndex(email, "@")
- if at <= 0 || at > len(email)-3 {
- return false
- }
- user := email[:at]
- host := email[at+1:]
- if len(user) > 64 {
- return false
- }
- switch host {
- case "localhost", "example.com":
- return true
- }
- if userDotRegexp.MatchString(user) || !userRegexp.MatchString(user) || !hostRegexp.MatchString(host) {
- return false
- }
- if _, err := net.LookupMX(host); err != nil {
- if _, err := net.LookupIP(host); err != nil {
- return false
- }
- }
-
- return true
-}
-
-// IsURL checks if the string is an URL.
-func IsURL(str string) bool {
- if str == "" || utf8.RuneCountInString(str) >= maxURLRuneCount || len(str) <= minURLRuneCount || strings.HasPrefix(str, ".") {
- return false
- }
- strTemp := str
- if strings.Contains(str, ":") && !strings.Contains(str, "://") {
- // support no indicated urlscheme but with colon for port number
- // http:// is appended so url.Parse will succeed, strTemp used so it does not impact rxURL.MatchString
- strTemp = "http://" + str
- }
- u, err := url.Parse(strTemp)
- if err != nil {
- return false
- }
- if strings.HasPrefix(u.Host, ".") {
- return false
- }
- if u.Host == "" && (u.Path != "" && !strings.Contains(u.Path, ".")) {
- return false
- }
- return rxURL.MatchString(str)
-}
-
-// IsRequestURL checks if the string rawurl, assuming
-// it was received in an HTTP request, is a valid
-// URL confirm to RFC 3986
-func IsRequestURL(rawurl string) bool {
- url, err := url.ParseRequestURI(rawurl)
- if err != nil {
- return false //Couldn't even parse the rawurl
- }
- if len(url.Scheme) == 0 {
- return false //No Scheme found
- }
- return true
-}
-
-// IsRequestURI checks if the string rawurl, assuming
-// it was received in an HTTP request, is an
-// absolute URI or an absolute path.
-func IsRequestURI(rawurl string) bool {
- _, err := url.ParseRequestURI(rawurl)
- return err == nil
-}
-
-// IsAlpha checks if the string contains only letters (a-zA-Z). Empty string is valid.
-func IsAlpha(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxAlpha.MatchString(str)
-}
-
-//IsUTFLetter checks if the string contains only unicode letter characters.
-//Similar to IsAlpha but for all languages. Empty string is valid.
-func IsUTFLetter(str string) bool {
- if IsNull(str) {
- return true
- }
-
- for _, c := range str {
- if !unicode.IsLetter(c) {
- return false
- }
- }
- return true
-
-}
-
-// IsAlphanumeric checks if the string contains only letters and numbers. Empty string is valid.
-func IsAlphanumeric(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxAlphanumeric.MatchString(str)
-}
-
-// IsUTFLetterNumeric checks if the string contains only unicode letters and numbers. Empty string is valid.
-func IsUTFLetterNumeric(str string) bool {
- if IsNull(str) {
- return true
- }
- for _, c := range str {
- if !unicode.IsLetter(c) && !unicode.IsNumber(c) { //letters && numbers are ok
- return false
- }
- }
- return true
-
-}
-
-// IsNumeric checks if the string contains only numbers. Empty string is valid.
-func IsNumeric(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxNumeric.MatchString(str)
-}
-
-// IsUTFNumeric checks if the string contains only unicode numbers of any kind.
-// Numbers can be 0-9 but also Fractions ¾,Roman Ⅸ and Hangzhou 〩. Empty string is valid.
-func IsUTFNumeric(str string) bool {
- if IsNull(str) {
- return true
- }
- if strings.IndexAny(str, "+-") > 0 {
- return false
- }
- if len(str) > 1 {
- str = strings.TrimPrefix(str, "-")
- str = strings.TrimPrefix(str, "+")
- }
- for _, c := range str {
- if !unicode.IsNumber(c) { //numbers && minus sign are ok
- return false
- }
- }
- return true
-
-}
-
-// IsUTFDigit checks if the string contains only unicode radix-10 decimal digits. Empty string is valid.
-func IsUTFDigit(str string) bool {
- if IsNull(str) {
- return true
- }
- if strings.IndexAny(str, "+-") > 0 {
- return false
- }
- if len(str) > 1 {
- str = strings.TrimPrefix(str, "-")
- str = strings.TrimPrefix(str, "+")
- }
- for _, c := range str {
- if !unicode.IsDigit(c) { //digits && minus sign are ok
- return false
- }
- }
- return true
-
-}
-
-// IsHexadecimal checks if the string is a hexadecimal number.
-func IsHexadecimal(str string) bool {
- return rxHexadecimal.MatchString(str)
-}
-
-// IsHexcolor checks if the string is a hexadecimal color.
-func IsHexcolor(str string) bool {
- return rxHexcolor.MatchString(str)
-}
-
-// IsRGBcolor checks if the string is a valid RGB color in form rgb(RRR, GGG, BBB).
-func IsRGBcolor(str string) bool {
- return rxRGBcolor.MatchString(str)
-}
-
-// IsLowerCase checks if the string is lowercase. Empty string is valid.
-func IsLowerCase(str string) bool {
- if IsNull(str) {
- return true
- }
- return str == strings.ToLower(str)
-}
-
-// IsUpperCase checks if the string is uppercase. Empty string is valid.
-func IsUpperCase(str string) bool {
- if IsNull(str) {
- return true
- }
- return str == strings.ToUpper(str)
-}
-
-// HasLowerCase checks if the string contains at least 1 lowercase. Empty string is valid.
-func HasLowerCase(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxHasLowerCase.MatchString(str)
-}
-
-// HasUpperCase checks if the string contains as least 1 uppercase. Empty string is valid.
-func HasUpperCase(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxHasUpperCase.MatchString(str)
-}
-
-// IsInt checks if the string is an integer. Empty string is valid.
-func IsInt(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxInt.MatchString(str)
-}
-
-// IsFloat checks if the string is a float.
-func IsFloat(str string) bool {
- return str != "" && rxFloat.MatchString(str)
-}
-
-// IsDivisibleBy checks if the string is a number that's divisible by another.
-// If second argument is not valid integer or zero, it's return false.
-// Otherwise, if first argument is not valid integer or zero, it's return true (Invalid string converts to zero).
-func IsDivisibleBy(str, num string) bool {
- f, _ := ToFloat(str)
- p := int64(f)
- q, _ := ToInt(num)
- if q == 0 {
- return false
- }
- return (p == 0) || (p%q == 0)
-}
-
-// IsNull checks if the string is null.
-func IsNull(str string) bool {
- return len(str) == 0
-}
-
-// IsNotNull checks if the string is not null.
-func IsNotNull(str string) bool {
- return !IsNull(str)
-}
-
-// HasWhitespaceOnly checks the string only contains whitespace
-func HasWhitespaceOnly(str string) bool {
- return len(str) > 0 && rxHasWhitespaceOnly.MatchString(str)
-}
-
-// HasWhitespace checks if the string contains any whitespace
-func HasWhitespace(str string) bool {
- return len(str) > 0 && rxHasWhitespace.MatchString(str)
-}
-
-// IsByteLength checks if the string's length (in bytes) falls in a range.
-func IsByteLength(str string, min, max int) bool {
- return len(str) >= min && len(str) <= max
-}
-
-// IsUUIDv3 checks if the string is a UUID version 3.
-func IsUUIDv3(str string) bool {
- return rxUUID3.MatchString(str)
-}
-
-// IsUUIDv4 checks if the string is a UUID version 4.
-func IsUUIDv4(str string) bool {
- return rxUUID4.MatchString(str)
-}
-
-// IsUUIDv5 checks if the string is a UUID version 5.
-func IsUUIDv5(str string) bool {
- return rxUUID5.MatchString(str)
-}
-
-// IsUUID checks if the string is a UUID (version 3, 4 or 5).
-func IsUUID(str string) bool {
- return rxUUID.MatchString(str)
-}
-
-// Byte to index table for O(1) lookups when unmarshaling.
-// We use 0xFF as sentinel value for invalid indexes.
-var ulidDec = [...]byte{
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x01,
- 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
- 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14, 0x15, 0xFF,
- 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C, 0x1D, 0x1E,
- 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C,
- 0x0D, 0x0E, 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14,
- 0x15, 0xFF, 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C,
- 0x1D, 0x1E, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
-}
-
-// EncodedSize is the length of a text encoded ULID.
-const ulidEncodedSize = 26
-
-// IsULID checks if the string is a ULID.
-//
-// Implementation got from:
-// https://github.com/oklog/ulid (Apache-2.0 License)
-//
-func IsULID(str string) bool {
- // Check if a base32 encoded ULID is the right length.
- if len(str) != ulidEncodedSize {
- return false
- }
-
- // Check if all the characters in a base32 encoded ULID are part of the
- // expected base32 character set.
- if ulidDec[str[0]] == 0xFF ||
- ulidDec[str[1]] == 0xFF ||
- ulidDec[str[2]] == 0xFF ||
- ulidDec[str[3]] == 0xFF ||
- ulidDec[str[4]] == 0xFF ||
- ulidDec[str[5]] == 0xFF ||
- ulidDec[str[6]] == 0xFF ||
- ulidDec[str[7]] == 0xFF ||
- ulidDec[str[8]] == 0xFF ||
- ulidDec[str[9]] == 0xFF ||
- ulidDec[str[10]] == 0xFF ||
- ulidDec[str[11]] == 0xFF ||
- ulidDec[str[12]] == 0xFF ||
- ulidDec[str[13]] == 0xFF ||
- ulidDec[str[14]] == 0xFF ||
- ulidDec[str[15]] == 0xFF ||
- ulidDec[str[16]] == 0xFF ||
- ulidDec[str[17]] == 0xFF ||
- ulidDec[str[18]] == 0xFF ||
- ulidDec[str[19]] == 0xFF ||
- ulidDec[str[20]] == 0xFF ||
- ulidDec[str[21]] == 0xFF ||
- ulidDec[str[22]] == 0xFF ||
- ulidDec[str[23]] == 0xFF ||
- ulidDec[str[24]] == 0xFF ||
- ulidDec[str[25]] == 0xFF {
- return false
- }
-
- // Check if the first character in a base32 encoded ULID will overflow. This
- // happens because the base32 representation encodes 130 bits, while the
- // ULID is only 128 bits.
- //
- // See https://github.com/oklog/ulid/issues/9 for details.
- if str[0] > '7' {
- return false
- }
- return true
-}
-
-// IsCreditCard checks if the string is a credit card.
-func IsCreditCard(str string) bool {
- sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "")
- if !rxCreditCard.MatchString(sanitized) {
- return false
- }
-
- number, _ := ToInt(sanitized)
- number, lastDigit := number / 10, number % 10
-
- var sum int64
- for i:=0; number > 0; i++ {
- digit := number % 10
-
- if i % 2 == 0 {
- digit *= 2
- if digit > 9 {
- digit -= 9
- }
- }
-
- sum += digit
- number = number / 10
- }
-
- return (sum + lastDigit) % 10 == 0
-}
-
-// IsISBN10 checks if the string is an ISBN version 10.
-func IsISBN10(str string) bool {
- return IsISBN(str, 10)
-}
-
-// IsISBN13 checks if the string is an ISBN version 13.
-func IsISBN13(str string) bool {
- return IsISBN(str, 13)
-}
-
-// IsISBN checks if the string is an ISBN (version 10 or 13).
-// If version value is not equal to 10 or 13, it will be checks both variants.
-func IsISBN(str string, version int) bool {
- sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "")
- var checksum int32
- var i int32
- if version == 10 {
- if !rxISBN10.MatchString(sanitized) {
- return false
- }
- for i = 0; i < 9; i++ {
- checksum += (i + 1) * int32(sanitized[i]-'0')
- }
- if sanitized[9] == 'X' {
- checksum += 10 * 10
- } else {
- checksum += 10 * int32(sanitized[9]-'0')
- }
- if checksum%11 == 0 {
- return true
- }
- return false
- } else if version == 13 {
- if !rxISBN13.MatchString(sanitized) {
- return false
- }
- factor := []int32{1, 3}
- for i = 0; i < 12; i++ {
- checksum += factor[i%2] * int32(sanitized[i]-'0')
- }
- return (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0
- }
- return IsISBN(str, 10) || IsISBN(str, 13)
-}
-
-// IsJSON checks if the string is valid JSON (note: uses json.Unmarshal).
-func IsJSON(str string) bool {
- var js json.RawMessage
- return json.Unmarshal([]byte(str), &js) == nil
-}
-
-// IsMultibyte checks if the string contains one or more multibyte chars. Empty string is valid.
-func IsMultibyte(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxMultibyte.MatchString(str)
-}
-
-// IsASCII checks if the string contains ASCII chars only. Empty string is valid.
-func IsASCII(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxASCII.MatchString(str)
-}
-
-// IsPrintableASCII checks if the string contains printable ASCII chars only. Empty string is valid.
-func IsPrintableASCII(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxPrintableASCII.MatchString(str)
-}
-
-// IsFullWidth checks if the string contains any full-width chars. Empty string is valid.
-func IsFullWidth(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxFullWidth.MatchString(str)
-}
-
-// IsHalfWidth checks if the string contains any half-width chars. Empty string is valid.
-func IsHalfWidth(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxHalfWidth.MatchString(str)
-}
-
-// IsVariableWidth checks if the string contains a mixture of full and half-width chars. Empty string is valid.
-func IsVariableWidth(str string) bool {
- if IsNull(str) {
- return true
- }
- return rxHalfWidth.MatchString(str) && rxFullWidth.MatchString(str)
-}
-
-// IsBase64 checks if a string is base64 encoded.
-func IsBase64(str string) bool {
- return rxBase64.MatchString(str)
-}
-
-// IsFilePath checks is a string is Win or Unix file path and returns it's type.
-func IsFilePath(str string) (bool, int) {
- if rxWinPath.MatchString(str) {
- //check windows path limit see:
- // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath
- if len(str[3:]) > 32767 {
- return false, Win
- }
- return true, Win
- } else if rxUnixPath.MatchString(str) {
- return true, Unix
- }
- return false, Unknown
-}
-
-//IsWinFilePath checks both relative & absolute paths in Windows
-func IsWinFilePath(str string) bool {
- if rxARWinPath.MatchString(str) {
- //check windows path limit see:
- // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath
- if len(str[3:]) > 32767 {
- return false
- }
- return true
- }
- return false
-}
-
-//IsUnixFilePath checks both relative & absolute paths in Unix
-func IsUnixFilePath(str string) bool {
- if rxARUnixPath.MatchString(str) {
- return true
- }
- return false
-}
-
-// IsDataURI checks if a string is base64 encoded data URI such as an image
-func IsDataURI(str string) bool {
- dataURI := strings.Split(str, ",")
- if !rxDataURI.MatchString(dataURI[0]) {
- return false
- }
- return IsBase64(dataURI[1])
-}
-
-// IsMagnetURI checks if a string is valid magnet URI
-func IsMagnetURI(str string) bool {
- return rxMagnetURI.MatchString(str)
-}
-
-// IsISO3166Alpha2 checks if a string is valid two-letter country code
-func IsISO3166Alpha2(str string) bool {
- for _, entry := range ISO3166List {
- if str == entry.Alpha2Code {
- return true
- }
- }
- return false
-}
-
-// IsISO3166Alpha3 checks if a string is valid three-letter country code
-func IsISO3166Alpha3(str string) bool {
- for _, entry := range ISO3166List {
- if str == entry.Alpha3Code {
- return true
- }
- }
- return false
-}
-
-// IsISO693Alpha2 checks if a string is valid two-letter language code
-func IsISO693Alpha2(str string) bool {
- for _, entry := range ISO693List {
- if str == entry.Alpha2Code {
- return true
- }
- }
- return false
-}
-
-// IsISO693Alpha3b checks if a string is valid three-letter language code
-func IsISO693Alpha3b(str string) bool {
- for _, entry := range ISO693List {
- if str == entry.Alpha3bCode {
- return true
- }
- }
- return false
-}
-
-// IsDNSName will validate the given string as a DNS name
-func IsDNSName(str string) bool {
- if str == "" || len(strings.Replace(str, ".", "", -1)) > 255 {
- // constraints already violated
- return false
- }
- return !IsIP(str) && rxDNSName.MatchString(str)
-}
-
-// IsHash checks if a string is a hash of type algorithm.
-// Algorithm is one of ['md4', 'md5', 'sha1', 'sha256', 'sha384', 'sha512', 'ripemd128', 'ripemd160', 'tiger128', 'tiger160', 'tiger192', 'crc32', 'crc32b']
-func IsHash(str string, algorithm string) bool {
- var len string
- algo := strings.ToLower(algorithm)
-
- if algo == "crc32" || algo == "crc32b" {
- len = "8"
- } else if algo == "md5" || algo == "md4" || algo == "ripemd128" || algo == "tiger128" {
- len = "32"
- } else if algo == "sha1" || algo == "ripemd160" || algo == "tiger160" {
- len = "40"
- } else if algo == "tiger192" {
- len = "48"
- } else if algo == "sha3-224" {
- len = "56"
- } else if algo == "sha256" || algo == "sha3-256" {
- len = "64"
- } else if algo == "sha384" || algo == "sha3-384" {
- len = "96"
- } else if algo == "sha512" || algo == "sha3-512" {
- len = "128"
- } else {
- return false
- }
-
- return Matches(str, "^[a-f0-9]{"+len+"}$")
-}
-
-// IsSHA3224 checks is a string is a SHA3-224 hash. Alias for `IsHash(str, "sha3-224")`
-func IsSHA3224(str string) bool {
- return IsHash(str, "sha3-224")
-}
-
-// IsSHA3256 checks is a string is a SHA3-256 hash. Alias for `IsHash(str, "sha3-256")`
-func IsSHA3256(str string) bool {
- return IsHash(str, "sha3-256")
-}
-
-// IsSHA3384 checks is a string is a SHA3-384 hash. Alias for `IsHash(str, "sha3-384")`
-func IsSHA3384(str string) bool {
- return IsHash(str, "sha3-384")
-}
-
-// IsSHA3512 checks is a string is a SHA3-512 hash. Alias for `IsHash(str, "sha3-512")`
-func IsSHA3512(str string) bool {
- return IsHash(str, "sha3-512")
-}
-
-// IsSHA512 checks is a string is a SHA512 hash. Alias for `IsHash(str, "sha512")`
-func IsSHA512(str string) bool {
- return IsHash(str, "sha512")
-}
-
-// IsSHA384 checks is a string is a SHA384 hash. Alias for `IsHash(str, "sha384")`
-func IsSHA384(str string) bool {
- return IsHash(str, "sha384")
-}
-
-// IsSHA256 checks is a string is a SHA256 hash. Alias for `IsHash(str, "sha256")`
-func IsSHA256(str string) bool {
- return IsHash(str, "sha256")
-}
-
-// IsTiger192 checks is a string is a Tiger192 hash. Alias for `IsHash(str, "tiger192")`
-func IsTiger192(str string) bool {
- return IsHash(str, "tiger192")
-}
-
-// IsTiger160 checks is a string is a Tiger160 hash. Alias for `IsHash(str, "tiger160")`
-func IsTiger160(str string) bool {
- return IsHash(str, "tiger160")
-}
-
-// IsRipeMD160 checks is a string is a RipeMD160 hash. Alias for `IsHash(str, "ripemd160")`
-func IsRipeMD160(str string) bool {
- return IsHash(str, "ripemd160")
-}
-
-// IsSHA1 checks is a string is a SHA-1 hash. Alias for `IsHash(str, "sha1")`
-func IsSHA1(str string) bool {
- return IsHash(str, "sha1")
-}
-
-// IsTiger128 checks is a string is a Tiger128 hash. Alias for `IsHash(str, "tiger128")`
-func IsTiger128(str string) bool {
- return IsHash(str, "tiger128")
-}
-
-// IsRipeMD128 checks is a string is a RipeMD128 hash. Alias for `IsHash(str, "ripemd128")`
-func IsRipeMD128(str string) bool {
- return IsHash(str, "ripemd128")
-}
-
-// IsCRC32 checks is a string is a CRC32 hash. Alias for `IsHash(str, "crc32")`
-func IsCRC32(str string) bool {
- return IsHash(str, "crc32")
-}
-
-// IsCRC32b checks is a string is a CRC32b hash. Alias for `IsHash(str, "crc32b")`
-func IsCRC32b(str string) bool {
- return IsHash(str, "crc32b")
-}
-
-// IsMD5 checks is a string is a MD5 hash. Alias for `IsHash(str, "md5")`
-func IsMD5(str string) bool {
- return IsHash(str, "md5")
-}
-
-// IsMD4 checks is a string is a MD4 hash. Alias for `IsHash(str, "md4")`
-func IsMD4(str string) bool {
- return IsHash(str, "md4")
-}
-
-// IsDialString validates the given string for usage with the various Dial() functions
-func IsDialString(str string) bool {
- if h, p, err := net.SplitHostPort(str); err == nil && h != "" && p != "" && (IsDNSName(h) || IsIP(h)) && IsPort(p) {
- return true
- }
-
- return false
-}
-
-// IsIP checks if a string is either IP version 4 or 6. Alias for `net.ParseIP`
-func IsIP(str string) bool {
- return net.ParseIP(str) != nil
-}
-
-// IsPort checks if a string represents a valid port
-func IsPort(str string) bool {
- if i, err := strconv.Atoi(str); err == nil && i > 0 && i < 65536 {
- return true
- }
- return false
-}
-
-// IsIPv4 checks if the string is an IP version 4.
-func IsIPv4(str string) bool {
- ip := net.ParseIP(str)
- return ip != nil && strings.Contains(str, ".")
-}
-
-// IsIPv6 checks if the string is an IP version 6.
-func IsIPv6(str string) bool {
- ip := net.ParseIP(str)
- return ip != nil && strings.Contains(str, ":")
-}
-
-// IsCIDR checks if the string is an valid CIDR notiation (IPV4 & IPV6)
-func IsCIDR(str string) bool {
- _, _, err := net.ParseCIDR(str)
- return err == nil
-}
-
-// IsMAC checks if a string is valid MAC address.
-// Possible MAC formats:
-// 01:23:45:67:89:ab
-// 01:23:45:67:89:ab:cd:ef
-// 01-23-45-67-89-ab
-// 01-23-45-67-89-ab-cd-ef
-// 0123.4567.89ab
-// 0123.4567.89ab.cdef
-func IsMAC(str string) bool {
- _, err := net.ParseMAC(str)
- return err == nil
-}
-
-// IsHost checks if the string is a valid IP (both v4 and v6) or a valid DNS name
-func IsHost(str string) bool {
- return IsIP(str) || IsDNSName(str)
-}
-
-// IsMongoID checks if the string is a valid hex-encoded representation of a MongoDB ObjectId.
-func IsMongoID(str string) bool {
- return rxHexadecimal.MatchString(str) && (len(str) == 24)
-}
-
-// IsLatitude checks if a string is valid latitude.
-func IsLatitude(str string) bool {
- return rxLatitude.MatchString(str)
-}
-
-// IsLongitude checks if a string is valid longitude.
-func IsLongitude(str string) bool {
- return rxLongitude.MatchString(str)
-}
-
-// IsIMEI checks if a string is valid IMEI
-func IsIMEI(str string) bool {
- return rxIMEI.MatchString(str)
-}
-
-// IsIMSI checks if a string is valid IMSI
-func IsIMSI(str string) bool {
- if !rxIMSI.MatchString(str) {
- return false
- }
-
- mcc, err := strconv.ParseInt(str[0:3], 10, 32)
- if err != nil {
- return false
- }
-
- switch mcc {
- case 202, 204, 206, 208, 212, 213, 214, 216, 218, 219:
- case 220, 221, 222, 226, 228, 230, 231, 232, 234, 235:
- case 238, 240, 242, 244, 246, 247, 248, 250, 255, 257:
- case 259, 260, 262, 266, 268, 270, 272, 274, 276, 278:
- case 280, 282, 283, 284, 286, 288, 289, 290, 292, 293:
- case 294, 295, 297, 302, 308, 310, 311, 312, 313, 314:
- case 315, 316, 330, 332, 334, 338, 340, 342, 344, 346:
- case 348, 350, 352, 354, 356, 358, 360, 362, 363, 364:
- case 365, 366, 368, 370, 372, 374, 376, 400, 401, 402:
- case 404, 405, 406, 410, 412, 413, 414, 415, 416, 417:
- case 418, 419, 420, 421, 422, 424, 425, 426, 427, 428:
- case 429, 430, 431, 432, 434, 436, 437, 438, 440, 441:
- case 450, 452, 454, 455, 456, 457, 460, 461, 466, 467:
- case 470, 472, 502, 505, 510, 514, 515, 520, 525, 528:
- case 530, 536, 537, 539, 540, 541, 542, 543, 544, 545:
- case 546, 547, 548, 549, 550, 551, 552, 553, 554, 555:
- case 602, 603, 604, 605, 606, 607, 608, 609, 610, 611:
- case 612, 613, 614, 615, 616, 617, 618, 619, 620, 621:
- case 622, 623, 624, 625, 626, 627, 628, 629, 630, 631:
- case 632, 633, 634, 635, 636, 637, 638, 639, 640, 641:
- case 642, 643, 645, 646, 647, 648, 649, 650, 651, 652:
- case 653, 654, 655, 657, 658, 659, 702, 704, 706, 708:
- case 710, 712, 714, 716, 722, 724, 730, 732, 734, 736:
- case 738, 740, 742, 744, 746, 748, 750, 995:
- return true
- default:
- return false
- }
- return true
-}
-
-// IsRsaPublicKey checks if a string is valid public key with provided length
-func IsRsaPublicKey(str string, keylen int) bool {
- bb := bytes.NewBufferString(str)
- pemBytes, err := ioutil.ReadAll(bb)
- if err != nil {
- return false
- }
- block, _ := pem.Decode(pemBytes)
- if block != nil && block.Type != "PUBLIC KEY" {
- return false
- }
- var der []byte
-
- if block != nil {
- der = block.Bytes
- } else {
- der, err = base64.StdEncoding.DecodeString(str)
- if err != nil {
- return false
- }
- }
-
- key, err := x509.ParsePKIXPublicKey(der)
- if err != nil {
- return false
- }
- pubkey, ok := key.(*rsa.PublicKey)
- if !ok {
- return false
- }
- bitlen := len(pubkey.N.Bytes()) * 8
- return bitlen == int(keylen)
-}
-
-// IsRegex checks if a give string is a valid regex with RE2 syntax or not
-func IsRegex(str string) bool {
- if _, err := regexp.Compile(str); err == nil {
- return true
- }
- return false
-}
-
-func toJSONName(tag string) string {
- if tag == "" {
- return ""
- }
-
- // JSON name always comes first. If there's no options then split[0] is
- // JSON name, if JSON name is not set, then split[0] is an empty string.
- split := strings.SplitN(tag, ",", 2)
-
- name := split[0]
-
- // However it is possible that the field is skipped when
- // (de-)serializing from/to JSON, in which case assume that there is no
- // tag name to use
- if name == "-" {
- return ""
- }
- return name
-}
-
-func prependPathToErrors(err error, path string) error {
- switch err2 := err.(type) {
- case Error:
- err2.Path = append([]string{path}, err2.Path...)
- return err2
- case Errors:
- errors := err2.Errors()
- for i, err3 := range errors {
- errors[i] = prependPathToErrors(err3, path)
- }
- return err2
- }
- return err
-}
-
-// ValidateArray performs validation according to condition iterator that validates every element of the array
-func ValidateArray(array []interface{}, iterator ConditionIterator) bool {
- return Every(array, iterator)
-}
-
-// ValidateMap use validation map for fields.
-// result will be equal to `false` if there are any errors.
-// s is the map containing the data to be validated.
-// m is the validation map in the form:
-// map[string]interface{}{"name":"required,alpha","address":map[string]interface{}{"line1":"required,alphanum"}}
-func ValidateMap(s map[string]interface{}, m map[string]interface{}) (bool, error) {
- if s == nil {
- return true, nil
- }
- result := true
- var err error
- var errs Errors
- var index int
- val := reflect.ValueOf(s)
- for key, value := range s {
- presentResult := true
- validator, ok := m[key]
- if !ok {
- presentResult = false
- var err error
- err = fmt.Errorf("all map keys has to be present in the validation map; got %s", key)
- err = prependPathToErrors(err, key)
- errs = append(errs, err)
- }
- valueField := reflect.ValueOf(value)
- mapResult := true
- typeResult := true
- structResult := true
- resultField := true
- switch subValidator := validator.(type) {
- case map[string]interface{}:
- var err error
- if v, ok := value.(map[string]interface{}); !ok {
- mapResult = false
- err = fmt.Errorf("map validator has to be for the map type only; got %s", valueField.Type().String())
- err = prependPathToErrors(err, key)
- errs = append(errs, err)
- } else {
- mapResult, err = ValidateMap(v, subValidator)
- if err != nil {
- mapResult = false
- err = prependPathToErrors(err, key)
- errs = append(errs, err)
- }
- }
- case string:
- if (valueField.Kind() == reflect.Struct ||
- (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) &&
- subValidator != "-" {
- var err error
- structResult, err = ValidateStruct(valueField.Interface())
- if err != nil {
- err = prependPathToErrors(err, key)
- errs = append(errs, err)
- }
- }
- resultField, err = typeCheck(valueField, reflect.StructField{
- Name: key,
- PkgPath: "",
- Type: val.Type(),
- Tag: reflect.StructTag(fmt.Sprintf("%s:%q", tagName, subValidator)),
- Offset: 0,
- Index: []int{index},
- Anonymous: false,
- }, val, nil)
- if err != nil {
- errs = append(errs, err)
- }
- case nil:
- // already handlerd when checked before
- default:
- typeResult = false
- err = fmt.Errorf("map validator has to be either map[string]interface{} or string; got %s", valueField.Type().String())
- err = prependPathToErrors(err, key)
- errs = append(errs, err)
- }
- result = result && presentResult && typeResult && resultField && structResult && mapResult
- index++
- }
- // checks required keys
- requiredResult := true
- for key, value := range m {
- if schema, ok := value.(string); ok {
- tags := parseTagIntoMap(schema)
- if required, ok := tags["required"]; ok {
- if _, ok := s[key]; !ok {
- requiredResult = false
- if required.customErrorMessage != "" {
- err = Error{key, fmt.Errorf(required.customErrorMessage), true, "required", []string{}}
- } else {
- err = Error{key, fmt.Errorf("required field missing"), false, "required", []string{}}
- }
- errs = append(errs, err)
- }
- }
- }
- }
-
- if len(errs) > 0 {
- err = errs
- }
- return result && requiredResult, err
-}
-
-// ValidateStruct use tags for fields.
-// result will be equal to `false` if there are any errors.
-// todo currently there is no guarantee that errors will be returned in predictable order (tests may to fail)
-func ValidateStruct(s interface{}) (bool, error) {
- if s == nil {
- return true, nil
- }
- result := true
- var err error
- val := reflect.ValueOf(s)
- if val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr {
- val = val.Elem()
- }
- // we only accept structs
- if val.Kind() != reflect.Struct {
- return false, fmt.Errorf("function only accepts structs; got %s", val.Kind())
- }
- var errs Errors
- for i := 0; i < val.NumField(); i++ {
- valueField := val.Field(i)
- typeField := val.Type().Field(i)
- if typeField.PkgPath != "" {
- continue // Private field
- }
- structResult := true
- if valueField.Kind() == reflect.Interface {
- valueField = valueField.Elem()
- }
- if (valueField.Kind() == reflect.Struct ||
- (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) &&
- typeField.Tag.Get(tagName) != "-" {
- var err error
- structResult, err = ValidateStruct(valueField.Interface())
- if err != nil {
- err = prependPathToErrors(err, typeField.Name)
- errs = append(errs, err)
- }
- }
- resultField, err2 := typeCheck(valueField, typeField, val, nil)
- if err2 != nil {
-
- // Replace structure name with JSON name if there is a tag on the variable
- jsonTag := toJSONName(typeField.Tag.Get("json"))
- if jsonTag != "" {
- switch jsonError := err2.(type) {
- case Error:
- jsonError.Name = jsonTag
- err2 = jsonError
- case Errors:
- for i2, err3 := range jsonError {
- switch customErr := err3.(type) {
- case Error:
- customErr.Name = jsonTag
- jsonError[i2] = customErr
- }
- }
-
- err2 = jsonError
- }
- }
-
- errs = append(errs, err2)
- }
- result = result && resultField && structResult
- }
- if len(errs) > 0 {
- err = errs
- }
- return result, err
-}
-
-// ValidateStructAsync performs async validation of the struct and returns results through the channels
-func ValidateStructAsync(s interface{}) (<-chan bool, <-chan error) {
- res := make(chan bool)
- errors := make(chan error)
-
- go func() {
- defer close(res)
- defer close(errors)
-
- isValid, isFailed := ValidateStruct(s)
-
- res <- isValid
- errors <- isFailed
- }()
-
- return res, errors
-}
-
-// ValidateMapAsync performs async validation of the map and returns results through the channels
-func ValidateMapAsync(s map[string]interface{}, m map[string]interface{}) (<-chan bool, <-chan error) {
- res := make(chan bool)
- errors := make(chan error)
-
- go func() {
- defer close(res)
- defer close(errors)
-
- isValid, isFailed := ValidateMap(s, m)
-
- res <- isValid
- errors <- isFailed
- }()
-
- return res, errors
-}
-
-// parseTagIntoMap parses a struct tag `valid:required~Some error message,length(2|3)` into map[string]string{"required": "Some error message", "length(2|3)": ""}
-func parseTagIntoMap(tag string) tagOptionsMap {
- optionsMap := make(tagOptionsMap)
- options := strings.Split(tag, ",")
-
- for i, option := range options {
- option = strings.TrimSpace(option)
-
- validationOptions := strings.Split(option, "~")
- if !isValidTag(validationOptions[0]) {
- continue
- }
- if len(validationOptions) == 2 {
- optionsMap[validationOptions[0]] = tagOption{validationOptions[0], validationOptions[1], i}
- } else {
- optionsMap[validationOptions[0]] = tagOption{validationOptions[0], "", i}
- }
- }
- return optionsMap
-}
-
-func isValidTag(s string) bool {
- if s == "" {
- return false
- }
- for _, c := range s {
- switch {
- case strings.ContainsRune("\\'\"!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
- // Backslash and quote chars are reserved, but
- // otherwise any punctuation chars are allowed
- // in a tag name.
- default:
- if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
- return false
- }
- }
- }
- return true
-}
-
-// IsSSN will validate the given string as a U.S. Social Security Number
-func IsSSN(str string) bool {
- if str == "" || len(str) != 11 {
- return false
- }
- return rxSSN.MatchString(str)
-}
-
-// IsSemver checks if string is valid semantic version
-func IsSemver(str string) bool {
- return rxSemver.MatchString(str)
-}
-
-// IsType checks if interface is of some type
-func IsType(v interface{}, params ...string) bool {
- if len(params) == 1 {
- typ := params[0]
- return strings.Replace(reflect.TypeOf(v).String(), " ", "", -1) == strings.Replace(typ, " ", "", -1)
- }
- return false
-}
-
-// IsTime checks if string is valid according to given format
-func IsTime(str string, format string) bool {
- _, err := time.Parse(format, str)
- return err == nil
-}
-
-// IsUnixTime checks if string is valid unix timestamp value
-func IsUnixTime(str string) bool {
- if _, err := strconv.Atoi(str); err == nil {
- return true
- }
- return false
-}
-
-// IsRFC3339 checks if string is valid timestamp value according to RFC3339
-func IsRFC3339(str string) bool {
- return IsTime(str, time.RFC3339)
-}
-
-// IsRFC3339WithoutZone checks if string is valid timestamp value according to RFC3339 which excludes the timezone.
-func IsRFC3339WithoutZone(str string) bool {
- return IsTime(str, rfc3339WithoutZone)
-}
-
-// IsISO4217 checks if string is valid ISO currency code
-func IsISO4217(str string) bool {
- for _, currency := range ISO4217List {
- if str == currency {
- return true
- }
- }
-
- return false
-}
-
-// ByteLength checks string's length
-func ByteLength(str string, params ...string) bool {
- if len(params) == 2 {
- min, _ := ToInt(params[0])
- max, _ := ToInt(params[1])
- return len(str) >= int(min) && len(str) <= int(max)
- }
-
- return false
-}
-
-// RuneLength checks string's length
-// Alias for StringLength
-func RuneLength(str string, params ...string) bool {
- return StringLength(str, params...)
-}
-
-// IsRsaPub checks whether string is valid RSA key
-// Alias for IsRsaPublicKey
-func IsRsaPub(str string, params ...string) bool {
- if len(params) == 1 {
- len, _ := ToInt(params[0])
- return IsRsaPublicKey(str, int(len))
- }
-
- return false
-}
-
-// StringMatches checks if a string matches a given pattern.
-func StringMatches(s string, params ...string) bool {
- if len(params) == 1 {
- pattern := params[0]
- return Matches(s, pattern)
- }
- return false
-}
-
-// StringLength checks string's length (including multi byte strings)
-func StringLength(str string, params ...string) bool {
-
- if len(params) == 2 {
- strLength := utf8.RuneCountInString(str)
- min, _ := ToInt(params[0])
- max, _ := ToInt(params[1])
- return strLength >= int(min) && strLength <= int(max)
- }
-
- return false
-}
-
-// MinStringLength checks string's minimum length (including multi byte strings)
-func MinStringLength(str string, params ...string) bool {
-
- if len(params) == 1 {
- strLength := utf8.RuneCountInString(str)
- min, _ := ToInt(params[0])
- return strLength >= int(min)
- }
-
- return false
-}
-
-// MaxStringLength checks string's maximum length (including multi byte strings)
-func MaxStringLength(str string, params ...string) bool {
-
- if len(params) == 1 {
- strLength := utf8.RuneCountInString(str)
- max, _ := ToInt(params[0])
- return strLength <= int(max)
- }
-
- return false
-}
-
-// Range checks string's length
-func Range(str string, params ...string) bool {
- if len(params) == 2 {
- value, _ := ToFloat(str)
- min, _ := ToFloat(params[0])
- max, _ := ToFloat(params[1])
- return InRange(value, min, max)
- }
-
- return false
-}
-
-// IsInRaw checks if string is in list of allowed values
-func IsInRaw(str string, params ...string) bool {
- if len(params) == 1 {
- rawParams := params[0]
-
- parsedParams := strings.Split(rawParams, "|")
-
- return IsIn(str, parsedParams...)
- }
-
- return false
-}
-
-// IsIn checks if string str is a member of the set of strings params
-func IsIn(str string, params ...string) bool {
- for _, param := range params {
- if str == param {
- return true
- }
- }
-
- return false
-}
-
-func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) {
- if nilPtrAllowedByRequired {
- k := v.Kind()
- if (k == reflect.Ptr || k == reflect.Interface) && v.IsNil() {
- return true, nil
- }
- }
-
- if requiredOption, isRequired := options["required"]; isRequired {
- if len(requiredOption.customErrorMessage) > 0 {
- return false, Error{t.Name, fmt.Errorf(requiredOption.customErrorMessage), true, "required", []string{}}
- }
- return false, Error{t.Name, fmt.Errorf("non zero value required"), false, "required", []string{}}
- } else if _, isOptional := options["optional"]; fieldsRequiredByDefault && !isOptional {
- return false, Error{t.Name, fmt.Errorf("Missing required field"), false, "required", []string{}}
- }
- // not required and empty is valid
- return true, nil
-}
-
-func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value, options tagOptionsMap) (isValid bool, resultErr error) {
- if !v.IsValid() {
- return false, nil
- }
-
- tag := t.Tag.Get(tagName)
-
- // checks if the field should be ignored
- switch tag {
- case "":
- if v.Kind() != reflect.Slice && v.Kind() != reflect.Map {
- if !fieldsRequiredByDefault {
- return true, nil
- }
- return false, Error{t.Name, fmt.Errorf("All fields are required to at least have one validation defined"), false, "required", []string{}}
- }
- case "-":
- return true, nil
- }
-
- isRootType := false
- if options == nil {
- isRootType = true
- options = parseTagIntoMap(tag)
- }
-
- if isEmptyValue(v) {
- // an empty value is not validated, checks only required
- isValid, resultErr = checkRequired(v, t, options)
- for key := range options {
- delete(options, key)
- }
- return isValid, resultErr
- }
-
- var customTypeErrors Errors
- optionsOrder := options.orderedKeys()
- for _, validatorName := range optionsOrder {
- validatorStruct := options[validatorName]
- if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok {
- delete(options, validatorName)
-
- if result := validatefunc(v.Interface(), o.Interface()); !result {
- if len(validatorStruct.customErrorMessage) > 0 {
- customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: TruncatingErrorf(validatorStruct.customErrorMessage, fmt.Sprint(v), validatorName), CustomErrorMessageExists: true, Validator: stripParams(validatorName)})
- continue
- }
- customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), validatorName), CustomErrorMessageExists: false, Validator: stripParams(validatorName)})
- }
- }
- }
-
- if len(customTypeErrors.Errors()) > 0 {
- return false, customTypeErrors
- }
-
- if isRootType {
- // Ensure that we've checked the value by all specified validators before report that the value is valid
- defer func() {
- delete(options, "optional")
- delete(options, "required")
-
- if isValid && resultErr == nil && len(options) != 0 {
- optionsOrder := options.orderedKeys()
- for _, validator := range optionsOrder {
- isValid = false
- resultErr = Error{t.Name, fmt.Errorf(
- "The following validator is invalid or can't be applied to the field: %q", validator), false, stripParams(validator), []string{}}
- return
- }
- }
- }()
- }
-
- for _, validatorSpec := range optionsOrder {
- validatorStruct := options[validatorSpec]
- var negate bool
- validator := validatorSpec
- customMsgExists := len(validatorStruct.customErrorMessage) > 0
-
- // checks whether the tag looks like '!something' or 'something'
- if validator[0] == '!' {
- validator = validator[1:]
- negate = true
- }
-
- // checks for interface param validators
- for key, value := range InterfaceParamTagRegexMap {
- ps := value.FindStringSubmatch(validator)
- if len(ps) == 0 {
- continue
- }
-
- validatefunc, ok := InterfaceParamTagMap[key]
- if !ok {
- continue
- }
-
- delete(options, validatorSpec)
-
- field := fmt.Sprint(v)
- if result := validatefunc(v.Interface(), ps[1:]...); (!result && !negate) || (result && negate) {
- if customMsgExists {
- return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- if negate {
- return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- }
- }
-
- switch v.Kind() {
- case reflect.Bool,
- reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
- reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
- reflect.Float32, reflect.Float64,
- reflect.String:
- // for each tag option checks the map of validator functions
- for _, validatorSpec := range optionsOrder {
- validatorStruct := options[validatorSpec]
- var negate bool
- validator := validatorSpec
- customMsgExists := len(validatorStruct.customErrorMessage) > 0
-
- // checks whether the tag looks like '!something' or 'something'
- if validator[0] == '!' {
- validator = validator[1:]
- negate = true
- }
-
- // checks for param validators
- for key, value := range ParamTagRegexMap {
- ps := value.FindStringSubmatch(validator)
- if len(ps) == 0 {
- continue
- }
-
- validatefunc, ok := ParamTagMap[key]
- if !ok {
- continue
- }
-
- delete(options, validatorSpec)
-
- switch v.Kind() {
- case reflect.String,
- reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
- reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
- reflect.Float32, reflect.Float64:
-
- field := fmt.Sprint(v) // make value into string, then validate with regex
- if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) {
- if customMsgExists {
- return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- if negate {
- return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- default:
- // type not yet supported, fail
- return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false, stripParams(validatorSpec), []string{}}
- }
- }
-
- if validatefunc, ok := TagMap[validator]; ok {
- delete(options, validatorSpec)
-
- switch v.Kind() {
- case reflect.String,
- reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
- reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
- reflect.Float32, reflect.Float64:
- field := fmt.Sprint(v) // make value into string, then validate with regex
- if result := validatefunc(field); !result && !negate || result && negate {
- if customMsgExists {
- return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- if negate {
- return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
- }
- default:
- //Not Yet Supported Types (Fail here!)
- err := fmt.Errorf("Validator %s doesn't support kind %s for value %v", validator, v.Kind(), v)
- return false, Error{t.Name, err, false, stripParams(validatorSpec), []string{}}
- }
- }
- }
- return true, nil
- case reflect.Map:
- if v.Type().Key().Kind() != reflect.String {
- return false, &UnsupportedTypeError{v.Type()}
- }
- var sv stringValues
- sv = v.MapKeys()
- sort.Sort(sv)
- result := true
- for i, k := range sv {
- var resultItem bool
- var err error
- if v.MapIndex(k).Kind() != reflect.Struct {
- resultItem, err = typeCheck(v.MapIndex(k), t, o, options)
- if err != nil {
- return false, err
- }
- } else {
- resultItem, err = ValidateStruct(v.MapIndex(k).Interface())
- if err != nil {
- err = prependPathToErrors(err, t.Name+"."+sv[i].Interface().(string))
- return false, err
- }
- }
- result = result && resultItem
- }
- return result, nil
- case reflect.Slice, reflect.Array:
- result := true
- for i := 0; i < v.Len(); i++ {
- var resultItem bool
- var err error
- if v.Index(i).Kind() != reflect.Struct {
- resultItem, err = typeCheck(v.Index(i), t, o, options)
- if err != nil {
- return false, err
- }
- } else {
- resultItem, err = ValidateStruct(v.Index(i).Interface())
- if err != nil {
- err = prependPathToErrors(err, t.Name+"."+strconv.Itoa(i))
- return false, err
- }
- }
- result = result && resultItem
- }
- return result, nil
- case reflect.Interface:
- // If the value is an interface then encode its element
- if v.IsNil() {
- return true, nil
- }
- return ValidateStruct(v.Interface())
- case reflect.Ptr:
- // If the value is a pointer then checks its element
- if v.IsNil() {
- return true, nil
- }
- return typeCheck(v.Elem(), t, o, options)
- case reflect.Struct:
- return true, nil
- default:
- return false, &UnsupportedTypeError{v.Type()}
- }
-}
-
-func stripParams(validatorString string) string {
- return paramsRegexp.ReplaceAllString(validatorString, "")
-}
-
-// isEmptyValue checks whether value empty or not
-func isEmptyValue(v reflect.Value) bool {
- switch v.Kind() {
- case reflect.String, reflect.Array:
- return v.Len() == 0
- case reflect.Map, reflect.Slice:
- return v.Len() == 0 || v.IsNil()
- case reflect.Bool:
- return !v.Bool()
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return v.Int() == 0
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return v.Uint() == 0
- case reflect.Float32, reflect.Float64:
- return v.Float() == 0
- case reflect.Interface, reflect.Ptr:
- return v.IsNil()
- }
-
- return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface())
-}
-
-// ErrorByField returns error for specified field of the struct
-// validated by ValidateStruct or empty string if there are no errors
-// or this field doesn't exists or doesn't have any errors.
-func ErrorByField(e error, field string) string {
- if e == nil {
- return ""
- }
- return ErrorsByField(e)[field]
-}
-
-// ErrorsByField returns map of errors of the struct validated
-// by ValidateStruct or empty map if there are no errors.
-func ErrorsByField(e error) map[string]string {
- m := make(map[string]string)
- if e == nil {
- return m
- }
- // prototype for ValidateStruct
-
- switch e := e.(type) {
- case Error:
- m[e.Name] = e.Err.Error()
- case Errors:
- for _, item := range e.Errors() {
- n := ErrorsByField(item)
- for k, v := range n {
- m[k] = v
- }
- }
- }
-
- return m
-}
-
-// Error returns string equivalent for reflect.Type
-func (e *UnsupportedTypeError) Error() string {
- return "validator: unsupported type: " + e.Type.String()
-}
-
-func (sv stringValues) Len() int { return len(sv) }
-func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
-func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) }
-func (sv stringValues) get(i int) string { return sv[i].String() }
-
-func IsE164(str string) bool {
- return rxE164.MatchString(str)
-}
diff --git a/vendor/github.com/asaskevich/govalidator/wercker.yml b/vendor/github.com/asaskevich/govalidator/wercker.yml
deleted file mode 100644
index bc5f7b0864..0000000000
--- a/vendor/github.com/asaskevich/govalidator/wercker.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-box: golang
-build:
- steps:
- - setup-go-workspace
-
- - script:
- name: go get
- code: |
- go version
- go get -t ./...
-
- - script:
- name: go test
- code: |
- go test -race -v ./...
diff --git a/vendor/github.com/containers/common/libimage/filters.go b/vendor/github.com/containers/common/libimage/filters.go
index aa11d54f53..b8bcecc868 100644
--- a/vendor/github.com/containers/common/libimage/filters.go
+++ b/vendor/github.com/containers/common/libimage/filters.go
@@ -25,6 +25,7 @@ type compiledFilters map[string][]filterFunc
// Apply the specified filters. All filters of each key must apply.
// tree must be provided if compileImageFilters indicated it is necessary.
+// WARNING: Application of filterReferences sets the image names to the matched names, but this only affects the in-memory values; they are not written to storage.
func (i *Image) applyFilters(ctx context.Context, filters compiledFilters, tree *layerTree) (bool, error) {
for key := range filters {
for _, filter := range filters[key] {
@@ -51,6 +52,7 @@ func (i *Image) applyFilters(ctx context.Context, filters compiledFilters, tree
// filterImages returns a slice of images which are passing all specified
// filters.
// tree must be provided if compileImageFilters indicated it is necessary.
+// WARNING: Application of filterReferences sets the image names to the matched names, but this only affects the in-memory values; they are not written to storage.
func (r *Runtime) filterImages(ctx context.Context, images []*Image, filters compiledFilters, tree *layerTree) ([]*Image, error) {
result := []*Image{}
for i := range images {
@@ -272,7 +274,6 @@ func filterReferences(r *Runtime, wantedReferenceMatches, unwantedReferenceMatch
return true, nil
}
- unwantedMatched := false
// Go through the unwanted matches first
for _, value := range unwantedReferenceMatches {
matches, err := imageMatchesReferenceFilter(r, img, value)
@@ -280,33 +281,82 @@ func filterReferences(r *Runtime, wantedReferenceMatches, unwantedReferenceMatch
return false, err
}
if matches {
- unwantedMatched = true
+ return false, nil
}
}
// If there are no wanted match filters, then return false for the image
// that matched the unwanted value otherwise return true
if len(wantedReferenceMatches) == 0 {
- return !unwantedMatched, nil
+ return true, nil
}
- // Go through the wanted matches
- // If an image matches the wanted filter but it also matches the unwanted
- // filter, don't add it to the output
+ matchedReference := ""
for _, value := range wantedReferenceMatches {
matches, err := imageMatchesReferenceFilter(r, img, value)
if err != nil {
return false, err
}
- if matches && !unwantedMatched {
- return true, nil
+ if matches {
+ matchedReference = value
+ break
}
}
- return false, nil
+ if matchedReference == "" {
+ return false, nil
+ }
+
+ // If there is exactly one wanted reference match and no unwanted matches,
+ // the filter is treated as a query, so the matching names are set on
+ // the in-memory image.
+ if len(wantedReferenceMatches) == 1 && len(unwantedReferenceMatches) == 0 {
+ ref, ok := isFullyQualifiedReference(matchedReference)
+ if !ok {
+ return true, nil
+ }
+ namesThatMatch := []string{}
+ for _, name := range img.Names() {
+ if nameMatchesReference(name, ref) {
+ namesThatMatch = append(namesThatMatch, name)
+ }
+ }
+ img.setEphemeralNames(namesThatMatch)
+ }
+ return true, nil
}
}
+// isFullyQualifiedReference checks if the provided string is a fully qualified
+// reference (i.e., it contains a domain, path, and tag or digest).
+// It returns a reference.Named and a boolean indicating whether the
+// reference is fully qualified. If the reference is not fully qualified,
+// it returns nil and false.
+func isFullyQualifiedReference(r string) (reference.Named, bool) {
+ ref, err := reference.ParseNamed(r)
+ // If there is an error parsing the reference, it is not a valid reference
+ if err != nil {
+ return nil, false
+ }
+ // If it's name-only (no tag/digest), it's not fully qualified
+ if reference.IsNameOnly(ref) {
+ return nil, false
+ }
+ return ref, true
+}
+
+func nameMatchesReference(name string, ref reference.Named) bool {
+ _, containsDigest := ref.(reference.Digested)
+ if containsDigest {
+ nameRef, err := reference.ParseNamed(name)
+ if err != nil {
+ return false
+ }
+ return nameRef.Name() == ref.Name()
+ }
+ return name == ref.String()
+}
+
// imageMatchesReferenceFilter returns true if an image matches the filter value given
func imageMatchesReferenceFilter(r *Runtime, img *Image, value string) (bool, error) {
lookedUp, _, _ := r.LookupImage(value, nil)
@@ -352,7 +402,6 @@ func imageMatchesReferenceFilter(r *Runtime, img *Image, value string) (bool, er
}
}
}
-
return false, nil
}
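
As a minimal sketch of the check performed by the new isFullyQualifiedReference helper above (assuming the same reference package that libimage imports; the sample values are purely illustrative): a filter value counts as fully qualified only when it parses as a named reference and also carries a tag or digest.

```go
// Minimal sketch of the fully-qualified check; sample values are illustrative only.
package main

import (
	"fmt"

	"github.com/containers/image/v5/docker/reference"
)

func isFullyQualified(value string) bool {
	ref, err := reference.ParseNamed(value) // requires a registry/domain component
	if err != nil {
		return false
	}
	return !reference.IsNameOnly(ref) // must also carry a tag or digest
}

func main() {
	for _, v := range []string{
		"quay.io/podman/hello:latest", // domain + tag: fully qualified
		"quay.io/podman/hello",        // name only: no tag or digest
		"hello",                       // short name: ParseNamed rejects it
	} {
		fmt.Printf("%-30s %v\n", v, isFullyQualified(v))
	}
}
```
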
diff --git a/vendor/github.com/containers/common/libimage/image.go b/vendor/github.com/containers/common/libimage/image.go
index 11e387e010..c346d5dd1b 100644
--- a/vendor/github.com/containers/common/libimage/image.go
+++ b/vendor/github.com/containers/common/libimage/image.go
@@ -112,6 +112,13 @@ func (i *Image) Names() []string {
return i.storageImage.Names
}
+// setEphemeralNames sets the names of the image.
+//
+// WARNING: this only affects the in-memory values; they are not written to the backing storage.
+func (i *Image) setEphemeralNames(names []string) {
+ i.storageImage.Names = names
+}
+
// NamesReferences returns Names() as references.
func (i *Image) NamesReferences() ([]reference.Reference, error) {
if i.cached.namesReferences != nil {
diff --git a/vendor/github.com/containers/common/libimage/runtime.go b/vendor/github.com/containers/common/libimage/runtime.go
index ad1286197c..4eb1652c98 100644
--- a/vendor/github.com/containers/common/libimage/runtime.go
+++ b/vendor/github.com/containers/common/libimage/runtime.go
@@ -599,6 +599,16 @@ func (r *Runtime) ListImagesByNames(names []string) ([]*Image, error) {
}
// ListImages lists the images in the local container storage and filter the images by ListImagesOptions
+//
+// podman images consumes the output of ListImages and prints one line for each tag in each Image.Names value,
+// rather than one line for each Image with all of its names. So, if options.Filters contains exactly one
+// reference filter with a fully qualified image name and no negation, the filter is treated as a query:
+// it makes more sense for the user to see only the matching names in the output, rather than all names of
+// the deduplicated image. Therefore, when such a reference filter matches and the reference is fully
+// qualified (i.e., it contains a tag or digest, not just a bare repository name), the image names made
+// available to the caller are overwritten with the matching names.
+//
+// This overwriting is done only in memory and is never written to storage.
func (r *Runtime) ListImages(ctx context.Context, options *ListImagesOptions) ([]*Image, error) {
if options == nil {
options = &ListImagesOptions{}
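
A hedged sketch of how a caller would exercise this behaviour: with exactly one fully qualified reference filter and no negation, Names() on the returned images reports only the matching names, in memory only. The ListImagesOptions.Filters field name is assumed from libimage rather than shown in this diff.

```go
// Hedged sketch; Filters field name assumed from libimage.
package example

import (
	"context"
	"fmt"

	"github.com/containers/common/libimage"
)

func listMatching(ctx context.Context, rt *libimage.Runtime, ref string) error {
	images, err := rt.ListImages(ctx, &libimage.ListImagesOptions{
		Filters: []string{"reference=" + ref},
	})
	if err != nil {
		return err
	}
	for _, img := range images {
		fmt.Println(img.ID(), img.Names()) // only the names matching ref (in memory)
	}
	return nil
}
```
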
diff --git a/vendor/github.com/containers/image/v5/signature/fulcio_cert.go b/vendor/github.com/containers/image/v5/signature/fulcio_cert.go
index cce4677486..908dd0fd59 100644
--- a/vendor/github.com/containers/image/v5/signature/fulcio_cert.go
+++ b/vendor/github.com/containers/image/v5/signature/fulcio_cert.go
@@ -1,5 +1,3 @@
-//go:build !containers_image_fulcio_stub
-
package signature
import (
diff --git a/vendor/github.com/containers/image/v5/signature/fulcio_cert_stub.go b/vendor/github.com/containers/image/v5/signature/fulcio_cert_stub.go
deleted file mode 100644
index da8e13c1df..0000000000
--- a/vendor/github.com/containers/image/v5/signature/fulcio_cert_stub.go
+++ /dev/null
@@ -1,27 +0,0 @@
-//go:build containers_image_fulcio_stub
-
-package signature
-
-import (
- "crypto"
- "crypto/ecdsa"
- "crypto/x509"
- "errors"
-)
-
-type fulcioTrustRoot struct {
- caCertificates *x509.CertPool
- oidcIssuer string
- subjectEmail string
-}
-
-func (f *fulcioTrustRoot) validate() error {
- return errors.New("fulcio disabled at compile-time")
-}
-
-func verifyRekorFulcio(rekorPublicKeys []*ecdsa.PublicKey, fulcioTrustRoot *fulcioTrustRoot, untrustedRekorSET []byte,
- untrustedCertificateBytes []byte, untrustedIntermediateChainBytes []byte, untrustedBase64Signature string,
- untrustedPayloadBytes []byte) (crypto.PublicKey, error) {
- return nil, errors.New("fulcio disabled at compile-time")
-
-}
diff --git a/vendor/github.com/containers/image/v5/signature/internal/rekor_api_types.go b/vendor/github.com/containers/image/v5/signature/internal/rekor_api_types.go
new file mode 100644
index 0000000000..a722ee385a
--- /dev/null
+++ b/vendor/github.com/containers/image/v5/signature/internal/rekor_api_types.go
@@ -0,0 +1,93 @@
+package internal
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+)
+
+const rekorHashedrekordKind = "hashedrekord"
+
+type RekorHashedrekord struct {
+ APIVersion *string `json:"apiVersion"`
+ Spec json.RawMessage `json:"spec"`
+}
+
+func (m *RekorHashedrekord) Kind() string {
+ return rekorHashedrekordKind
+}
+
+func (m *RekorHashedrekord) SetKind(val string) {
+}
+
+func (m *RekorHashedrekord) UnmarshalJSON(raw []byte) error {
+ var base struct {
+ Kind string `json:"kind"`
+ }
+ dec := json.NewDecoder(bytes.NewReader(raw))
+ dec.UseNumber()
+ if err := dec.Decode(&base); err != nil {
+ return err
+ }
+
+ switch base.Kind {
+ case rekorHashedrekordKind:
+ var data struct { // We can’t use RekorHashedrekord directly, because that would be an infinite recursion.
+ APIVersion *string `json:"apiVersion"`
+ Spec json.RawMessage `json:"spec"`
+ }
+ dec = json.NewDecoder(bytes.NewReader(raw))
+ dec.UseNumber()
+ if err := dec.Decode(&data); err != nil {
+ return err
+ }
+ res := RekorHashedrekord{
+ APIVersion: data.APIVersion,
+ Spec: data.Spec,
+ }
+ *m = res
+ return nil
+
+ default:
+ return fmt.Errorf("invalid kind value: %q", base.Kind)
+ }
+}
+
+func (m RekorHashedrekord) MarshalJSON() ([]byte, error) {
+ return json.Marshal(struct {
+ Kind string `json:"kind"`
+ APIVersion *string `json:"apiVersion"`
+ Spec json.RawMessage `json:"spec"`
+ }{
+ Kind: m.Kind(),
+ APIVersion: m.APIVersion,
+ Spec: m.Spec,
+ })
+}
+
+type RekorHashedrekordV001Schema struct {
+ Data *RekorHashedrekordV001SchemaData `json:"data"`
+ Signature *RekorHashedrekordV001SchemaSignature `json:"signature"`
+}
+
+type RekorHashedrekordV001SchemaData struct {
+ Hash *RekorHashedrekordV001SchemaDataHash `json:"hash,omitempty"`
+}
+
+type RekorHashedrekordV001SchemaDataHash struct {
+ Algorithm *string `json:"algorithm"`
+ Value *string `json:"value"`
+}
+
+const (
+ RekorHashedrekordV001SchemaDataHashAlgorithmSha256 string = "sha256"
+)
+
+type RekorHashedrekordV001SchemaSignature struct {
+ Content []byte `json:"content,omitempty"`
+ PublicKey *RekorHashedrekordV001SchemaSignaturePublicKey `json:"publicKey,omitempty"`
+}
+
+type RekorHashedrekordV001SchemaSignaturePublicKey struct {
+ Content []byte `json:"content,omitempty"`
+}
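
For illustration, the wire format that RekorHashedrekord.UnmarshalJSON accepts looks like the following. Because signature/internal is an internal package and cannot be imported from outside the module, this standalone sketch mirrors the decoder's behaviour (kind check, then apiVersion plus a raw spec) with a local struct; the sample JSON values are assumptions.

```go
// Standalone sketch of the hashedrekord envelope decoding; values are illustrative.
package main

import (
	"encoding/json"
	"fmt"
)

type hashedrekordEnvelope struct {
	Kind       string          `json:"kind"`
	APIVersion *string         `json:"apiVersion"`
	Spec       json.RawMessage `json:"spec"`
}

func main() {
	raw := []byte(`{"kind":"hashedrekord","apiVersion":"0.0.1","spec":{"data":{"hash":{"algorithm":"sha256","value":"abc"}}}}`)
	var e hashedrekordEnvelope
	if err := json.Unmarshal(raw, &e); err != nil {
		panic(err)
	}
	if e.Kind != "hashedrekord" { // same check the vendored UnmarshalJSON performs
		panic(fmt.Sprintf("invalid kind value: %q", e.Kind))
	}
	fmt.Println("apiVersion:", *e.APIVersion, "spec bytes:", len(e.Spec))
}
```
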
diff --git a/vendor/github.com/containers/image/v5/signature/internal/rekor_set.go b/vendor/github.com/containers/image/v5/signature/internal/rekor_set.go
index f26a978701..1c20e496a7 100644
--- a/vendor/github.com/containers/image/v5/signature/internal/rekor_set.go
+++ b/vendor/github.com/containers/image/v5/signature/internal/rekor_set.go
@@ -1,5 +1,3 @@
-//go:build !containers_image_rekor_stub
-
package internal
import (
@@ -14,12 +12,11 @@ import (
"time"
"github.com/cyberphone/json-canonicalization/go/src/webpki.org/jsoncanonicalizer"
- "github.com/sigstore/rekor/pkg/generated/models"
)
// This is the github.com/sigstore/rekor/pkg/generated/models.Hashedrekord.APIVersion for github.com/sigstore/rekor/pkg/generated/models.HashedrekordV001Schema.
// We could alternatively use github.com/sigstore/rekor/pkg/types/hashedrekord.APIVERSION, but that subpackage adds too many dependencies.
-const HashedRekordV001APIVersion = "0.0.1"
+const RekorHashedRekordV001APIVersion = "0.0.1"
// UntrustedRekorSET is a parsed content of the sigstore-signature Rekor SET
// (note that this a signature-specific format, not a format directly used by the Rekor API).
@@ -137,31 +134,20 @@ func VerifyRekorSET(publicKeys []*ecdsa.PublicKey, unverifiedRekorSET []byte, un
if err := json.Unmarshal(untrustedSETPayloadCanonicalBytes, &rekorPayload); err != nil {
return time.Time{}, NewInvalidSignatureError(fmt.Sprintf("parsing Rekor SET payload: %v", err.Error()))
}
- // FIXME: Use a different decoder implementation? The Swagger-generated code is kinda ridiculous, with the need to re-marshal
- // hashedRekor.Spec and so on.
- // Especially if we anticipate needing to decode different data formats…
- // That would also allow being much more strict about JSON.
- //
- // Alternatively, rely on the existing .Validate() methods instead of manually checking for nil all over the place.
- var hashedRekord models.Hashedrekord
+ // FIXME: Consider being much more strict about decoding JSON.
+ var hashedRekord RekorHashedrekord
if err := json.Unmarshal(rekorPayload.Body, &hashedRekord); err != nil {
return time.Time{}, NewInvalidSignatureError(fmt.Sprintf("decoding the body of a Rekor SET payload: %v", err))
}
- // The decode of models.HashedRekord validates the "kind": "hashedrecord" field, which is otherwise invisible to us.
+ // The decode of HashedRekord validates the "kind": "hashedrekord" field, which is otherwise invisible to us.
if hashedRekord.APIVersion == nil {
return time.Time{}, NewInvalidSignatureError("missing Rekor SET Payload API version")
}
- if *hashedRekord.APIVersion != HashedRekordV001APIVersion {
+ if *hashedRekord.APIVersion != RekorHashedRekordV001APIVersion {
return time.Time{}, NewInvalidSignatureError(fmt.Sprintf("unsupported Rekor SET Payload hashedrekord version %#v", hashedRekord.APIVersion))
}
- hashedRekordV001Bytes, err := json.Marshal(hashedRekord.Spec)
- if err != nil {
- // Coverage: hashedRekord.Spec is an any that was just unmarshaled,
- // so this should never fail.
- return time.Time{}, NewInvalidSignatureError(fmt.Sprintf("re-creating hashedrekord spec: %v", err))
- }
- var hashedRekordV001 models.HashedrekordV001Schema
- if err := json.Unmarshal(hashedRekordV001Bytes, &hashedRekordV001); err != nil {
+ var hashedRekordV001 RekorHashedrekordV001Schema
+ if err := json.Unmarshal(hashedRekord.Spec, &hashedRekordV001); err != nil {
return time.Time{}, NewInvalidSignatureError(fmt.Sprintf("decoding hashedrekod spec: %v", err))
}
@@ -217,7 +203,7 @@ func VerifyRekorSET(publicKeys []*ecdsa.PublicKey, unverifiedRekorSET []byte, un
// Eventually we should support them as well.
// Short-term, Cosign (as of 2024-02 and Cosign 2.2.3) only produces and accepts SHA-256, so right now that’s not a compatibility
// issue.
- if *hashedRekordV001.Data.Hash.Algorithm != models.HashedrekordV001SchemaDataHashAlgorithmSha256 {
+ if *hashedRekordV001.Data.Hash.Algorithm != RekorHashedrekordV001SchemaDataHashAlgorithmSha256 {
return time.Time{}, NewInvalidSignatureError(fmt.Sprintf(`Unexpected "data.hash.algorithm" value %#v`, *hashedRekordV001.Data.Hash.Algorithm))
}
if hashedRekordV001.Data.Hash.Value == nil {
diff --git a/vendor/github.com/containers/image/v5/signature/internal/rekor_set_stub.go b/vendor/github.com/containers/image/v5/signature/internal/rekor_set_stub.go
deleted file mode 100644
index 4ff3da7edb..0000000000
--- a/vendor/github.com/containers/image/v5/signature/internal/rekor_set_stub.go
+++ /dev/null
@@ -1,14 +0,0 @@
-//go:build containers_image_rekor_stub
-
-package internal
-
-import (
- "crypto/ecdsa"
- "time"
-)
-
-// VerifyRekorSET verifies that unverifiedRekorSET is correctly signed by publicKey and matches the rest of the data.
-// Returns bundle upload time on success.
-func VerifyRekorSET(publicKeys []*ecdsa.PublicKey, unverifiedRekorSET []byte, unverifiedKeyOrCertBytes []byte, unverifiedBase64Signature string, unverifiedPayloadBytes []byte) (time.Time, error) {
- return time.Time{}, NewInvalidSignatureError("rekor disabled at compile-time")
-}
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio.go b/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio.go
index 1e55b6ba08..8d1620371b 100644
--- a/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio.go
+++ b/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio.go
@@ -1,5 +1,3 @@
-//go:build !containers_image_fulcio_stub
-
package fulcio
import (
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio_stub.go b/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio_stub.go
deleted file mode 100644
index 1fd22c8fc9..0000000000
--- a/vendor/github.com/containers/image/v5/signature/sigstore/fulcio/fulcio_stub.go
+++ /dev/null
@@ -1,44 +0,0 @@
-//go:build containers_image_fulcio_stub
-
-package fulcio
-
-import (
- "fmt"
- "io"
- "net/url"
-
- "github.com/containers/image/v5/signature/sigstore/internal"
-)
-
-func WithFulcioAndPreexistingOIDCIDToken(fulcioURL *url.URL, oidcIDToken string) internal.Option {
- return func(s *internal.SigstoreSigner) error {
- return fmt.Errorf("fulcio disabled at compile time")
- }
-}
-
-// WithFulcioAndDeviceAuthorizationGrantOIDC sets up signing to use a short-lived key and a Fulcio-issued certificate
-// based on an OIDC ID token obtained using a device authorization grant (RFC 8628).
-//
-// interactiveOutput must be directly accessible to a human user in real time (i.e. not be just a log file).
-func WithFulcioAndDeviceAuthorizationGrantOIDC(fulcioURL *url.URL, oidcIssuerURL *url.URL, oidcClientID, oidcClientSecret string,
- interactiveOutput io.Writer) internal.Option {
- return func(s *internal.SigstoreSigner) error {
- return fmt.Errorf("fulcio disabled at compile time")
- }
-}
-
-// WithFulcioAndInterativeOIDC sets up signing to use a short-lived key and a Fulcio-issued certificate
-// based on an interactively-obtained OIDC ID token.
-// The token is obtained
-// - directly using a browser, listening on localhost, automatically opening a browser to the OIDC issuer,
-// to be redirected on localhost. (I.e. the current environment must allow launching a browser that connect back to the current process;
-// either or both may be impossible in a container or a remote VM).
-// - or by instructing the user to manually open a browser, obtain the OIDC code, and interactively input it as text.
-//
-// interactiveInput and interactiveOutput must both be directly operable by a human user in real time (i.e. not be just a log file).
-func WithFulcioAndInteractiveOIDC(fulcioURL *url.URL, oidcIssuerURL *url.URL, oidcClientID, oidcClientSecret string,
- interactiveInput io.Reader, interactiveOutput io.Writer) internal.Option {
- return func(s *internal.SigstoreSigner) error {
- return fmt.Errorf("fulcio disabled at compile time")
- }
-}
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/openapi_infra.go b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/openapi_infra.go
new file mode 100644
index 0000000000..6d51897cf9
--- /dev/null
+++ b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/openapi_infra.go
@@ -0,0 +1,79 @@
+package rekor
+
+// The following code is the essence of the relevant code paths from github.com/go-openapi/runtime,
+// heavily modified since.
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "io"
+ "net/http"
+ "path"
+)
+
+// makeRequest makes a http request to the requested requestPath, and returns the received response.
+func (r *rekorClient) makeRequest(ctx context.Context, method, requestPath string, bodyContent any) (*http.Response, error) {
+ ctx, cancel := context.WithCancel(ctx)
+ defer cancel()
+
+ var body io.Reader
+ headers := http.Header{}
+
+ headers.Set("Accept", "application/json")
+ if bodyContent != nil {
+ buf := bytes.NewBuffer(nil)
+ body = buf
+ headers.Set("Content-Type", "application/json")
+ enc := json.NewEncoder(buf)
+ enc.SetEscapeHTML(false)
+ if err := enc.Encode(bodyContent); err != nil {
+ return nil, err
+ }
+ }
+
+ req, err := http.NewRequestWithContext(ctx, method, path.Join(r.basePath, requestPath), body)
+ if err != nil {
+ return nil, err
+ }
+ // Only Scheme and Host are used from rekorURL.
+ // Really this should probably use r.rekorURL.JoinPath(requestPath) (which, notably, correctly deals with path escaping),
+ // and pass that to NewRequestWithContext, but this use of path.Join is consistent with go-openapi/runtime v0.24.1.
+ req.URL.Scheme = r.rekorURL.Scheme
+ req.URL.Host = r.rekorURL.Host
+ req.Header = headers
+
+ res, err := r.httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ // Note that we don’t care to even read the Content-Type: header; we blindly assume the format is the requested JSON.
+ return res, nil
+}
+
+// decodeHTTPResponseBodyAsJSON decodes the body of a HTTP response in a manner compatible with github.com/go-openapi/runtime.
+func decodeHTTPResponseBodyAsJSON(res *http.Response, data any) error {
+ dec := json.NewDecoder(res.Body)
+ dec.UseNumber()
+ err := dec.Decode(data)
+ if err == io.EOF {
+ // This seems unwanted at a first glance; go-swagger added it in https://github.com/go-swagger/go-swagger/issues/192 , it’s unclear
+ // whether it’s correct or still necessary.
+ err = nil
+ }
+ return err
+}
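
The decoding behaviour of decodeHTTPResponseBodyAsJSON can be reproduced standalone as follows (the helper itself is unexported); this sketch shows the json.Decoder with UseNumber and the tolerated io.EOF for an empty body.

```go
// Standalone sketch of the lenient JSON decoding pattern used above.
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

func decodeLenient(body io.Reader, data any) error {
	dec := json.NewDecoder(body)
	dec.UseNumber() // keep numbers as json.Number instead of float64
	err := dec.Decode(data)
	if err == io.EOF { // an empty body is treated as "no data", not an error
		err = nil
	}
	return err
}

func main() {
	var out map[string]any
	fmt.Println(decodeLenient(strings.NewReader(`{"logIndex": 42}`), &out), out)
	fmt.Println(decodeLenient(strings.NewReader(""), &out)) // empty body: no error
}
```
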
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor.go b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor.go
index 9f745a2b99..7b10fc8348 100644
--- a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor.go
+++ b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor.go
@@ -1,5 +1,3 @@
-//go:build !containers_image_rekor_stub
-
package rekor
import (
@@ -10,44 +8,57 @@ import (
"encoding/json"
"errors"
"fmt"
+ "net/http"
"net/url"
"strings"
"github.com/containers/image/v5/signature/internal"
signerInternal "github.com/containers/image/v5/signature/sigstore/internal"
- "github.com/go-openapi/strfmt"
- rekor "github.com/sigstore/rekor/pkg/client"
- "github.com/sigstore/rekor/pkg/generated/client"
- "github.com/sigstore/rekor/pkg/generated/client/entries"
- "github.com/sigstore/rekor/pkg/generated/models"
+ "github.com/hashicorp/go-retryablehttp"
"github.com/sirupsen/logrus"
)
+const (
+ // defaultRetryCount is the default number of retries
+ defaultRetryCount = 3
+)
+
// WithRekor asks the generated signature to be uploaded to the specified Rekor server,
// and to include a log inclusion proof in the signature.
func WithRekor(rekorURL *url.URL) signerInternal.Option {
return func(s *signerInternal.SigstoreSigner) error {
logrus.Debugf("Using Rekor server at %s", rekorURL.Redacted())
- client, err := rekor.GetRekorClient(rekorURL.String(),
- rekor.WithLogger(leveledLoggerForLogrus(logrus.StandardLogger())))
- if err != nil {
- return fmt.Errorf("creating Rekor client: %w", err)
- }
- u := uploader{
- client: client,
- }
- s.RekorUploader = u.uploadKeyOrCert
+ client := newRekorClient(rekorURL)
+ s.RekorUploader = client.uploadKeyOrCert
return nil
}
}
-// uploader wraps a Rekor client, basically so that we can set RekorUploader to a method instead of an one-off closure.
-type uploader struct {
- client *client.Rekor
+// rekorClient allows uploading entries to Rekor.
+type rekorClient struct {
+ rekorURL *url.URL // Only Scheme and Host are actually used, consistent with github.com/sigstore/rekor/pkg/client.
+ basePath string
+ httpClient *http.Client
+}
+
+// newRekorClient creates a rekorClient for rekorURL.
+func newRekorClient(rekorURL *url.URL) *rekorClient {
+ retryableClient := retryablehttp.NewClient()
+ retryableClient.RetryMax = defaultRetryCount
+ retryableClient.Logger = leveledLoggerForLogrus(logrus.StandardLogger())
+ basePath := rekorURL.Path
+ if !strings.HasPrefix(basePath, "/") { // Includes basePath == "", i.e. a URL that is just https://hostname
+ basePath = "/" + basePath
+ }
+ return &rekorClient{
+ rekorURL: rekorURL,
+ basePath: basePath,
+ httpClient: retryableClient.StandardClient(),
+ }
}
// rekorEntryToSET converts a Rekor log entry into a sigstore “signed entry timestamp”.
-func rekorEntryToSET(entry *models.LogEntryAnon) (internal.UntrustedRekorSET, error) {
+func rekorEntryToSET(entry *rekorLogEntryAnon) (internal.UntrustedRekorSET, error) {
// We could plausibly call entry.Validate() here; that mostly just uses unnecessary reflection instead of direct == nil checks.
// Right now the only extra validation .Validate() does is *entry.LogIndex >= 0 and a regex check on *entry.LogID;
// we don’t particularly care about either of these (notably signature verification only uses the Body value).
@@ -79,67 +90,67 @@ func rekorEntryToSET(entry *models.LogEntryAnon) (internal.UntrustedRekorSET, er
}
// uploadEntry ensures proposedEntry exists in Rekor (usually uploading it), and returns the resulting log entry.
-func (u *uploader) uploadEntry(ctx context.Context, proposedEntry models.ProposedEntry) (models.LogEntry, error) {
- params := entries.NewCreateLogEntryParamsWithContext(ctx)
- params.SetProposedEntry(proposedEntry)
+func (r *rekorClient) uploadEntry(ctx context.Context, proposedEntry rekorProposedEntry) (rekorLogEntry, error) {
logrus.Debugf("Calling Rekor's CreateLogEntry")
- resp, err := u.client.Entries.CreateLogEntry(params)
+ resp, err := r.createLogEntry(ctx, proposedEntry)
if err != nil {
// In ordinary operation, we should not get duplicate entries, because our payload contains a timestamp,
// so it is supposed to be unique; and the default key format, ECDSA p256, also contains a nonce.
// But conflicts can fairly easily happen during debugging and experimentation, so it pays to handle this.
- var conflictErr *entries.CreateLogEntryConflict
- if errors.As(err, &conflictErr) && conflictErr.Location != "" {
- location := conflictErr.Location.String()
+ var conflictErr *createLogEntryConflictError
+ if errors.As(err, &conflictErr) && conflictErr.location != "" {
+ location := conflictErr.location
logrus.Debugf("CreateLogEntry reported a conflict, location = %s", location)
- // We might be able to just GET the returned Location, but let’s use the generated API client.
+ // We might be able to just GET the returned Location, but let’s use the formal API method.
// OTOH that requires us to hard-code the URI structure…
uuidDelimiter := strings.LastIndexByte(location, '/')
if uuidDelimiter != -1 { // Otherwise the URI is unexpected, and fall through to the bottom
uuid := location[uuidDelimiter+1:]
logrus.Debugf("Calling Rekor's NewGetLogEntryByUUIDParamsWithContext")
- params2 := entries.NewGetLogEntryByUUIDParamsWithContext(ctx)
- params2.SetEntryUUID(uuid)
- resp2, err := u.client.Entries.GetLogEntryByUUID(params2)
+ resp2, err := r.getLogEntryByUUID(ctx, uuid)
if err != nil {
return nil, fmt.Errorf("Error re-loading previously-created log entry with UUID %s: %w", uuid, err)
}
- return resp2.GetPayload(), nil
+ return resp2, nil
}
}
return nil, fmt.Errorf("Error uploading a log entry: %w", err)
}
- return resp.GetPayload(), nil
+ return resp, nil
}
-// stringPtr returns a pointer to the provided string value.
-func stringPtr(s string) *string {
+// stringPointer is a helper to create *string fields in JSON data.
+func stringPointer(s string) *string {
return &s
}
// uploadKeyOrCert integrates this code into sigstore/internal.Signer.
// Given components of the created signature, it returns a SET that should be added to the signature.
-func (u *uploader) uploadKeyOrCert(ctx context.Context, keyOrCertBytes []byte, signatureBytes []byte, payloadBytes []byte) ([]byte, error) {
+func (r *rekorClient) uploadKeyOrCert(ctx context.Context, keyOrCertBytes []byte, signatureBytes []byte, payloadBytes []byte) ([]byte, error) {
payloadHash := sha256.Sum256(payloadBytes) // HashedRecord only accepts SHA-256
- proposedEntry := models.Hashedrekord{
- APIVersion: stringPtr(internal.HashedRekordV001APIVersion),
- Spec: models.HashedrekordV001Schema{
- Data: &models.HashedrekordV001SchemaData{
- Hash: &models.HashedrekordV001SchemaDataHash{
- Algorithm: stringPtr(models.HashedrekordV001SchemaDataHashAlgorithmSha256),
- Value: stringPtr(hex.EncodeToString(payloadHash[:])),
- },
- },
- Signature: &models.HashedrekordV001SchemaSignature{
- Content: strfmt.Base64(signatureBytes),
- PublicKey: &models.HashedrekordV001SchemaSignaturePublicKey{
- Content: strfmt.Base64(keyOrCertBytes),
- },
+ hashedRekordSpec, err := json.Marshal(internal.RekorHashedrekordV001Schema{
+ Data: &internal.RekorHashedrekordV001SchemaData{
+ Hash: &internal.RekorHashedrekordV001SchemaDataHash{
+ Algorithm: stringPointer(internal.RekorHashedrekordV001SchemaDataHashAlgorithmSha256),
+ Value: stringPointer(hex.EncodeToString(payloadHash[:])),
},
},
+ Signature: &internal.RekorHashedrekordV001SchemaSignature{
+ Content: signatureBytes,
+ PublicKey: &internal.RekorHashedrekordV001SchemaSignaturePublicKey{
+ Content: keyOrCertBytes,
+ },
+ },
+ })
+ if err != nil {
+ return nil, err
+ }
+ proposedEntry := internal.RekorHashedrekord{
+ APIVersion: stringPointer(internal.RekorHashedRekordV001APIVersion),
+ Spec: hashedRekordSpec,
}
- uploadedPayload, err := u.uploadEntry(ctx, &proposedEntry)
+ uploadedPayload, err := r.uploadEntry(ctx, &proposedEntry)
if err != nil {
return nil, err
}
@@ -147,7 +158,7 @@ func (u *uploader) uploadKeyOrCert(ctx context.Context, keyOrCertBytes []byte, s
if len(uploadedPayload) != 1 {
return nil, fmt.Errorf("expected 1 Rekor entry, got %d", len(uploadedPayload))
}
- var storedEntry *models.LogEntryAnon
+ var storedEntry *rekorLogEntryAnon
// This “loop” extracts the single value from the uploadedPayload map.
for _, p := range uploadedPayload {
storedEntry = &p
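
A hedged sketch of wiring the rewritten Rekor client into a signer. Only rekor.WithRekor comes from this diff; sigstore.NewSigner, WithPrivateKeyFile, the key path, and the passphrase are recalled from the surrounding sigstore API and placeholder values, not confirmed here.

```go
package example

import (
	"net/url"

	"github.com/containers/image/v5/signature/signer"
	"github.com/containers/image/v5/signature/sigstore"
	"github.com/containers/image/v5/signature/sigstore/rekor"
)

// newRekorSigner builds a sigstore signer that uploads entries to Rekor.
// NewSigner/WithPrivateKeyFile and the key material are assumptions; only
// rekor.WithRekor is defined by the code in this diff.
func newRekorSigner() (*signer.Signer, error) {
	rekorURL, err := url.Parse("https://rekor.sigstore.dev")
	if err != nil {
		return nil, err
	}
	return sigstore.NewSigner(
		sigstore.WithPrivateKeyFile("cosign.key", []byte("passphrase")), // placeholder key/passphrase
		rekor.WithRekor(rekorURL), // uploads the entry and embeds the returned SET
	)
}
```
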
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_api.go b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_api.go
new file mode 100644
index 0000000000..053fec65a8
--- /dev/null
+++ b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_api.go
@@ -0,0 +1,112 @@
+package rekor
+
+// The following code is the essence of the relevant code paths from github.com/sigstore/rekor/pkg/generated/client/...,
+// heavily modified since.
+
+// Copyright 2021 The Sigstore Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+)
+
+// createLogEntryConflictError describes a response with status code 409:
+// The request conflicts with the current state of the transparency log.
+// This typically happens when trying to upload an existing entry again.
+type createLogEntryConflictError struct {
+ location string
+ err string
+}
+
+func (o *createLogEntryConflictError) Error() string {
+ return o.err
+}
+
+// createLogEntry creates an entry in the transparency log
+//
+// Creates an entry in the transparency log for a detached signature, public key, and content. Items can be included in the request or fetched by the server when URLs are specified.
+func (r *rekorClient) createLogEntry(ctx context.Context, proposedEntry rekorProposedEntry) (rekorLogEntry, error) {
+ res, err := r.makeRequest(ctx, http.MethodPost, "/api/v1/log/entries", proposedEntry)
+ if err != nil {
+ return nil, err
+ }
+ defer res.Body.Close()
+
+ switch res.StatusCode {
+ case http.StatusCreated:
+ result := rekorLogEntry{}
+ if err := decodeHTTPResponseBodyAsJSON(res, &result); err != nil {
+ return nil, err
+ }
+ return result, nil
+
+ case http.StatusBadRequest:
+ result := rekorError{}
+ if err := decodeHTTPResponseBodyAsJSON(res, &result); err != nil {
+ return nil, err
+ }
+ return nil, fmt.Errorf("Rekor /api/v1/log/entries failed: bad request (%d), %+v", res.StatusCode, result)
+
+ case http.StatusConflict:
+ result := rekorError{}
+ if err := decodeHTTPResponseBodyAsJSON(res, &result); err != nil {
+ return nil, err
+ }
+ return nil, &createLogEntryConflictError{
+ location: res.Header.Get("Location"),
+ err: fmt.Sprintf("Rekor /api/v1/log/entries failed with a conflict (%d), %+v", res.StatusCode, result),
+ }
+
+ default:
+ result := rekorError{}
+ if err := decodeHTTPResponseBodyAsJSON(res, &result); err != nil {
+ return nil, err
+ }
+ return nil, fmt.Errorf("Rekor /api/v1/log/entries failed with unexpected status %d: %+v", res.StatusCode, result)
+ }
+}
+
+// getLogEntryByUUID gets log entry and information required to generate an inclusion proof for the entry in the transparency log
+//
+// Returns the entry, root hash, tree size, and a list of hashes that can be used to calculate proof of an entry being included in the transparency log
+func (r *rekorClient) getLogEntryByUUID(ctx context.Context, entryUUID string) (rekorLogEntry, error) {
+ res, err := r.makeRequest(ctx, http.MethodGet, "/api/v1/log/entries/"+url.PathEscape(entryUUID), nil)
+ if err != nil {
+ return nil, err
+ }
+ defer res.Body.Close()
+
+ switch res.StatusCode {
+ case http.StatusOK:
+ result := rekorLogEntry{}
+ if err := decodeHTTPResponseBodyAsJSON(res, &result); err != nil {
+ return nil, err
+ }
+ return result, nil
+
+ case http.StatusNotFound: // We don’t care to define a separate error type; we don’t need it ourselves.
+ return nil, fmt.Errorf("Rekor /api/v1/log/entries/{entryUUID}: entry not found (%d)", res.StatusCode)
+
+ default:
+ result := rekorError{}
+ if err := decodeHTTPResponseBodyAsJSON(res, &result); err != nil {
+ return nil, err
+ }
+ return nil, fmt.Errorf("Rekor /api/v1/log/entries/{entryUUID} failed with unexpected status %d: %+v", res.StatusCode, result)
+ }
+}
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_api_types.go b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_api_types.go
new file mode 100644
index 0000000000..229fe1be54
--- /dev/null
+++ b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_api_types.go
@@ -0,0 +1,43 @@
+package rekor
+
+type rekorError struct {
+ Code int64 `json:"code,omitempty"`
+ Message string `json:"message,omitempty"`
+}
+
+type rekorProposedEntry interface {
+ // Actually the code, currently, accepts anything that can be marshaled into JSON; require at least the Kind marker
+ // shared between RekorHashedrekord and the other accepted formats for minimal sanity checking (but without hard-coding
+ // RekorHashedrekord in particular).
+
+ Kind() string
+ SetKind(string)
+}
+
+type rekorLogEntryAnon struct {
+ Attestation *rekorLogEntryAnonAttestation `json:"attestation,omitempty"`
+ Body any `json:"body"`
+ IntegratedTime *int64 `json:"integratedTime"`
+ LogID *string `json:"logID"`
+ LogIndex *int64 `json:"logIndex"`
+ Verification *rekorLogEntryAnonVerification `json:"verification,omitempty"`
+}
+
+type rekorLogEntryAnonAttestation struct {
+ Data []byte `json:"data,omitempty"`
+}
+
+type rekorLogEntryAnonVerification struct {
+ InclusionProof *rekorInclusionProof `json:"inclusionProof,omitempty"`
+ SignedEntryTimestamp []byte `json:"signedEntryTimestamp,omitempty"`
+}
+
+type rekorLogEntry map[string]rekorLogEntryAnon
+
+type rekorInclusionProof struct {
+ Checkpoint *string `json:"checkpoint"`
+ Hashes []string `json:"hashes"`
+ LogIndex *int64 `json:"logIndex"`
+ RootHash *string `json:"rootHash"`
+ TreeSize *int64 `json:"treeSize"`
+}
diff --git a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_stub.go b/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_stub.go
deleted file mode 100644
index 4dc0b88da9..0000000000
--- a/vendor/github.com/containers/image/v5/signature/sigstore/rekor/rekor_stub.go
+++ /dev/null
@@ -1,16 +0,0 @@
-//go:build containers_image_rekor_stub
-
-package rekor
-
-import (
- "fmt"
- "net/url"
-
- signerInternal "github.com/containers/image/v5/signature/sigstore/internal"
-)
-
-func WithRekor(rekorURL *url.URL) signerInternal.Option {
- return func(s *signerInternal.SigstoreSigner) error {
- return fmt.Errorf("rekor disabled at build time")
- }
-}
diff --git a/vendor/github.com/containers/storage/.cirrus.yml b/vendor/github.com/containers/storage/.cirrus.yml
index 7f84f47476..623ede2dd6 100644
--- a/vendor/github.com/containers/storage/.cirrus.yml
+++ b/vendor/github.com/containers/storage/.cirrus.yml
@@ -128,6 +128,7 @@ lint_task:
apt-get update
apt-get install -y libbtrfs-dev libsubid-dev
test_script: |
+ [ -n "${CIRRUS_BASE_SHA}" ] && git fetch origin ${CIRRUS_BASE_SHA} # Make ${CIRRUS_BASE_SHA} resolvable for git-validation
make TAGS=regex_precompile local-validate
make lint
make clean
diff --git a/vendor/github.com/containers/storage/drivers/aufs/aufs.go b/vendor/github.com/containers/storage/drivers/aufs/aufs.go
index 5231c6f55f..964ba8c918 100644
--- a/vendor/github.com/containers/storage/drivers/aufs/aufs.go
+++ b/vendor/github.com/containers/storage/drivers/aufs/aufs.go
@@ -772,8 +772,8 @@ func (a *Driver) UpdateLayerIDMap(id string, toContainer, toHost *idtools.IDMapp
return fmt.Errorf("aufs doesn't support changing ID mappings")
}
-// SupportsShifting tells whether the driver support shifting of the UIDs/GIDs in an userNS
-func (a *Driver) SupportsShifting() bool {
+// SupportsShifting tells whether the driver supports shifting of the UIDs/GIDs to the provided mapping in a userNS
+func (a *Driver) SupportsShifting(uidmap, gidmap []idtools.IDMap) bool {
return false
}
diff --git a/vendor/github.com/containers/storage/drivers/chown.go b/vendor/github.com/containers/storage/drivers/chown.go
index d728e919ba..df602760ed 100644
--- a/vendor/github.com/containers/storage/drivers/chown.go
+++ b/vendor/github.com/containers/storage/drivers/chown.go
@@ -131,7 +131,7 @@ func (n *naiveLayerIDMapUpdater) UpdateLayerIDMap(id string, toContainer, toHost
return ChownPathByMaps(layerFs, toContainer, toHost)
}
-// SupportsShifting tells whether the driver support shifting of the UIDs/GIDs in an userNS
-func (n *naiveLayerIDMapUpdater) SupportsShifting() bool {
+// SupportsShifting tells whether the driver supports shifting of the UIDs/GIDs to the provided mapping in a userNS
+func (n *naiveLayerIDMapUpdater) SupportsShifting(uidmap, gidmap []idtools.IDMap) bool {
return false
}
diff --git a/vendor/github.com/containers/storage/drivers/driver.go b/vendor/github.com/containers/storage/drivers/driver.go
index 7a7b7550d1..24d7b66b08 100644
--- a/vendor/github.com/containers/storage/drivers/driver.go
+++ b/vendor/github.com/containers/storage/drivers/driver.go
@@ -193,8 +193,9 @@ type LayerIDMapUpdater interface {
UpdateLayerIDMap(id string, toContainer, toHost *idtools.IDMappings, mountLabel string) error
// SupportsShifting tells whether the driver support shifting of the UIDs/GIDs in a
- // image and it is not required to Chown the files when running in an user namespace.
- SupportsShifting() bool
+ // image to the provided mapping, and it is not required to Chown the files when running in
+ // a user namespace.
+ SupportsShifting(uidmap, gidmap []idtools.IDMap) bool
}
// Driver is the interface for layered/snapshot file system drivers.
diff --git a/vendor/github.com/containers/storage/drivers/overlay/overlay.go b/vendor/github.com/containers/storage/drivers/overlay/overlay.go
index 5345ff9d6d..a01d6b3698 100644
--- a/vendor/github.com/containers/storage/drivers/overlay/overlay.go
+++ b/vendor/github.com/containers/storage/drivers/overlay/overlay.go
@@ -23,6 +23,7 @@ import (
"github.com/containers/storage/drivers/overlayutils"
"github.com/containers/storage/drivers/quota"
"github.com/containers/storage/internal/dedup"
+ "github.com/containers/storage/internal/staging_lockfile"
"github.com/containers/storage/pkg/archive"
"github.com/containers/storage/pkg/chrootarchive"
"github.com/containers/storage/pkg/directory"
@@ -30,7 +31,6 @@ import (
"github.com/containers/storage/pkg/fsutils"
"github.com/containers/storage/pkg/idmap"
"github.com/containers/storage/pkg/idtools"
- "github.com/containers/storage/pkg/lockfile"
"github.com/containers/storage/pkg/mount"
"github.com/containers/storage/pkg/parsers"
"github.com/containers/storage/pkg/system"
@@ -133,7 +133,7 @@ type Driver struct {
stagingDirsLocksMutex sync.Mutex
// stagingDirsLocks access is not thread safe, it is required that callers take
// stagingDirsLocksMutex on each access to guard against concurrent map writes.
- stagingDirsLocks map[string]*lockfile.LockFile
+ stagingDirsLocks map[string]*staging_lockfile.StagingLockFile
supportsIDMappedMounts *bool
}
@@ -222,7 +222,7 @@ func checkAndRecordIDMappedSupport(home, runhome string) (bool, error) {
return supportsIDMappedMounts, err
}
-func checkAndRecordOverlaySupport(fsMagic graphdriver.FsMagic, home, runhome string) (bool, error) {
+func checkAndRecordOverlaySupport(home, runhome string) (bool, error) {
var supportsDType bool
if os.Geteuid() != 0 {
@@ -242,7 +242,7 @@ func checkAndRecordOverlaySupport(fsMagic graphdriver.FsMagic, home, runhome str
return false, errors.New(overlayCacheText)
}
} else {
- supportsDType, err = supportsOverlay(home, fsMagic, 0, 0)
+ supportsDType, err = supportsOverlay(home, 0, 0)
if err != nil {
os.Remove(filepath.Join(home, linkDir))
os.Remove(home)
@@ -388,7 +388,7 @@ func Init(home string, options graphdriver.Options) (graphdriver.Driver, error)
t := true
supportsVolatile = &t
} else {
- supportsDType, err = checkAndRecordOverlaySupport(fsMagic, home, runhome)
+ supportsDType, err = checkAndRecordOverlaySupport(home, runhome)
if err != nil {
return nil, err
}
@@ -442,7 +442,7 @@ func Init(home string, options graphdriver.Options) (graphdriver.Driver, error)
usingComposefs: opts.useComposefs,
options: *opts,
stagingDirsLocksMutex: sync.Mutex{},
- stagingDirsLocks: make(map[string]*lockfile.LockFile),
+ stagingDirsLocks: make(map[string]*staging_lockfile.StagingLockFile),
}
d.naiveDiff = graphdriver.NewNaiveDiffDriver(d, graphdriver.NewNaiveLayerIDMapUpdater(d))
@@ -666,16 +666,11 @@ func SupportsNativeOverlay(home, runhome string) (bool, error) {
}
}
- fsMagic, err := graphdriver.GetFSMagic(home)
- if err != nil {
- return false, err
- }
-
- supportsDType, _ := checkAndRecordOverlaySupport(fsMagic, home, runhome)
+ supportsDType, _ := checkAndRecordOverlaySupport(home, runhome)
return supportsDType, nil
}
-func supportsOverlay(home string, homeMagic graphdriver.FsMagic, rootUID, rootGID int) (supportsDType bool, err error) {
+func supportsOverlay(home string, rootUID, rootGID int) (supportsDType bool, err error) {
selinuxLabelTest := selinux.PrivContainerMountLabel()
logLevel := logrus.ErrorLevel
@@ -828,7 +823,7 @@ func (d *Driver) Status() [][2]string {
{"Supports d_type", strconv.FormatBool(d.supportsDType)},
{"Native Overlay Diff", strconv.FormatBool(!d.useNaiveDiff())},
{"Using metacopy", strconv.FormatBool(d.usingMetacopy)},
- {"Supports shifting", strconv.FormatBool(d.SupportsShifting())},
+ {"Supports shifting", strconv.FormatBool(d.SupportsShifting(nil, nil))},
{"Supports volatile", strconv.FormatBool(supportsVolatile)},
}
}
@@ -874,7 +869,9 @@ func (d *Driver) Cleanup() error {
func (d *Driver) pruneStagingDirectories() bool {
d.stagingDirsLocksMutex.Lock()
for _, lock := range d.stagingDirsLocks {
- lock.Unlock()
+ if err := lock.UnlockAndDelete(); err != nil {
+ logrus.Warnf("Failed to unlock and delete staging lock file: %v", err)
+ }
}
clear(d.stagingDirsLocks)
d.stagingDirsLocksMutex.Unlock()
@@ -886,17 +883,15 @@ func (d *Driver) pruneStagingDirectories() bool {
if err == nil {
for _, dir := range dirs {
stagingDirToRemove := filepath.Join(stagingDirBase, dir.Name())
- lock, err := lockfile.GetLockFile(filepath.Join(stagingDirToRemove, stagingLockFile))
+ lock, err := staging_lockfile.TryLockPath(filepath.Join(stagingDirToRemove, stagingLockFile))
if err != nil {
anyPresent = true
continue
}
- if err := lock.TryLock(); err != nil {
- anyPresent = true
- continue
- }
_ = os.RemoveAll(stagingDirToRemove)
- lock.Unlock()
+ if err := lock.UnlockAndDelete(); err != nil {
+ logrus.Warnf("Failed to unlock and delete staging lock file: %v", err)
+ }
}
}
return anyPresent
@@ -1483,7 +1478,7 @@ func (d *Driver) get(id string, disableShifting bool, options graphdriver.MountO
readWrite := !inAdditionalStore
- if !d.SupportsShifting() || options.DisableShifting {
+ if !d.SupportsShifting(options.UidMaps, options.GidMaps) || options.DisableShifting {
disableShifting = true
}
@@ -2178,7 +2173,10 @@ func (d *Driver) CleanupStagingDirectory(stagingDirectory string) error {
d.stagingDirsLocksMutex.Lock()
if lock, ok := d.stagingDirsLocks[parentStagingDir]; ok {
delete(d.stagingDirsLocks, parentStagingDir)
- lock.Unlock()
+ if err := lock.UnlockAndDelete(); err != nil {
+ d.stagingDirsLocksMutex.Unlock()
+ return err
+ }
}
d.stagingDirsLocksMutex.Unlock()
@@ -2233,7 +2231,7 @@ func (d *Driver) ApplyDiffWithDiffer(options *graphdriver.ApplyDiffWithDifferOpt
return graphdriver.DriverWithDifferOutput{}, err
}
- lock, err := lockfile.GetLockFile(filepath.Join(layerDir, stagingLockFile))
+ lock, err := staging_lockfile.TryLockPath(filepath.Join(layerDir, stagingLockFile))
if err != nil {
return graphdriver.DriverWithDifferOutput{}, err
}
@@ -2242,13 +2240,14 @@ func (d *Driver) ApplyDiffWithDiffer(options *graphdriver.ApplyDiffWithDifferOpt
d.stagingDirsLocksMutex.Lock()
delete(d.stagingDirsLocks, layerDir)
d.stagingDirsLocksMutex.Unlock()
- lock.Unlock()
+ if err := lock.UnlockAndDelete(); err != nil {
+ errRet = errors.Join(errRet, err)
+ }
}
}()
d.stagingDirsLocksMutex.Lock()
d.stagingDirsLocks[layerDir] = lock
d.stagingDirsLocksMutex.Unlock()
- lock.Lock()
logrus.Debugf("Applying differ in %s", applyDir)
@@ -2274,7 +2273,7 @@ func (d *Driver) ApplyDiffWithDiffer(options *graphdriver.ApplyDiffWithDifferOpt
}
// ApplyDiffFromStagingDirectory applies the changes using the specified staging directory.
-func (d *Driver) ApplyDiffFromStagingDirectory(id, parent string, diffOutput *graphdriver.DriverWithDifferOutput, options *graphdriver.ApplyDiffWithDifferOpts) error {
+func (d *Driver) ApplyDiffFromStagingDirectory(id, parent string, diffOutput *graphdriver.DriverWithDifferOutput, options *graphdriver.ApplyDiffWithDifferOpts) (errRet error) {
stagingDirectory := diffOutput.Target
parentStagingDir := filepath.Dir(stagingDirectory)
@@ -2282,7 +2281,9 @@ func (d *Driver) ApplyDiffFromStagingDirectory(id, parent string, diffOutput *gr
d.stagingDirsLocksMutex.Lock()
if lock, ok := d.stagingDirsLocks[parentStagingDir]; ok {
delete(d.stagingDirsLocks, parentStagingDir)
- lock.Unlock()
+ if err := lock.UnlockAndDelete(); err != nil {
+ errRet = errors.Join(errRet, err)
+ }
}
d.stagingDirsLocksMutex.Unlock()
}()
@@ -2553,12 +2554,20 @@ func (d *Driver) supportsIDmappedMounts() bool {
return false
}
-// SupportsShifting tells whether the driver support shifting of the UIDs/GIDs in an userNS
-func (d *Driver) SupportsShifting() bool {
+// SupportsShifting tells whether the driver supports shifting of the UIDs/GIDs to the provided mapping in a userNS
+func (d *Driver) SupportsShifting(uidmap, gidmap []idtools.IDMap) bool {
if os.Getenv("_CONTAINERS_OVERLAY_DISABLE_IDMAP") == "yes" {
return false
}
if d.options.mountProgram != "" {
+ // fuse-overlayfs supports only contiguous mappings, since it performs the mapping on the
+ // upper layer too, to avoid https://github.com/containers/podman/issues/10272
+ if !idtools.IsContiguous(uidmap) {
+ return false
+ }
+ if !idtools.IsContiguous(gidmap) {
+ return false
+ }
return true
}
return d.supportsIDmappedMounts()
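
A short sketch of what “contiguous” means for the new fuse-overlayfs check: each idtools.IDMap entry must continue exactly where the previous one ended, on both the container and the host side. The mappings below are illustrative only.

```go
package main

import (
	"fmt"

	"github.com/containers/storage/pkg/idtools"
)

func main() {
	// Both ranges continue without gaps on the container and host side.
	contiguous := []idtools.IDMap{
		{ContainerID: 0, HostID: 100000, Size: 1000},
		{ContainerID: 1000, HostID: 101000, Size: 1000},
	}
	// The host side starts at 200000 instead of 101000, leaving a gap.
	withGap := []idtools.IDMap{
		{ContainerID: 0, HostID: 100000, Size: 1000},
		{ContainerID: 1000, HostID: 200000, Size: 1000},
	}
	fmt.Println(idtools.IsContiguous(contiguous), idtools.IsContiguous(withGap))
}
```
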
diff --git a/vendor/github.com/containers/storage/drivers/vfs/driver.go b/vendor/github.com/containers/storage/drivers/vfs/driver.go
index 98dc55b0e3..87ff885ec3 100644
--- a/vendor/github.com/containers/storage/drivers/vfs/driver.go
+++ b/vendor/github.com/containers/storage/drivers/vfs/driver.go
@@ -312,9 +312,9 @@ func (d *Driver) AdditionalImageStores() []string {
return nil
}
-// SupportsShifting tells whether the driver support shifting of the UIDs/GIDs in an userNS
-func (d *Driver) SupportsShifting() bool {
- return d.updater.SupportsShifting()
+// SupportsShifting tells whether the driver supports shifting of the UIDs/GIDs to the provided mapping in a userNS
+func (d *Driver) SupportsShifting(uidmap, gidmap []idtools.IDMap) bool {
+ return d.updater.SupportsShifting(uidmap, gidmap)
}
// UpdateLayerIDMap updates ID mappings in a from matching the ones specified
diff --git a/vendor/github.com/containers/storage/drivers/windows/windows.go b/vendor/github.com/containers/storage/drivers/windows/windows.go
index 59ed9a7567..6a5c9bcd1b 100644
--- a/vendor/github.com/containers/storage/drivers/windows/windows.go
+++ b/vendor/github.com/containers/storage/drivers/windows/windows.go
@@ -986,8 +986,8 @@ func (d *Driver) UpdateLayerIDMap(id string, toContainer, toHost *idtools.IDMapp
return fmt.Errorf("windows doesn't support changing ID mappings")
}
-// SupportsShifting tells whether the driver support shifting of the UIDs/GIDs in an userNS
-func (d *Driver) SupportsShifting() bool {
+// SupportsShifting tells whether the driver supports shifting of the UIDs/GIDs to the provided mapping in a userNS
+func (d *Driver) SupportsShifting(uidmap, gidmap []idtools.IDMap) bool {
return false
}
diff --git a/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock.go b/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock.go
new file mode 100644
index 0000000000..4f340ae3c1
--- /dev/null
+++ b/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock.go
@@ -0,0 +1,64 @@
+package rawfilelock
+
+import (
+ "os"
+)
+
+type LockType byte
+
+const (
+ ReadLock LockType = iota
+ WriteLock
+)
+
+type FileHandle = fileHandle
+
+// OpenLock opens a file for locking
+// WARNING: This is the underlying file locking primitive of the OS;
+// because closing FileHandle releases the lock, it is not suitable for use
+// if there is any chance of two concurrent goroutines attempting to use the same lock.
+// Most users should use the higher-level operations from internal/staging_lockfile or pkg/lockfile.
+func OpenLock(path string, readOnly bool) (FileHandle, error) {
+ flags := os.O_CREATE
+ if readOnly {
+ flags |= os.O_RDONLY
+ } else {
+ flags |= os.O_RDWR
+ }
+
+ fd, err := openHandle(path, flags)
+ if err == nil {
+ return fd, nil
+ }
+
+ return fd, &os.PathError{Op: "open", Path: path, Err: err}
+}
+
+// TryLockFile attempts to lock a file handle
+func TryLockFile(fd FileHandle, lockType LockType) error {
+ return lockHandle(fd, lockType, true)
+}
+
+// LockFile locks a file handle
+func LockFile(fd FileHandle, lockType LockType) error {
+ return lockHandle(fd, lockType, false)
+}
+
+// UnlockAndCloseHandle unlocks and closes a file handle
+func UnlockAndCloseHandle(fd FileHandle) {
+ unlockAndCloseHandle(fd)
+}
+
+// CloseHandle closes a file handle without unlocking
+//
+// WARNING: This is a last-resort function for error handling only!
+// On Unix systems, closing a file descriptor automatically releases any locks,
+// so "closing without unlocking" is impossible. This function will release
+// the lock as a side effect of closing the file.
+//
+// This function should only be used in error paths where the lock state
+// is already corrupted or when giving up on lock management entirely.
+// Normal code should use UnlockAndCloseHandle instead.
+func CloseHandle(fd FileHandle) {
+ closeHandle(fd)
+}
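
A sketch of the intended call sequence, written as it might appear inside the containers/storage module itself (internal/rawfilelock cannot be imported from other modules); the lock path and the work done under the lock are placeholders.

```go
package example

import (
	"fmt"

	"github.com/containers/storage/internal/rawfilelock"
)

// withExclusiveLock performs some work while holding an exclusive (write) lock on path.
func withExclusiveLock(path string) error {
	fd, err := rawfilelock.OpenLock(path, false) // read-write handle
	if err != nil {
		return err
	}
	if err := rawfilelock.TryLockFile(fd, rawfilelock.WriteLock); err != nil {
		rawfilelock.CloseHandle(fd) // error path only: closing also drops any lock state
		return fmt.Errorf("acquiring lock on %q: %w", path, err)
	}
	defer rawfilelock.UnlockAndCloseHandle(fd)

	// ... work protected by the lock goes here ...
	return nil
}
```
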
diff --git a/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock_unix.go b/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock_unix.go
new file mode 100644
index 0000000000..2685540769
--- /dev/null
+++ b/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock_unix.go
@@ -0,0 +1,49 @@
+//go:build !windows
+
+package rawfilelock
+
+import (
+ "time"
+
+ "golang.org/x/sys/unix"
+)
+
+type fileHandle uintptr
+
+func openHandle(path string, mode int) (fileHandle, error) {
+ mode |= unix.O_CLOEXEC
+ fd, err := unix.Open(path, mode, 0o644)
+ return fileHandle(fd), err
+}
+
+func lockHandle(fd fileHandle, lType LockType, nonblocking bool) error {
+ fType := unix.F_RDLCK
+ if lType != ReadLock {
+ fType = unix.F_WRLCK
+ }
+ lk := unix.Flock_t{
+ Type: int16(fType),
+ Whence: int16(unix.SEEK_SET),
+ Start: 0,
+ Len: 0,
+ }
+ cmd := unix.F_SETLKW
+ if nonblocking {
+ cmd = unix.F_SETLK
+ }
+ for {
+ err := unix.FcntlFlock(uintptr(fd), cmd, &lk)
+ if err == nil || nonblocking {
+ return err
+ }
+ time.Sleep(10 * time.Millisecond)
+ }
+}
+
+func unlockAndCloseHandle(fd fileHandle) {
+ unix.Close(int(fd))
+}
+
+func closeHandle(fd fileHandle) {
+ unix.Close(int(fd))
+}
diff --git a/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock_windows.go b/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock_windows.go
new file mode 100644
index 0000000000..9c0d692f8a
--- /dev/null
+++ b/vendor/github.com/containers/storage/internal/rawfilelock/rawfilelock_windows.go
@@ -0,0 +1,48 @@
+//go:build windows
+
+package rawfilelock
+
+import (
+ "golang.org/x/sys/windows"
+)
+
+const (
+ reserved = 0
+ allBytes = ^uint32(0)
+)
+
+type fileHandle windows.Handle
+
+func openHandle(path string, mode int) (fileHandle, error) {
+ mode |= windows.O_CLOEXEC
+ fd, err := windows.Open(path, mode, windows.S_IWRITE)
+ return fileHandle(fd), err
+}
+
+func lockHandle(fd fileHandle, lType LockType, nonblocking bool) error {
+ flags := 0
+ if lType != ReadLock {
+ flags = windows.LOCKFILE_EXCLUSIVE_LOCK
+ }
+ if nonblocking {
+ flags |= windows.LOCKFILE_FAIL_IMMEDIATELY
+ }
+ ol := new(windows.Overlapped)
+ if err := windows.LockFileEx(windows.Handle(fd), uint32(flags), reserved, allBytes, allBytes, ol); err != nil {
+ if nonblocking {
+ return err
+ }
+ panic(err)
+ }
+ return nil
+}
+
+func unlockAndCloseHandle(fd fileHandle) {
+ ol := new(windows.Overlapped)
+ windows.UnlockFileEx(windows.Handle(fd), reserved, allBytes, allBytes, ol)
+ closeHandle(fd)
+}
+
+func closeHandle(fd fileHandle) {
+ windows.Close(windows.Handle(fd))
+}
diff --git a/vendor/github.com/containers/storage/internal/staging_lockfile/staging_lockfile.go b/vendor/github.com/containers/storage/internal/staging_lockfile/staging_lockfile.go
new file mode 100644
index 0000000000..1cb2a3327b
--- /dev/null
+++ b/vendor/github.com/containers/storage/internal/staging_lockfile/staging_lockfile.go
@@ -0,0 +1,147 @@
+package staging_lockfile
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+
+ "github.com/containers/storage/internal/rawfilelock"
+)
+
+// StagingLockFile represents a file lock used to coordinate access to staging areas.
+// Typical usage is via CreateAndLock or TryLockPath, both of which return a StagingLockFile
+// that must eventually be released with UnlockAndDelete. This ensures that access
+// to the staging file is properly synchronized both within and across processes.
+//
+// WARNING: This struct MUST NOT be created manually. Use the provided helper functions instead.
+type StagingLockFile struct {
+ // Locking invariant: If stagingLockFileLock is not locked, a StagingLockFile for a particular
+ // path exists if the current process currently owns the lock for that file, and it is recorded in stagingLockFiles.
+ //
+ // The following fields can only be accessed by the goroutine owning the lock.
+ //
+ // An empty string in the file field means that the lock has been released and the StagingLockFile is no longer valid.
+ file string // Also the key in stagingLockFiles
+ fd rawfilelock.FileHandle
+}
+
+const maxRetries = 1000
+
+var (
+ stagingLockFiles map[string]*StagingLockFile
+ stagingLockFileLock sync.Mutex
+)
+
+// tryAcquireLockForFile attempts to acquire a lock for the specified file path.
+func tryAcquireLockForFile(path string) (*StagingLockFile, error) {
+ cleanPath, err := filepath.Abs(path)
+ if err != nil {
+ return nil, fmt.Errorf("ensuring that path %q is an absolute path: %w", path, err)
+ }
+
+ stagingLockFileLock.Lock()
+ defer stagingLockFileLock.Unlock()
+
+ if stagingLockFiles == nil {
+ stagingLockFiles = make(map[string]*StagingLockFile)
+ }
+
+ if _, ok := stagingLockFiles[cleanPath]; ok {
+ return nil, fmt.Errorf("lock %q is used already with other thread", cleanPath)
+ }
+
+ fd, err := rawfilelock.OpenLock(cleanPath, false)
+ if err != nil {
+ return nil, err
+ }
+
+ if err = rawfilelock.TryLockFile(fd, rawfilelock.WriteLock); err != nil {
+ // Lock acquisition failed, but holding stagingLockFileLock ensures
+ // no other goroutine in this process could have obtained a lock for this file,
+ // so closing it is still safe.
+ rawfilelock.CloseHandle(fd)
+ return nil, fmt.Errorf("failed to acquire lock on %q: %w", cleanPath, err)
+ }
+
+ lockFile := &StagingLockFile{
+ file: cleanPath,
+ fd: fd,
+ }
+
+ stagingLockFiles[cleanPath] = lockFile
+ return lockFile, nil
+}
+
+// UnlockAndDelete releases the lock and removes the associated file from the filesystem.
+//
+// WARNING: After this operation, the StagingLockFile becomes invalid for further use.
+func (l *StagingLockFile) UnlockAndDelete() error {
+ stagingLockFileLock.Lock()
+ defer stagingLockFileLock.Unlock()
+
+ if l.file == "" {
+ // Panic when unlocking an unlocked lock: that is a violation of the
+ // lock semantics, and panicking surfaces the bug immediately.
+ panic("calling Unlock on unlocked lock")
+ }
+
+ defer func() {
+ // It’s important that this happens while we are still holding stagingLockFileLock, to ensure
+ // that no other goroutine has l.file open = that this close is not unlocking the lock under any
+ // other goroutine. (defer ordering is LIFO, so this will happen before we release the stagingLockFileLock)
+ rawfilelock.UnlockAndCloseHandle(l.fd)
+ delete(stagingLockFiles, l.file)
+ l.file = ""
+ }()
+ if err := os.Remove(l.file); err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ return nil
+}
+
+// CreateAndLock creates a new temporary file in the specified directory with the given pattern,
+// then creates and locks a StagingLockFile for it. The file is created using os.CreateTemp.
+// Typically, the caller would use the returned lock file path to derive a path to the lock-controlled resource
+// (e.g. by replacing the "pattern" part of the returned file name with a different prefix).
+// The caller MUST call UnlockAndDelete() on the returned StagingLockFile to release the lock and delete the file.
+//
+// Returns:
+// - The locked StagingLockFile
+// - The name of the created lock file
+// - Any error that occurred during the process
+//
+// If the file cannot be locked, this function will retry up to maxRetries times before failing.
+func CreateAndLock(dir string, pattern string) (*StagingLockFile, string, error) {
+ for try := 0; ; try++ {
+ file, err := os.CreateTemp(dir, pattern)
+ if err != nil {
+ return nil, "", err
+ }
+ file.Close()
+
+ path := file.Name()
+ l, err := tryAcquireLockForFile(path)
+ if err != nil {
+ if try < maxRetries {
+ continue // Retry if the lock cannot be acquired
+ }
+ return nil, "", fmt.Errorf(
+ "failed to allocate lock in %q after %d attempts; last failure on %q: %w",
+ dir, try, filepath.Base(path), err,
+ )
+ }
+
+ return l, filepath.Base(path), nil
+ }
+}
+
+// TryLockPath attempts to acquire a lock on a specific path. If the file does not exist,
+// it will be created.
+//
+// Warning: If acquiring a lock is successful, it returns a new StagingLockFile
+// instance for the file. Caller MUST call UnlockAndDelete() on the returned StagingLockFile
+// to release the lock and delete the file.
+func TryLockPath(path string) (*StagingLockFile, error) {
+ return tryAcquireLockForFile(path)
+}
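
The doc comments above describe the intended call pattern; here is a minimal usage sketch under those assumptions (the directory, prefix, and helper name are made up, and the package is internal to c/storage):

package example

import (
	"path/filepath"
	"strings"

	"github.com/containers/storage/internal/staging_lockfile"
)

// useStagingArea is a hypothetical caller: create and lock a per-staging-area
// lock file, derive the resource path from the returned name, and release and
// delete the lock when finished.
func useStagingArea(stagingDir string) error {
	lock, name, err := staging_lockfile.CreateAndLock(stagingDir, "staging-")
	if err != nil {
		return err
	}
	resource := filepath.Join(stagingDir, strings.Replace(name, "staging-", "data-", 1))
	_ = resource // ... populate the staging area here ...

	// Releases the lock and removes the lock file; the StagingLockFile is
	// invalid afterwards.
	return lock.UnlockAndDelete()
}
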
diff --git a/vendor/github.com/containers/storage/layers.go b/vendor/github.com/containers/storage/layers.go
index 3fbd9b8bd1..a84706e4c1 100644
--- a/vendor/github.com/containers/storage/layers.go
+++ b/vendor/github.com/containers/storage/layers.go
@@ -1634,7 +1634,7 @@ func (r *layerStore) Mount(id string, options drivers.MountOpts) (string, error)
options.MountLabel = layer.MountLabel
}
- if (options.UidMaps != nil || options.GidMaps != nil) && !r.driver.SupportsShifting() {
+ if (options.UidMaps != nil || options.GidMaps != nil) && !r.driver.SupportsShifting(options.UidMaps, options.GidMaps) {
if !reflect.DeepEqual(options.UidMaps, layer.UIDMap) || !reflect.DeepEqual(options.GidMaps, layer.GIDMap) {
return "", fmt.Errorf("cannot mount layer %v: shifting not enabled", layer.ID)
}
diff --git a/vendor/github.com/containers/storage/pkg/chunked/compression_linux.go b/vendor/github.com/containers/storage/pkg/chunked/compression_linux.go
index 3cf6100be1..ddd7ff53b1 100644
--- a/vendor/github.com/containers/storage/pkg/chunked/compression_linux.go
+++ b/vendor/github.com/containers/storage/pkg/chunked/compression_linux.go
@@ -185,7 +185,10 @@ func openTmpFileNoTmpFile(tmpDir string) (*os.File, error) {
// Returns (manifest blob, parsed manifest, tar-split file or nil, manifest offset).
// The opened tar-split file’s position is unspecified.
// It may return an error matching ErrFallbackToOrdinaryLayerDownload / errFallbackCanConvert.
-func readZstdChunkedManifest(tmpDir string, blobStream ImageSourceSeekable, tocDigest digest.Digest, annotations map[string]string) (_ []byte, _ *minimal.TOC, _ *os.File, _ int64, retErr error) {
+// The compressed parameter indicates whether the manifest and tar-split data are zstd-compressed
+// (true) or stored uncompressed (false). Uncompressed data is used only for an optimization to convert
+// a regular OCI layer to zstd:chunked when convert_images is set, and it is not used for distributed images.
+func readZstdChunkedManifest(tmpDir string, blobStream ImageSourceSeekable, tocDigest digest.Digest, annotations map[string]string, compressed bool) (_ []byte, _ *minimal.TOC, _ *os.File, _ int64, retErr error) {
offsetMetadata := annotations[minimal.ManifestInfoKey]
if offsetMetadata == "" {
return nil, nil, nil, 0, fmt.Errorf("%q annotation missing", minimal.ManifestInfoKey)
@@ -261,7 +264,7 @@ func readZstdChunkedManifest(tmpDir string, blobStream ImageSourceSeekable, tocD
return nil, nil, nil, 0, err
}
- decodedBlob, err := decodeAndValidateBlob(manifest, manifestLengthUncompressed, tocDigest.String())
+ decodedBlob, err := decodeAndValidateBlob(manifest, manifestLengthUncompressed, tocDigest.String(), compressed)
if err != nil {
return nil, nil, nil, 0, fmt.Errorf("validating and decompressing TOC: %w", err)
}
@@ -288,7 +291,7 @@ func readZstdChunkedManifest(tmpDir string, blobStream ImageSourceSeekable, tocD
decodedTarSplit.Close()
}
}()
- if err := decodeAndValidateBlobToStream(tarSplit, decodedTarSplit, toc.TarSplitDigest.String()); err != nil {
+ if err := decodeAndValidateBlobToStream(tarSplit, decodedTarSplit, toc.TarSplitDigest.String(), compressed); err != nil {
return nil, nil, nil, 0, fmt.Errorf("validating and decompressing tar-split: %w", err)
}
// We use the TOC for creating on-disk files, but the tar-split for creating metadata
@@ -487,11 +490,15 @@ func validateBlob(blob []byte, expectedCompressedChecksum string) error {
return nil
}
-func decodeAndValidateBlob(blob []byte, lengthUncompressed uint64, expectedCompressedChecksum string) ([]byte, error) {
+func decodeAndValidateBlob(blob []byte, lengthUncompressed uint64, expectedCompressedChecksum string, compressed bool) ([]byte, error) {
if err := validateBlob(blob, expectedCompressedChecksum); err != nil {
return nil, err
}
+ if !compressed {
+ return blob, nil
+ }
+
decoder, err := zstd.NewReader(nil)
if err != nil {
return nil, err
@@ -502,11 +509,16 @@ func decodeAndValidateBlob(blob []byte, lengthUncompressed uint64, expectedCompr
return decoder.DecodeAll(blob, b)
}
-func decodeAndValidateBlobToStream(blob []byte, w *os.File, expectedCompressedChecksum string) error {
+func decodeAndValidateBlobToStream(blob []byte, w *os.File, expectedCompressedChecksum string, compressed bool) error {
if err := validateBlob(blob, expectedCompressedChecksum); err != nil {
return err
}
+ if !compressed {
+ _, err := w.Write(blob)
+ return err
+ }
+
decoder, err := zstd.NewReader(bytes.NewReader(blob))
if err != nil {
return err
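
The new compressed flag only changes whether zstd decompression runs after the digest check. A standalone sketch of that branching, with the checksum handling simplified relative to validateBlob above (illustrative only, not the vendored code):

package example

import (
	"fmt"

	"github.com/klauspost/compress/zstd"
	digest "github.com/opencontainers/go-digest"
)

// decodeTOC validates the blob against its expected digest, then either
// returns it as-is (locally converted, uncompressed layers) or zstd-decodes it
// (zstd:chunked layers pulled from a registry).
func decodeTOC(blob []byte, expected digest.Digest, compressed bool) ([]byte, error) {
	if digest.FromBytes(blob) != expected {
		return nil, fmt.Errorf("digest mismatch for TOC blob")
	}
	if !compressed {
		return blob, nil
	}
	dec, err := zstd.NewReader(nil)
	if err != nil {
		return nil, err
	}
	defer dec.Close()
	return dec.DecodeAll(blob, nil)
}
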
diff --git a/vendor/github.com/containers/storage/pkg/chunked/compressor/compressor.go b/vendor/github.com/containers/storage/pkg/chunked/compressor/compressor.go
index 0de063a24c..2930723aa7 100644
--- a/vendor/github.com/containers/storage/pkg/chunked/compressor/compressor.go
+++ b/vendor/github.com/containers/storage/pkg/chunked/compressor/compressor.go
@@ -11,7 +11,6 @@ import (
"github.com/containers/storage/pkg/chunked/internal/minimal"
"github.com/containers/storage/pkg/ioutils"
- "github.com/klauspost/compress/zstd"
"github.com/opencontainers/go-digest"
"github.com/vbatts/tar-split/archive/tar"
"github.com/vbatts/tar-split/tar/asm"
@@ -202,15 +201,15 @@ type tarSplitData struct {
compressed *bytes.Buffer
digester digest.Digester
uncompressedCounter *ioutils.WriteCounter
- zstd *zstd.Encoder
+ zstd minimal.ZstdWriter
packer storage.Packer
}
-func newTarSplitData(level int) (*tarSplitData, error) {
+func newTarSplitData(createZstdWriter minimal.CreateZstdWriterFunc) (*tarSplitData, error) {
compressed := bytes.NewBuffer(nil)
digester := digest.Canonical.Digester()
- zstdWriter, err := minimal.ZstdWriterWithLevel(io.MultiWriter(compressed, digester.Hash()), level)
+ zstdWriter, err := createZstdWriter(io.MultiWriter(compressed, digester.Hash()))
if err != nil {
return nil, err
}
@@ -227,11 +226,11 @@ func newTarSplitData(level int) (*tarSplitData, error) {
}, nil
}
-func writeZstdChunkedStream(destFile io.Writer, outMetadata map[string]string, reader io.Reader, level int) error {
+func writeZstdChunkedStream(destFile io.Writer, outMetadata map[string]string, reader io.Reader, createZstdWriter minimal.CreateZstdWriterFunc) error {
// total written so far. Used to retrieve partial offsets in the file
dest := ioutils.NewWriteCounter(destFile)
- tarSplitData, err := newTarSplitData(level)
+ tarSplitData, err := newTarSplitData(createZstdWriter)
if err != nil {
return err
}
@@ -251,7 +250,7 @@ func writeZstdChunkedStream(destFile io.Writer, outMetadata map[string]string, r
buf := make([]byte, 4096)
- zstdWriter, err := minimal.ZstdWriterWithLevel(dest, level)
+ zstdWriter, err := createZstdWriter(dest)
if err != nil {
return err
}
@@ -404,18 +403,11 @@ func writeZstdChunkedStream(destFile io.Writer, outMetadata map[string]string, r
return err
}
- if err := zstdWriter.Flush(); err != nil {
- zstdWriter.Close()
- return err
- }
if err := zstdWriter.Close(); err != nil {
return err
}
zstdWriter = nil
- if err := tarSplitData.zstd.Flush(); err != nil {
- return err
- }
if err := tarSplitData.zstd.Close(); err != nil {
return err
}
@@ -427,7 +419,7 @@ func writeZstdChunkedStream(destFile io.Writer, outMetadata map[string]string, r
UncompressedSize: tarSplitData.uncompressedCounter.Count,
}
- return minimal.WriteZstdChunkedManifest(dest, outMetadata, uint64(dest.Count), &ts, metadata, level)
+ return minimal.WriteZstdChunkedManifest(dest, outMetadata, uint64(dest.Count), &ts, metadata, createZstdWriter)
}
type zstdChunkedWriter struct {
@@ -454,7 +446,7 @@ func (w zstdChunkedWriter) Write(p []byte) (int, error) {
}
}
-// zstdChunkedWriterWithLevel writes a zstd compressed tarball where each file is
+// makeZstdChunkedWriter writes a zstd compressed tarball where each file is
// compressed separately so it can be addressed separately. Idea based on CRFS:
// https://github.com/google/crfs
// The difference with CRFS is that the zstd compression is used instead of gzip.
@@ -469,12 +461,12 @@ func (w zstdChunkedWriter) Write(p []byte) (int, error) {
// [SKIPPABLE FRAME 1]: [ZSTD SKIPPABLE FRAME, SIZE=MANIFEST LENGTH][MANIFEST]
// [SKIPPABLE FRAME 2]: [ZSTD SKIPPABLE FRAME, SIZE=16][MANIFEST_OFFSET][MANIFEST_LENGTH][MANIFEST_LENGTH_UNCOMPRESSED][MANIFEST_TYPE][CHUNKED_ZSTD_MAGIC_NUMBER]
// MANIFEST_OFFSET, MANIFEST_LENGTH, MANIFEST_LENGTH_UNCOMPRESSED and CHUNKED_ZSTD_MAGIC_NUMBER are 64 bits unsigned in little endian format.
-func zstdChunkedWriterWithLevel(out io.Writer, metadata map[string]string, level int) (io.WriteCloser, error) {
+func makeZstdChunkedWriter(out io.Writer, metadata map[string]string, createZstdWriter minimal.CreateZstdWriterFunc) (io.WriteCloser, error) {
ch := make(chan error, 1)
r, w := io.Pipe()
go func() {
- ch <- writeZstdChunkedStream(out, metadata, r, level)
+ ch <- writeZstdChunkedStream(out, metadata, r, createZstdWriter)
_, _ = io.Copy(io.Discard, r) // Ordinarily writeZstdChunkedStream consumes all of r. If it fails, ensure the write end never blocks and eventually terminates.
r.Close()
close(ch)
@@ -493,5 +485,40 @@ func ZstdCompressor(r io.Writer, metadata map[string]string, level *int) (io.Wri
level = &l
}
- return zstdChunkedWriterWithLevel(r, metadata, *level)
+ createZstdWriter := func(dest io.Writer) (minimal.ZstdWriter, error) {
+ return minimal.ZstdWriterWithLevel(dest, *level)
+ }
+
+ return makeZstdChunkedWriter(r, metadata, createZstdWriter)
+}
+
+type noCompression struct {
+ dest io.Writer
+}
+
+func (n *noCompression) Write(p []byte) (int, error) {
+ return n.dest.Write(p)
+}
+
+func (n *noCompression) Close() error {
+ return nil
+}
+
+func (n *noCompression) Flush() error {
+ return nil
+}
+
+func (n *noCompression) Reset(dest io.Writer) {
+ n.dest = dest
+}
+
+// NoCompression writes directly to the output writer without any compression.
+//
+// Such an output does not follow the zstd:chunked spec and cannot be generally consumed; this function
+// only exists for internal purposes and should not be called from outside c/storage.
+func NoCompression(r io.Writer, metadata map[string]string) (io.WriteCloser, error) {
+ createZstdWriter := func(dest io.Writer) (minimal.ZstdWriter, error) {
+ return &noCompression{dest: dest}, nil
+ }
+ return makeZstdChunkedWriter(r, metadata, createZstdWriter)
}
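
Replacing the fixed compression level with a CreateZstdWriterFunc is what lets NoCompression reuse the entire chunked-writer pipeline with the passthrough noCompression writer above. A hedged usage sketch of choosing between the two entry points (the wrapper function and its parameters are invented for illustration):

package example

import (
	"io"
	"os"

	"github.com/containers/storage/pkg/chunked/compressor"
)

// newChunkedWriter returns either a real zstd:chunked writer or the internal
// uncompressed variant. Per the comment above, NoCompression output does not
// follow the zstd:chunked spec and is only meant for use inside c/storage.
func newChunkedWriter(dest *os.File, annotations map[string]string, compress bool) (io.WriteCloser, error) {
	if compress {
		level := 3 // illustrative; passing nil selects the default level
		return compressor.ZstdCompressor(dest, annotations, &level)
	}
	return compressor.NoCompression(dest, annotations)
}
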
diff --git a/vendor/github.com/containers/storage/pkg/chunked/internal/minimal/compression.go b/vendor/github.com/containers/storage/pkg/chunked/internal/minimal/compression.go
index f85c5973ca..4191524cc5 100644
--- a/vendor/github.com/containers/storage/pkg/chunked/internal/minimal/compression.go
+++ b/vendor/github.com/containers/storage/pkg/chunked/internal/minimal/compression.go
@@ -20,6 +20,15 @@ import (
"github.com/vbatts/tar-split/archive/tar"
)
+// ZstdWriter is an interface that wraps standard io.WriteCloser and Reset() to reuse the compressor with a new writer.
+type ZstdWriter interface {
+ io.WriteCloser
+ Reset(dest io.Writer)
+}
+
+// CreateZstdWriterFunc is a function that creates a ZstdWriter for the provided destination writer.
+type CreateZstdWriterFunc func(dest io.Writer) (ZstdWriter, error)
+
// TOC is short for Table of Contents and is used by the zstd:chunked
// file format to effectively add an overall index into the contents
// of a tarball; it also includes file metadata.
@@ -179,7 +188,7 @@ type TarSplitData struct {
UncompressedSize int64
}
-func WriteZstdChunkedManifest(dest io.Writer, outMetadata map[string]string, offset uint64, tarSplitData *TarSplitData, metadata []FileMetadata, level int) error {
+func WriteZstdChunkedManifest(dest io.Writer, outMetadata map[string]string, offset uint64, tarSplitData *TarSplitData, metadata []FileMetadata, createZstdWriter CreateZstdWriterFunc) error {
// 8 is the size of the zstd skippable frame header + the frame size
const zstdSkippableFrameHeader = 8
manifestOffset := offset + zstdSkippableFrameHeader
@@ -198,7 +207,7 @@ func WriteZstdChunkedManifest(dest io.Writer, outMetadata map[string]string, off
}
var compressedBuffer bytes.Buffer
- zstdWriter, err := ZstdWriterWithLevel(&compressedBuffer, level)
+ zstdWriter, err := createZstdWriter(&compressedBuffer)
if err != nil {
return err
}
@@ -244,7 +253,7 @@ func WriteZstdChunkedManifest(dest io.Writer, outMetadata map[string]string, off
return appendZstdSkippableFrame(dest, manifestDataLE)
}
-func ZstdWriterWithLevel(dest io.Writer, level int) (*zstd.Encoder, error) {
+func ZstdWriterWithLevel(dest io.Writer, level int) (ZstdWriter, error) {
el := zstd.EncoderLevelFromZstd(level)
return zstd.NewWriter(dest, zstd.WithEncoderLevel(el))
}
diff --git a/vendor/github.com/containers/storage/pkg/chunked/storage_linux.go b/vendor/github.com/containers/storage/pkg/chunked/storage_linux.go
index 97dc9b8141..f23a96b7a2 100644
--- a/vendor/github.com/containers/storage/pkg/chunked/storage_linux.go
+++ b/vendor/github.com/containers/storage/pkg/chunked/storage_linux.go
@@ -170,8 +170,7 @@ func (c *chunkedDiffer) convertTarToZstdChunked(destDirectory string, payload *o
}
newAnnotations := make(map[string]string)
- level := 1
- chunked, err := compressor.ZstdCompressor(f, newAnnotations, &level)
+ chunked, err := compressor.NoCompression(f, newAnnotations)
if err != nil {
f.Close()
return 0, nil, "", nil, err
@@ -341,7 +340,7 @@ func makeConvertFromRawDiffer(store storage.Store, blobDigest digest.Digest, blo
// makeZstdChunkedDiffer sets up a chunkedDiffer for a zstd:chunked layer.
// It may return an error matching ErrFallbackToOrdinaryLayerDownload / errFallbackCanConvert.
func makeZstdChunkedDiffer(store storage.Store, blobSize int64, tocDigest digest.Digest, annotations map[string]string, iss ImageSourceSeekable, pullOptions pullOptions) (_ *chunkedDiffer, retErr error) {
- manifest, toc, tarSplit, tocOffset, err := readZstdChunkedManifest(store.RunRoot(), iss, tocDigest, annotations)
+ manifest, toc, tarSplit, tocOffset, err := readZstdChunkedManifest(store.RunRoot(), iss, tocDigest, annotations, true)
if err != nil { // May be ErrFallbackToOrdinaryLayerDownload / errFallbackCanConvert
return nil, fmt.Errorf("read zstd:chunked manifest: %w", err)
}
@@ -666,20 +665,17 @@ func (o *originFile) OpenFile() (io.ReadCloser, error) {
return srcFile, nil
}
-func (c *chunkedDiffer) prepareCompressedStreamToFile(partCompression compressedFileType, from io.Reader, mf *missingFileChunk) (compressedFileType, error) {
+func (c *chunkedDiffer) prepareCompressedStreamToFile(partCompression compressedFileType, mf *missingFileChunk) (compressedFileType, error) {
switch {
case partCompression == fileTypeHole:
// The entire part is a hole. Do not need to read from a file.
- c.rawReader = nil
return fileTypeHole, nil
case mf.Hole:
// Only the missing chunk in the requested part refers to a hole.
// The received data must be discarded.
- limitReader := io.LimitReader(from, mf.CompressedSize)
- _, err := io.CopyBuffer(io.Discard, limitReader, c.copyBuffer)
+ _, err := io.CopyBuffer(io.Discard, c.rawReader, c.copyBuffer)
return fileTypeHole, err
case partCompression == fileTypeZstdChunked:
- c.rawReader = io.LimitReader(from, mf.CompressedSize)
if c.zstdReader == nil {
r, err := zstd.NewReader(c.rawReader)
if err != nil {
@@ -692,7 +688,6 @@ func (c *chunkedDiffer) prepareCompressedStreamToFile(partCompression compressed
}
}
case partCompression == fileTypeEstargz:
- c.rawReader = io.LimitReader(from, mf.CompressedSize)
if c.gzipReader == nil {
r, err := pgzip.NewReader(c.rawReader)
if err != nil {
@@ -705,7 +700,7 @@ func (c *chunkedDiffer) prepareCompressedStreamToFile(partCompression compressed
}
}
case partCompression == fileTypeNoCompression:
- c.rawReader = io.LimitReader(from, mf.UncompressedSize)
+ return fileTypeNoCompression, nil
default:
return partCompression, fmt.Errorf("unknown file type %q", c.fileType)
}
@@ -905,6 +900,7 @@ func (c *chunkedDiffer) storeMissingFiles(streams chan io.ReadCloser, errs chan
for _, missingPart := range missingParts {
var part io.ReadCloser
partCompression := c.fileType
+ readingFromLocalFile := false
switch {
case missingPart.Hole:
partCompression = fileTypeHole
@@ -915,6 +911,7 @@ func (c *chunkedDiffer) storeMissingFiles(streams chan io.ReadCloser, errs chan
return err
}
partCompression = fileTypeNoCompression
+ readingFromLocalFile = true
case missingPart.SourceChunk != nil:
select {
case p := <-streams:
@@ -948,7 +945,18 @@ func (c *chunkedDiffer) storeMissingFiles(streams chan io.ReadCloser, errs chan
goto exit
}
- compression, err := c.prepareCompressedStreamToFile(partCompression, part, &mf)
+ c.rawReader = nil
+ if part != nil {
+ limit := mf.CompressedSize
+ // If we are reading from a source file, use the uncompressed size to limit the reader, because
+ // the compressed size refers to the original layer stream.
+ if readingFromLocalFile {
+ limit = mf.UncompressedSize
+ }
+ c.rawReader = io.LimitReader(part, limit)
+ }
+
+ compression, err := c.prepareCompressedStreamToFile(partCompression, &mf)
if err != nil {
Err = err
goto exit
@@ -1440,7 +1448,9 @@ func (c *chunkedDiffer) ApplyDiff(dest string, options *archive.TarOptions, diff
if err != nil {
return graphdriver.DriverWithDifferOutput{}, err
}
+
c.uncompressedTarSize = tarSize
+
// fileSource is a O_TMPFILE file descriptor, so we
// need to keep it open until the entire file is processed.
defer fileSource.Close()
@@ -1456,7 +1466,7 @@ func (c *chunkedDiffer) ApplyDiff(dest string, options *archive.TarOptions, diff
if tocDigest == nil {
return graphdriver.DriverWithDifferOutput{}, fmt.Errorf("internal error: just-created zstd:chunked missing TOC digest")
}
- manifest, toc, tarSplit, tocOffset, err := readZstdChunkedManifest(dest, fileSource, *tocDigest, annotations)
+ manifest, toc, tarSplit, tocOffset, err := readZstdChunkedManifest(dest, fileSource, *tocDigest, annotations, false)
if err != nil {
return graphdriver.DriverWithDifferOutput{}, fmt.Errorf("read zstd:chunked manifest: %w", err)
}
@@ -1465,7 +1475,7 @@ func (c *chunkedDiffer) ApplyDiff(dest string, options *archive.TarOptions, diff
stream = fileSource
// fill the chunkedDiffer with the data we just read.
- c.fileType = fileTypeZstdChunked
+ c.fileType = fileTypeNoCompression
c.manifest = manifest
c.toc = toc
c.tarSplit = tarSplit
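
The reader-limiting logic that used to live inside prepareCompressedStreamToFile is now set up once in storeMissingFiles, and the limit depends on where the part's bytes come from. A small sketch of that decision in isolation (field and variable names follow the diff; this is not the vendored function):

package example

import "io"

// limitForPart caps the raw reader for one missing part: chunks streamed from
// the registry are bounded by their compressed size, while chunks read back
// from the locally converted, uncompressed file are bounded by their
// uncompressed size. A nil part (e.g. a hole) yields no reader at all.
func limitForPart(part io.Reader, compressedSize, uncompressedSize int64, readingFromLocalFile bool) io.Reader {
	if part == nil {
		return nil
	}
	limit := compressedSize
	if readingFromLocalFile {
		limit = uncompressedSize
	}
	return io.LimitReader(part, limit)
}
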
diff --git a/vendor/github.com/containers/storage/pkg/lockfile/lockfile.go b/vendor/github.com/containers/storage/pkg/lockfile/lockfile.go
index 52f6c0a62c..dfe81c2458 100644
--- a/vendor/github.com/containers/storage/pkg/lockfile/lockfile.go
+++ b/vendor/github.com/containers/storage/pkg/lockfile/lockfile.go
@@ -6,6 +6,8 @@ import (
"path/filepath"
"sync"
"time"
+
+ "github.com/containers/storage/internal/rawfilelock"
)
// A Locker represents a file lock where the file is used to cache an
@@ -55,13 +57,6 @@ type Locker interface {
AssertLockedForWriting()
}
-type lockType byte
-
-const (
- readLock lockType = iota
- writeLock
-)
-
// LockFile represents a file lock where the file is used to cache an
// identifier of the last party that made changes to whatever's being protected
// by the lock.
@@ -79,12 +74,12 @@ type LockFile struct {
stateMutex *sync.Mutex
counter int64
lw LastWrite // A global value valid as of the last .Touch() or .Modified()
- lockType lockType
+ lockType rawfilelock.LockType
locked bool
// The following fields are only modified on transitions between counter == 0 / counter != 0.
// Thus, they can be safely accessed by users _that currently hold the LockFile_ without locking.
// In other cases, they need to be protected using stateMutex.
- fd fileHandle
+ fd rawfilelock.FileHandle
}
var (
@@ -129,12 +124,12 @@ func (l *LockFile) Lock() {
if l.ro {
panic("can't take write lock on read-only lock file")
}
- l.lock(writeLock)
+ l.lock(rawfilelock.WriteLock)
}
// RLock locks the lockfile as a reader.
func (l *LockFile) RLock() {
- l.lock(readLock)
+ l.lock(rawfilelock.ReadLock)
}
// TryLock attempts to lock the lockfile as a writer. Panic if the lock is a read-only one.
@@ -142,12 +137,12 @@ func (l *LockFile) TryLock() error {
if l.ro {
panic("can't take write lock on read-only lock file")
}
- return l.tryLock(writeLock)
+ return l.tryLock(rawfilelock.WriteLock)
}
// TryRLock attempts to lock the lockfile as a reader.
func (l *LockFile) TryRLock() error {
- return l.tryLock(readLock)
+ return l.tryLock(rawfilelock.ReadLock)
}
// Unlock unlocks the lockfile.
@@ -172,9 +167,9 @@ func (l *LockFile) Unlock() {
l.locked = false
// Close the file descriptor on the last unlock, releasing the
// file lock.
- unlockAndCloseHandle(l.fd)
+ rawfilelock.UnlockAndCloseHandle(l.fd)
}
- if l.lockType == readLock {
+ if l.lockType == rawfilelock.ReadLock {
l.rwMutex.RUnlock()
} else {
l.rwMutex.Unlock()
@@ -206,7 +201,7 @@ func (l *LockFile) AssertLockedForWriting() {
l.AssertLocked()
// Like AssertLocked, don’t even bother with l.stateMutex.
- if l.lockType == readLock {
+ if l.lockType == rawfilelock.ReadLock {
panic("internal error: lock is not held for writing")
}
}
@@ -273,7 +268,7 @@ func (l *LockFile) Touch() error {
return err
}
l.stateMutex.Lock()
- if !l.locked || (l.lockType == readLock) {
+ if !l.locked || (l.lockType == rawfilelock.ReadLock) {
panic("attempted to update last-writer in lockfile without the write lock")
}
defer l.stateMutex.Unlock()
@@ -324,6 +319,24 @@ func getLockfile(path string, ro bool) (*LockFile, error) {
return lockFile, nil
}
+// openLock opens a lock file at the specified path, creating the parent directory if it does not exist.
+func openLock(path string, readOnly bool) (rawfilelock.FileHandle, error) {
+ fd, err := rawfilelock.OpenLock(path, readOnly)
+ if err == nil {
+ return fd, nil
+ }
+
+ // the directory of the lockfile seems to be removed, try to create it
+ if os.IsNotExist(err) {
+ if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
+ return fd, fmt.Errorf("creating lock file directory: %w", err)
+ }
+
+ return openLock(path, readOnly)
+ }
+ return fd, &os.PathError{Op: "open", Path: path, Err: err}
+}
+
// createLockFileForPath returns new *LockFile object, possibly (depending on the platform)
// working inter-process and associated with the specified path.
//
@@ -343,11 +356,11 @@ func createLockFileForPath(path string, ro bool) (*LockFile, error) {
if err != nil {
return nil, err
}
- unlockAndCloseHandle(fd)
+ rawfilelock.UnlockAndCloseHandle(fd)
- lType := writeLock
+ lType := rawfilelock.WriteLock
if ro {
- lType = readLock
+ lType = rawfilelock.ReadLock
}
return &LockFile{
@@ -362,40 +375,10 @@ func createLockFileForPath(path string, ro bool) (*LockFile, error) {
}, nil
}
-// openLock opens the file at path and returns the corresponding file
-// descriptor. The path is opened either read-only or read-write,
-// depending on the value of ro argument.
-//
-// openLock will create the file and its parent directories,
-// if necessary.
-func openLock(path string, ro bool) (fd fileHandle, err error) {
- flags := os.O_CREATE
- if ro {
- flags |= os.O_RDONLY
- } else {
- flags |= os.O_RDWR
- }
- fd, err = openHandle(path, flags)
- if err == nil {
- return fd, nil
- }
-
- // the directory of the lockfile seems to be removed, try to create it
- if os.IsNotExist(err) {
- if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
- return fd, fmt.Errorf("creating lock file directory: %w", err)
- }
-
- return openLock(path, ro)
- }
-
- return fd, &os.PathError{Op: "open", Path: path, Err: err}
-}
-
// lock locks the lockfile via syscall based on the specified type and
// command.
-func (l *LockFile) lock(lType lockType) {
- if lType == readLock {
+func (l *LockFile) lock(lType rawfilelock.LockType) {
+ if lType == rawfilelock.ReadLock {
l.rwMutex.RLock()
} else {
l.rwMutex.Lock()
@@ -413,7 +396,7 @@ func (l *LockFile) lock(lType lockType) {
// Optimization: only use the (expensive) syscall when
// the counter is 0. In this case, we're either the first
// reader lock or a writer lock.
- if err := lockHandle(l.fd, lType, false); err != nil {
+ if err := rawfilelock.LockFile(l.fd, lType); err != nil {
panic(err)
}
}
@@ -424,10 +407,10 @@ func (l *LockFile) lock(lType lockType) {
// lock locks the lockfile via syscall based on the specified type and
// command.
-func (l *LockFile) tryLock(lType lockType) error {
+func (l *LockFile) tryLock(lType rawfilelock.LockType) error {
var success bool
var rwMutexUnlocker func()
- if lType == readLock {
+ if lType == rawfilelock.ReadLock {
success = l.rwMutex.TryRLock()
rwMutexUnlocker = l.rwMutex.RUnlock
} else {
@@ -451,8 +434,8 @@ func (l *LockFile) tryLock(lType lockType) error {
// Optimization: only use the (expensive) syscall when
// the counter is 0. In this case, we're either the first
// reader lock or a writer lock.
- if err = lockHandle(l.fd, lType, true); err != nil {
- closeHandle(fd)
+ if err = rawfilelock.TryLockFile(l.fd, lType); err != nil {
+ rawfilelock.CloseHandle(fd)
rwMutexUnlocker()
return err
}
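
The new openLock above keeps the old recovery behaviour: if the open fails because the lock file's parent directory has been removed, recreate the directory and retry. A generic sketch of that pattern outside the rawfilelock wrapper (plain os calls, illustrative only):

package example

import (
	"errors"
	"io/fs"
	"os"
	"path/filepath"
)

// openCreatingParent opens (or creates) a file, and on a missing-directory
// error creates the parent with 0o700 and tries once more, mirroring the
// retry in openLock.
func openCreatingParent(path string) (*os.File, error) {
	f, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE, 0o644)
	if err == nil {
		return f, nil
	}
	if errors.Is(err, fs.ErrNotExist) {
		if mkErr := os.MkdirAll(filepath.Dir(path), 0o700); mkErr != nil {
			return nil, mkErr
		}
		return os.OpenFile(path, os.O_RDWR|os.O_CREATE, 0o644)
	}
	return nil, err
}
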
diff --git a/vendor/github.com/containers/storage/pkg/lockfile/lockfile_unix.go b/vendor/github.com/containers/storage/pkg/lockfile/lockfile_unix.go
index 885f2f88a2..14c27c51fb 100644
--- a/vendor/github.com/containers/storage/pkg/lockfile/lockfile_unix.go
+++ b/vendor/github.com/containers/storage/pkg/lockfile/lockfile_unix.go
@@ -9,8 +9,6 @@ import (
"golang.org/x/sys/unix"
)
-type fileHandle uintptr
-
// GetLastWrite returns a LastWrite value corresponding to current state of the lock.
// This is typically called before (_not after_) loading the state when initializing a consumer
// of the data protected by the lock.
@@ -66,41 +64,3 @@ func (l *LockFile) TouchedSince(when time.Time) bool {
touched := time.Unix(mtim.Unix())
return when.Before(touched)
}
-
-func openHandle(path string, mode int) (fileHandle, error) {
- mode |= unix.O_CLOEXEC
- fd, err := unix.Open(path, mode, 0o644)
- return fileHandle(fd), err
-}
-
-func lockHandle(fd fileHandle, lType lockType, nonblocking bool) error {
- fType := unix.F_RDLCK
- if lType != readLock {
- fType = unix.F_WRLCK
- }
- lk := unix.Flock_t{
- Type: int16(fType),
- Whence: int16(unix.SEEK_SET),
- Start: 0,
- Len: 0,
- }
- cmd := unix.F_SETLKW
- if nonblocking {
- cmd = unix.F_SETLK
- }
- for {
- err := unix.FcntlFlock(uintptr(fd), cmd, &lk)
- if err == nil || nonblocking {
- return err
- }
- time.Sleep(10 * time.Millisecond)
- }
-}
-
-func unlockAndCloseHandle(fd fileHandle) {
- unix.Close(int(fd))
-}
-
-func closeHandle(fd fileHandle) {
- unix.Close(int(fd))
-}
diff --git a/vendor/github.com/containers/storage/pkg/lockfile/lockfile_windows.go b/vendor/github.com/containers/storage/pkg/lockfile/lockfile_windows.go
index 0cc1c50cc8..e66f7bfbbc 100644
--- a/vendor/github.com/containers/storage/pkg/lockfile/lockfile_windows.go
+++ b/vendor/github.com/containers/storage/pkg/lockfile/lockfile_windows.go
@@ -14,8 +14,6 @@ const (
allBytes = ^uint32(0)
)
-type fileHandle windows.Handle
-
// GetLastWrite returns a LastWrite value corresponding to current state of the lock.
// This is typically called before (_not after_) loading the state when initializing a consumer
// of the data protected by the lock.
@@ -73,37 +71,3 @@ func (l *LockFile) TouchedSince(when time.Time) bool {
}
return when.Before(stat.ModTime())
}
-
-func openHandle(path string, mode int) (fileHandle, error) {
- mode |= windows.O_CLOEXEC
- fd, err := windows.Open(path, mode, windows.S_IWRITE)
- return fileHandle(fd), err
-}
-
-func lockHandle(fd fileHandle, lType lockType, nonblocking bool) error {
- flags := 0
- if lType != readLock {
- flags = windows.LOCKFILE_EXCLUSIVE_LOCK
- }
- if nonblocking {
- flags |= windows.LOCKFILE_FAIL_IMMEDIATELY
- }
- ol := new(windows.Overlapped)
- if err := windows.LockFileEx(windows.Handle(fd), uint32(flags), reserved, allBytes, allBytes, ol); err != nil {
- if nonblocking {
- return err
- }
- panic(err)
- }
- return nil
-}
-
-func unlockAndCloseHandle(fd fileHandle) {
- ol := new(windows.Overlapped)
- windows.UnlockFileEx(windows.Handle(fd), reserved, allBytes, allBytes, ol)
- closeHandle(fd)
-}
-
-func closeHandle(fd fileHandle) {
- windows.Close(windows.Handle(fd))
-}
diff --git a/vendor/github.com/containers/storage/store.go b/vendor/github.com/containers/storage/store.go
index 1c5973623a..073a766f89 100644
--- a/vendor/github.com/containers/storage/store.go
+++ b/vendor/github.com/containers/storage/store.go
@@ -1445,16 +1445,7 @@ func (s *store) writeToAllStores(fn func(rlstore rwLayerStore) error) error {
// On entry:
// - rlstore must be locked for writing
func (s *store) canUseShifting(uidmap, gidmap []idtools.IDMap) bool {
- if !s.graphDriver.SupportsShifting() {
- return false
- }
- if uidmap != nil && !idtools.IsContiguous(uidmap) {
- return false
- }
- if gidmap != nil && !idtools.IsContiguous(gidmap) {
- return false
- }
- return true
+ return s.graphDriver.SupportsShifting(uidmap, gidmap)
}
// On entry:
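
With this change the store no longer checks ID-map contiguity itself; the whole decision, including the idtools.IsContiguous checks removed above, is delegated to the graph driver's SupportsShifting(uidmap, gidmap). One plausible shape of a driver-side implementation under that assumption (hypothetical; the real drivers also gate this on kernel and idmapped-mount support):

package example

import "github.com/containers/storage/pkg/idtools"

// supportsShifting sketches where the contiguity checks have to live after the
// signature change: behind the driver interface rather than in store.go.
func supportsShifting(nativeSupport bool, uidmap, gidmap []idtools.IDMap) bool {
	if !nativeSupport {
		return false
	}
	if uidmap != nil && !idtools.IsContiguous(uidmap) {
		return false
	}
	if gidmap != nil && !idtools.IsContiguous(gidmap) {
		return false
	}
	return true
}
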
diff --git a/vendor/github.com/go-openapi/analysis/.codecov.yml b/vendor/github.com/go-openapi/analysis/.codecov.yml
deleted file mode 100644
index 841c4281e2..0000000000
--- a/vendor/github.com/go-openapi/analysis/.codecov.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-coverage:
- status:
- patch:
- default:
- target: 80%
diff --git a/vendor/github.com/go-openapi/analysis/.gitattributes b/vendor/github.com/go-openapi/analysis/.gitattributes
deleted file mode 100644
index d020be8ea4..0000000000
--- a/vendor/github.com/go-openapi/analysis/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-*.go text eol=lf
-
diff --git a/vendor/github.com/go-openapi/analysis/.gitignore b/vendor/github.com/go-openapi/analysis/.gitignore
deleted file mode 100644
index 87c3bd3e66..0000000000
--- a/vendor/github.com/go-openapi/analysis/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-secrets.yml
-coverage.out
-coverage.txt
-*.cov
-.idea
diff --git a/vendor/github.com/go-openapi/analysis/.golangci.yml b/vendor/github.com/go-openapi/analysis/.golangci.yml
deleted file mode 100644
index 22f8d21cca..0000000000
--- a/vendor/github.com/go-openapi/analysis/.golangci.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-linters-settings:
- govet:
- check-shadowing: true
- golint:
- min-confidence: 0
- gocyclo:
- min-complexity: 45
- maligned:
- suggest-new: true
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - maligned
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - gomnd
- - exhaustivestruct
- - goerr113
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - ifshort
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- - deadcode
- - interfacer
- - scopelint
- - varcheck
- - structcheck
- - golint
- - nosnakecase
diff --git a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/analysis/LICENSE b/vendor/github.com/go-openapi/analysis/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/analysis/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/analysis/README.md b/vendor/github.com/go-openapi/analysis/README.md
deleted file mode 100644
index e005d4b37b..0000000000
--- a/vendor/github.com/go-openapi/analysis/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# OpenAPI analysis [](https://github.com/go-openapi/analysis/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/analysis)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/analysis)
-[](https://goreportcard.com/report/github.com/go-openapi/analysis)
-
-
-A foundational library to analyze an OAI specification document for easier reasoning about the content.
-
-## What's inside?
-
-* An analyzer providing methods to walk the functional content of a specification
-* A spec flattener producing a self-contained document bundle, while preserving `$ref`s
-* A spec merger ("mixin") to merge several spec documents into a primary spec
-* A spec "fixer" ensuring that response descriptions are non empty
-
-[Documentation](https://pkg.go.dev/github.com/go-openapi/analysis)
-
-## FAQ
-
-* Does this library support OpenAPI 3?
-
-> No.
-> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
-> There is no plan to make it evolve toward supporting OpenAPI 3.x.
-> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
diff --git a/vendor/github.com/go-openapi/analysis/analyzer.go b/vendor/github.com/go-openapi/analysis/analyzer.go
deleted file mode 100644
index c17aee1b61..0000000000
--- a/vendor/github.com/go-openapi/analysis/analyzer.go
+++ /dev/null
@@ -1,1064 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import (
- "fmt"
- slashpath "path"
- "strconv"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-type referenceAnalysis struct {
- schemas map[string]spec.Ref
- responses map[string]spec.Ref
- parameters map[string]spec.Ref
- items map[string]spec.Ref
- headerItems map[string]spec.Ref
- parameterItems map[string]spec.Ref
- allRefs map[string]spec.Ref
- pathItems map[string]spec.Ref
-}
-
-func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
- r.allRefs["#"+key] = ref
-}
-
-func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
- r.items["#"+key] = items.Ref
- r.addRef(key, items.Ref)
- if location == "header" {
- // NOTE: in swagger 2.0, headers and parameters (but not body param schemas) are simple schemas
- // and $ref are not supported here. However it is possible to analyze this.
- r.headerItems["#"+key] = items.Ref
- } else {
- r.parameterItems["#"+key] = items.Ref
- }
-}
-
-func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
- r.schemas["#"+key] = ref.Schema.Ref
- r.addRef(key, ref.Schema.Ref)
-}
-
-func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
- r.responses["#"+key] = resp.Ref
- r.addRef(key, resp.Ref)
-}
-
-func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
- r.parameters["#"+key] = param.Ref
- r.addRef(key, param.Ref)
-}
-
-func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
- r.pathItems["#"+key] = pathItem.Ref
- r.addRef(key, pathItem.Ref)
-}
-
-type patternAnalysis struct {
- parameters map[string]string
- headers map[string]string
- items map[string]string
- schemas map[string]string
- allPatterns map[string]string
-}
-
-func (p *patternAnalysis) addPattern(key, pattern string) {
- p.allPatterns["#"+key] = pattern
-}
-
-func (p *patternAnalysis) addParameterPattern(key, pattern string) {
- p.parameters["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
- p.headers["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-func (p *patternAnalysis) addItemsPattern(key, pattern string) {
- p.items["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
- p.schemas["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-type enumAnalysis struct {
- parameters map[string][]interface{}
- headers map[string][]interface{}
- items map[string][]interface{}
- schemas map[string][]interface{}
- allEnums map[string][]interface{}
-}
-
-func (p *enumAnalysis) addEnum(key string, enum []interface{}) {
- p.allEnums["#"+key] = enum
-}
-
-func (p *enumAnalysis) addParameterEnum(key string, enum []interface{}) {
- p.parameters["#"+key] = enum
- p.addEnum(key, enum)
-}
-
-func (p *enumAnalysis) addHeaderEnum(key string, enum []interface{}) {
- p.headers["#"+key] = enum
- p.addEnum(key, enum)
-}
-
-func (p *enumAnalysis) addItemsEnum(key string, enum []interface{}) {
- p.items["#"+key] = enum
- p.addEnum(key, enum)
-}
-
-func (p *enumAnalysis) addSchemaEnum(key string, enum []interface{}) {
- p.schemas["#"+key] = enum
- p.addEnum(key, enum)
-}
-
-// New takes a swagger spec object and returns an analyzed spec document.
-// The analyzed document contains a number of indices that make it easier to
-// reason about semantics of a swagger specification for use in code generation
-// or validation etc.
-func New(doc *spec.Swagger) *Spec {
- a := &Spec{
- spec: doc,
- references: referenceAnalysis{},
- patterns: patternAnalysis{},
- enums: enumAnalysis{},
- }
- a.reset()
- a.initialize()
-
- return a
-}
-
-// Spec is an analyzed specification object. It takes a swagger spec object and turns it into a registry
-// with a bunch of utility methods to act on the information in the spec.
-type Spec struct {
- spec *spec.Swagger
- consumes map[string]struct{}
- produces map[string]struct{}
- authSchemes map[string]struct{}
- operations map[string]map[string]*spec.Operation
- references referenceAnalysis
- patterns patternAnalysis
- enums enumAnalysis
- allSchemas map[string]SchemaRef
- allOfs map[string]SchemaRef
-}
-
-func (s *Spec) reset() {
- s.consumes = make(map[string]struct{}, 150)
- s.produces = make(map[string]struct{}, 150)
- s.authSchemes = make(map[string]struct{}, 150)
- s.operations = make(map[string]map[string]*spec.Operation, 150)
- s.allSchemas = make(map[string]SchemaRef, 150)
- s.allOfs = make(map[string]SchemaRef, 150)
- s.references.schemas = make(map[string]spec.Ref, 150)
- s.references.pathItems = make(map[string]spec.Ref, 150)
- s.references.responses = make(map[string]spec.Ref, 150)
- s.references.parameters = make(map[string]spec.Ref, 150)
- s.references.items = make(map[string]spec.Ref, 150)
- s.references.headerItems = make(map[string]spec.Ref, 150)
- s.references.parameterItems = make(map[string]spec.Ref, 150)
- s.references.allRefs = make(map[string]spec.Ref, 150)
- s.patterns.parameters = make(map[string]string, 150)
- s.patterns.headers = make(map[string]string, 150)
- s.patterns.items = make(map[string]string, 150)
- s.patterns.schemas = make(map[string]string, 150)
- s.patterns.allPatterns = make(map[string]string, 150)
- s.enums.parameters = make(map[string][]interface{}, 150)
- s.enums.headers = make(map[string][]interface{}, 150)
- s.enums.items = make(map[string][]interface{}, 150)
- s.enums.schemas = make(map[string][]interface{}, 150)
- s.enums.allEnums = make(map[string][]interface{}, 150)
-}
-
-func (s *Spec) reload() {
- s.reset()
- s.initialize()
-}
-
-func (s *Spec) initialize() {
- for _, c := range s.spec.Consumes {
- s.consumes[c] = struct{}{}
- }
- for _, c := range s.spec.Produces {
- s.produces[c] = struct{}{}
- }
- for _, ss := range s.spec.Security {
- for k := range ss {
- s.authSchemes[k] = struct{}{}
- }
- }
- for path, pathItem := range s.AllPaths() {
- s.analyzeOperations(path, &pathItem) //#nosec
- }
-
- for name, parameter := range s.spec.Parameters {
- refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
- if parameter.Items != nil {
- s.analyzeItems("items", parameter.Items, refPref, "parameter")
- }
- if parameter.In == "body" && parameter.Schema != nil {
- s.analyzeSchema("schema", parameter.Schema, refPref)
- }
- if parameter.Pattern != "" {
- s.patterns.addParameterPattern(refPref, parameter.Pattern)
- }
- if len(parameter.Enum) > 0 {
- s.enums.addParameterEnum(refPref, parameter.Enum)
- }
- }
-
- for name, response := range s.spec.Responses {
- refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
- for k, v := range response.Headers {
- hRefPref := slashpath.Join(refPref, "headers", k)
- if v.Items != nil {
- s.analyzeItems("items", v.Items, hRefPref, "header")
- }
- if v.Pattern != "" {
- s.patterns.addHeaderPattern(hRefPref, v.Pattern)
- }
- if len(v.Enum) > 0 {
- s.enums.addHeaderEnum(hRefPref, v.Enum)
- }
- }
- if response.Schema != nil {
- s.analyzeSchema("schema", response.Schema, refPref)
- }
- }
-
- for name := range s.spec.Definitions {
- schema := s.spec.Definitions[name]
- s.analyzeSchema(name, &schema, "/definitions")
- }
- // TODO: after analyzing all things and flattening schemas etc
- // resolve all the collected references to their final representations
- // best put in a separate method because this could get expensive
-}
-
-func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
- // TODO: resolve refs here?
- // Currently, operations declared via pathItem $ref are known only after expansion
- op := pi
- if pi.Ref.String() != "" {
- key := slashpath.Join("/paths", jsonpointer.Escape(path))
- s.references.addPathItemRef(key, pi)
- }
- s.analyzeOperation("GET", path, op.Get)
- s.analyzeOperation("PUT", path, op.Put)
- s.analyzeOperation("POST", path, op.Post)
- s.analyzeOperation("PATCH", path, op.Patch)
- s.analyzeOperation("DELETE", path, op.Delete)
- s.analyzeOperation("HEAD", path, op.Head)
- s.analyzeOperation("OPTIONS", path, op.Options)
- for i, param := range op.Parameters {
- refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
- if param.Ref.String() != "" {
- s.references.addParamRef(refPref, &param) //#nosec
- }
- if param.Pattern != "" {
- s.patterns.addParameterPattern(refPref, param.Pattern)
- }
- if len(param.Enum) > 0 {
- s.enums.addParameterEnum(refPref, param.Enum)
- }
- if param.Items != nil {
- s.analyzeItems("items", param.Items, refPref, "parameter")
- }
- if param.Schema != nil {
- s.analyzeSchema("schema", param.Schema, refPref)
- }
- }
-}
-
-func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
- if items == nil {
- return
- }
- refPref := slashpath.Join(prefix, name)
- s.analyzeItems(name, items.Items, refPref, location)
- if items.Ref.String() != "" {
- s.references.addItemsRef(refPref, items, location)
- }
- if items.Pattern != "" {
- s.patterns.addItemsPattern(refPref, items.Pattern)
- }
- if len(items.Enum) > 0 {
- s.enums.addItemsEnum(refPref, items.Enum)
- }
-}
-
-func (s *Spec) analyzeParameter(prefix string, i int, param spec.Parameter) {
- refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
- if param.Ref.String() != "" {
- s.references.addParamRef(refPref, &param) //#nosec
- }
-
- if param.Pattern != "" {
- s.patterns.addParameterPattern(refPref, param.Pattern)
- }
-
- if len(param.Enum) > 0 {
- s.enums.addParameterEnum(refPref, param.Enum)
- }
-
- s.analyzeItems("items", param.Items, refPref, "parameter")
- if param.In == "body" && param.Schema != nil {
- s.analyzeSchema("schema", param.Schema, refPref)
- }
-}
-
-func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
- if op == nil {
- return
- }
-
- for _, c := range op.Consumes {
- s.consumes[c] = struct{}{}
- }
-
- for _, c := range op.Produces {
- s.produces[c] = struct{}{}
- }
-
- for _, ss := range op.Security {
- for k := range ss {
- s.authSchemes[k] = struct{}{}
- }
- }
-
- if _, ok := s.operations[method]; !ok {
- s.operations[method] = make(map[string]*spec.Operation)
- }
-
- s.operations[method][path] = op
- prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
- for i, param := range op.Parameters {
- s.analyzeParameter(prefix, i, param)
- }
-
- if op.Responses == nil {
- return
- }
-
- if op.Responses.Default != nil {
- s.analyzeDefaultResponse(prefix, op.Responses.Default)
- }
-
- for k, res := range op.Responses.StatusCodeResponses {
- s.analyzeResponse(prefix, k, res)
- }
-}
-
-func (s *Spec) analyzeDefaultResponse(prefix string, res *spec.Response) {
- refPref := slashpath.Join(prefix, "responses", "default")
- if res.Ref.String() != "" {
- s.references.addResponseRef(refPref, res)
- }
-
- for k, v := range res.Headers {
- hRefPref := slashpath.Join(refPref, "headers", k)
- s.analyzeItems("items", v.Items, hRefPref, "header")
- if v.Pattern != "" {
- s.patterns.addHeaderPattern(hRefPref, v.Pattern)
- }
- }
-
- if res.Schema != nil {
- s.analyzeSchema("schema", res.Schema, refPref)
- }
-}
-
-func (s *Spec) analyzeResponse(prefix string, k int, res spec.Response) {
- refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
- if res.Ref.String() != "" {
- s.references.addResponseRef(refPref, &res) //#nosec
- }
-
- for k, v := range res.Headers {
- hRefPref := slashpath.Join(refPref, "headers", k)
- s.analyzeItems("items", v.Items, hRefPref, "header")
- if v.Pattern != "" {
- s.patterns.addHeaderPattern(hRefPref, v.Pattern)
- }
-
- if len(v.Enum) > 0 {
- s.enums.addHeaderEnum(hRefPref, v.Enum)
- }
- }
-
- if res.Schema != nil {
- s.analyzeSchema("schema", res.Schema, refPref)
- }
-}
-
-func (s *Spec) analyzeSchema(name string, schema *spec.Schema, prefix string) {
- refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
- schRef := SchemaRef{
- Name: name,
- Schema: schema,
- Ref: spec.MustCreateRef("#" + refURI),
- TopLevel: prefix == "/definitions",
- }
-
- s.allSchemas["#"+refURI] = schRef
-
- if schema.Ref.String() != "" {
- s.references.addSchemaRef(refURI, schRef)
- }
-
- if schema.Pattern != "" {
- s.patterns.addSchemaPattern(refURI, schema.Pattern)
- }
-
- if len(schema.Enum) > 0 {
- s.enums.addSchemaEnum(refURI, schema.Enum)
- }
-
- for k, v := range schema.Definitions {
- v := v
- s.analyzeSchema(k, &v, slashpath.Join(refURI, "definitions"))
- }
-
- for k, v := range schema.Properties {
- v := v
- s.analyzeSchema(k, &v, slashpath.Join(refURI, "properties"))
- }
-
- for k, v := range schema.PatternProperties {
- v := v
- // NOTE: swagger 2.0 does not support PatternProperties.
- // However it is possible to analyze this in a schema
- s.analyzeSchema(k, &v, slashpath.Join(refURI, "patternProperties"))
- }
-
- for i := range schema.AllOf {
- v := &schema.AllOf[i]
- s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
- }
-
- if len(schema.AllOf) > 0 {
- s.allOfs["#"+refURI] = schRef
- }
-
- for i := range schema.AnyOf {
- v := &schema.AnyOf[i]
- // NOTE: swagger 2.0 does not support anyOf constructs.
- // However it is possible to analyze this in a schema
- s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
- }
-
- for i := range schema.OneOf {
- v := &schema.OneOf[i]
- // NOTE: swagger 2.0 does not support oneOf constructs.
- // However it is possible to analyze this in a schema
- s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
- }
-
- if schema.Not != nil {
- // NOTE: swagger 2.0 does not support "not" constructs.
- // However it is possible to analyze this in a schema
- s.analyzeSchema("not", schema.Not, refURI)
- }
-
- if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
- s.analyzeSchema("additionalProperties", schema.AdditionalProperties.Schema, refURI)
- }
-
- if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
- // NOTE: swagger 2.0 does not support AdditionalItems.
- // However it is possible to analyze this in a schema
- s.analyzeSchema("additionalItems", schema.AdditionalItems.Schema, refURI)
- }
-
- if schema.Items != nil {
- if schema.Items.Schema != nil {
- s.analyzeSchema("items", schema.Items.Schema, refURI)
- }
-
- for i := range schema.Items.Schemas {
- sch := &schema.Items.Schemas[i]
- s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
- }
- }
-}
-
-// SecurityRequirement is a representation of a security requirement for an operation
-type SecurityRequirement struct {
- Name string
- Scopes []string
-}
-
-// SecurityRequirementsFor gets the security requirements for the operation
-func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) [][]SecurityRequirement {
- if s.spec.Security == nil && operation.Security == nil {
- return nil
- }
-
- schemes := s.spec.Security
- if operation.Security != nil {
- schemes = operation.Security
- }
-
- result := [][]SecurityRequirement{}
- for _, scheme := range schemes {
- if len(scheme) == 0 {
- // append a zero object for anonymous
- result = append(result, []SecurityRequirement{{}})
-
- continue
- }
-
- var reqs []SecurityRequirement
- for k, v := range scheme {
- if v == nil {
- v = []string{}
- }
- reqs = append(reqs, SecurityRequirement{Name: k, Scopes: v})
- }
-
- result = append(result, reqs)
- }
-
- return result
-}
-
-// SecurityDefinitionsForRequirements gets the matching security definitions for a set of requirements
-func (s *Spec) SecurityDefinitionsForRequirements(requirements []SecurityRequirement) map[string]spec.SecurityScheme {
- result := make(map[string]spec.SecurityScheme)
-
- for _, v := range requirements {
- if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
- if definition != nil {
- result[v.Name] = *definition
- }
- }
- }
-
- return result
-}
-
-// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
-func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
- requirements := s.SecurityRequirementsFor(operation)
- if len(requirements) == 0 {
- return nil
- }
-
- result := make(map[string]spec.SecurityScheme)
- for _, reqs := range requirements {
- for _, v := range reqs {
- if v.Name == "" {
- // optional requirement
- continue
- }
-
- if _, ok := result[v.Name]; ok {
- // duplicate requirement
- continue
- }
-
- if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
- if definition != nil {
- result[v.Name] = *definition
- }
- }
- }
- }
-
- return result
-}
-
-// ConsumesFor gets the mediatypes for the operation
-func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
- if len(operation.Consumes) == 0 {
- cons := make(map[string]struct{}, len(s.spec.Consumes))
- for _, k := range s.spec.Consumes {
- cons[k] = struct{}{}
- }
-
- return s.structMapKeys(cons)
- }
-
- cons := make(map[string]struct{}, len(operation.Consumes))
- for _, c := range operation.Consumes {
- cons[c] = struct{}{}
- }
-
- return s.structMapKeys(cons)
-}
-
-// ProducesFor gets the mediatypes for the operation
-func (s *Spec) ProducesFor(operation *spec.Operation) []string {
- if len(operation.Produces) == 0 {
- prod := make(map[string]struct{}, len(s.spec.Produces))
- for _, k := range s.spec.Produces {
- prod[k] = struct{}{}
- }
-
- return s.structMapKeys(prod)
- }
-
- prod := make(map[string]struct{}, len(operation.Produces))
- for _, c := range operation.Produces {
- prod[c] = struct{}{}
- }
-
- return s.structMapKeys(prod)
-}
-
-func mapKeyFromParam(param *spec.Parameter) string {
- return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
-}
-
-func fieldNameFromParam(param *spec.Parameter) string {
- // TODO: this should be x-go-name
- if nm, ok := param.Extensions.GetString("go-name"); ok {
- return nm
- }
-
- return swag.ToGoName(param.Name)
-}
-
-// ErrorOnParamFunc is a callback function to be invoked
-// whenever an error is encountered while resolving references
-// on parameters.
-//
-// This function takes as input the spec.Parameter which triggered the
-// error and the error itself.
-//
-// If the callback function returns false, the calling function should bail.
-//
-// If it returns true, the calling function should continue evaluating parameters.
-// A nil ErrorOnParamFunc must be evaluated as equivalent to panic().
-type ErrorOnParamFunc func(spec.Parameter, error) bool
-
-func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter, callmeOnError ErrorOnParamFunc) {
- for _, param := range parameters {
- pr := param
- if pr.Ref.String() == "" {
- res[mapKeyFromParam(&pr)] = pr
-
- continue
- }
-
- // resolve $ref
- if callmeOnError == nil {
- callmeOnError = func(_ spec.Parameter, err error) bool {
- panic(err)
- }
- }
-
- obj, _, err := pr.Ref.GetPointer().Get(s.spec)
- if err != nil {
- if callmeOnError(param, fmt.Errorf("invalid reference: %q", pr.Ref.String())) {
- continue
- }
-
- break
- }
-
- objAsParam, ok := obj.(spec.Parameter)
- if !ok {
- if callmeOnError(param, fmt.Errorf("resolved reference is not a parameter: %q", pr.Ref.String())) {
- continue
- }
-
- break
- }
-
- pr = objAsParam
- res[mapKeyFromParam(&pr)] = pr
- }
-}
-
-// ParametersFor the specified operation id.
-//
-// Assumes parameters properly resolve references if any and that
-// such references actually resolve to a parameter object.
-// Otherwise, panics.
-func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
- return s.SafeParametersFor(operationID, nil)
-}
-
-// SafeParametersFor the specified operation id.
-//
-// Does not assume parameters properly resolve references or that
-// such references actually resolve to a parameter object.
-//
-// Upon error, invoke a ErrorOnParamFunc callback with the erroneous
-// parameters. If the callback is set to nil, panics upon errors.
-func (s *Spec) SafeParametersFor(operationID string, callmeOnError ErrorOnParamFunc) []spec.Parameter {
- gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
- bag := make(map[string]spec.Parameter)
- s.paramsAsMap(pi.Parameters, bag, callmeOnError)
- s.paramsAsMap(op.Parameters, bag, callmeOnError)
-
- var res []spec.Parameter
- for _, v := range bag {
- res = append(res, v)
- }
-
- return res
- }
-
- for _, pi := range s.spec.Paths.Paths {
- if pi.Get != nil && pi.Get.ID == operationID {
- return gatherParams(&pi, pi.Get) //#nosec
- }
- if pi.Head != nil && pi.Head.ID == operationID {
- return gatherParams(&pi, pi.Head) //#nosec
- }
- if pi.Options != nil && pi.Options.ID == operationID {
- return gatherParams(&pi, pi.Options) //#nosec
- }
- if pi.Post != nil && pi.Post.ID == operationID {
- return gatherParams(&pi, pi.Post) //#nosec
- }
- if pi.Patch != nil && pi.Patch.ID == operationID {
- return gatherParams(&pi, pi.Patch) //#nosec
- }
- if pi.Put != nil && pi.Put.ID == operationID {
- return gatherParams(&pi, pi.Put) //#nosec
- }
- if pi.Delete != nil && pi.Delete.ID == operationID {
- return gatherParams(&pi, pi.Delete) //#nosec
- }
- }
-
- return nil
-}
-
-// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
-// apply for the method and path.
-//
-// Assumes parameters properly resolve references if any and that
-// such references actually resolve to a parameter object.
-// Otherwise, panics.
-func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
- return s.SafeParamsFor(method, path, nil)
-}
-
-// SafeParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
-// apply for the method and path.
-//
-// Does not assume parameters properly resolve references or that
-// such references actually resolve to a parameter object.
-//
-// Upon error, invoke a ErrorOnParamFunc callback with the erroneous
-// parameters. If the callback is set to nil, panics upon errors.
-func (s *Spec) SafeParamsFor(method, path string, callmeOnError ErrorOnParamFunc) map[string]spec.Parameter {
- res := make(map[string]spec.Parameter)
- if pi, ok := s.spec.Paths.Paths[path]; ok {
- s.paramsAsMap(pi.Parameters, res, callmeOnError)
- s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res, callmeOnError)
- }
-
- return res
-}
-
-// OperationForName gets the operation for the given id
-func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
- for method, pathItem := range s.operations {
- for path, op := range pathItem {
- if operationID == op.ID {
- return method, path, op, true
- }
- }
- }
-
- return "", "", nil, false
-}
-
-// OperationFor the given method and path
-func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
- if mp, ok := s.operations[strings.ToUpper(method)]; ok {
- op, fn := mp[path]
-
- return op, fn
- }
-
- return nil, false
-}
-
-// Operations gathers all the operations specified in the spec document
-func (s *Spec) Operations() map[string]map[string]*spec.Operation {
- return s.operations
-}
-
-func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
- if len(mp) == 0 {
- return nil
- }
-
- result := make([]string, 0, len(mp))
- for k := range mp {
- result = append(result, k)
- }
-
- return result
-}
-
-// AllPaths returns all the paths in the swagger spec
-func (s *Spec) AllPaths() map[string]spec.PathItem {
- if s.spec == nil || s.spec.Paths == nil {
- return nil
- }
-
- return s.spec.Paths.Paths
-}
-
-// OperationIDs gets all the operation ids based on method and path
-func (s *Spec) OperationIDs() []string {
- if len(s.operations) == 0 {
- return nil
- }
-
- result := make([]string, 0, len(s.operations))
- for method, v := range s.operations {
- for p, o := range v {
- if o.ID != "" {
- result = append(result, o.ID)
- } else {
- result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
- }
- }
- }
-
- return result
-}
-
-// OperationMethodPaths gets all the method/path combinations for the operations in the spec
-func (s *Spec) OperationMethodPaths() []string {
- if len(s.operations) == 0 {
- return nil
- }
-
- result := make([]string, 0, len(s.operations))
- for method, v := range s.operations {
- for p := range v {
- result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
- }
- }
-
- return result
-}
-
-// RequiredConsumes gets all the distinct consumes that are specified in the specification document
-func (s *Spec) RequiredConsumes() []string {
- return s.structMapKeys(s.consumes)
-}
-
-// RequiredProduces gets all the distinct produces that are specified in the specification document
-func (s *Spec) RequiredProduces() []string {
- return s.structMapKeys(s.produces)
-}
-
-// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
-func (s *Spec) RequiredSecuritySchemes() []string {
- return s.structMapKeys(s.authSchemes)
-}
-
-// SchemaRef is a reference to a schema
-type SchemaRef struct {
- Name string
- Ref spec.Ref
- Schema *spec.Schema
- TopLevel bool
-}
-
-// SchemasWithAllOf returns schema references to all schemas that are defined
-// with an allOf key
-func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
- for _, v := range s.allOfs {
- result = append(result, v)
- }
-
- return
-}
-
-// AllDefinitions returns schema references for all the definitions that were discovered
-func (s *Spec) AllDefinitions() (result []SchemaRef) {
- for _, v := range s.allSchemas {
- result = append(result, v)
- }
-
- return
-}
-
-// AllDefinitionReferences returns json refs for all the discovered schemas
-func (s *Spec) AllDefinitionReferences() (result []string) {
- for _, v := range s.references.schemas {
- result = append(result, v.String())
- }
-
- return
-}
-
-// AllParameterReferences returns json refs for all the discovered parameters
-func (s *Spec) AllParameterReferences() (result []string) {
- for _, v := range s.references.parameters {
- result = append(result, v.String())
- }
-
- return
-}
-
-// AllResponseReferences returns json refs for all the discovered responses
-func (s *Spec) AllResponseReferences() (result []string) {
- for _, v := range s.references.responses {
- result = append(result, v.String())
- }
-
- return
-}
-
-// AllPathItemReferences returns the references for all the items
-func (s *Spec) AllPathItemReferences() (result []string) {
- for _, v := range s.references.pathItems {
- result = append(result, v.String())
- }
-
- return
-}
-
-// AllItemsReferences returns the references for all the items in simple schemas (parameters or headers).
-//
-// NOTE: since Swagger 2.0 forbids $ref in simple params, this should always yield an empty slice for a valid
-// Swagger 2.0 spec.
-func (s *Spec) AllItemsReferences() (result []string) {
- for _, v := range s.references.items {
- result = append(result, v.String())
- }
-
- return
-}
-
-// AllReferences returns all the references found in the document, with possible duplicates
-func (s *Spec) AllReferences() (result []string) {
- for _, v := range s.references.allRefs {
- result = append(result, v.String())
- }
-
- return
-}
-
-// AllRefs returns all the unique references found in the document
-func (s *Spec) AllRefs() (result []spec.Ref) {
- set := make(map[string]struct{})
- for _, v := range s.references.allRefs {
- a := v.String()
- if a == "" {
- continue
- }
-
- if _, ok := set[a]; !ok {
- set[a] = struct{}{}
- result = append(result, v)
- }
- }
-
- return
-}
-
-func cloneStringMap(source map[string]string) map[string]string {
- res := make(map[string]string, len(source))
- for k, v := range source {
- res[k] = v
- }
-
- return res
-}
-
-func cloneEnumMap(source map[string][]interface{}) map[string][]interface{} {
- res := make(map[string][]interface{}, len(source))
- for k, v := range source {
- res[k] = v
- }
-
- return res
-}
-
-// ParameterPatterns returns all the patterns found in parameters
-// the map is cloned to avoid accidental changes
-func (s *Spec) ParameterPatterns() map[string]string {
- return cloneStringMap(s.patterns.parameters)
-}
-
-// HeaderPatterns returns all the patterns found in response headers
-// the map is cloned to avoid accidental changes
-func (s *Spec) HeaderPatterns() map[string]string {
- return cloneStringMap(s.patterns.headers)
-}
-
-// ItemsPatterns returns all the patterns found in simple array items
-// the map is cloned to avoid accidental changes
-func (s *Spec) ItemsPatterns() map[string]string {
- return cloneStringMap(s.patterns.items)
-}
-
-// SchemaPatterns returns all the patterns found in schemas
-// the map is cloned to avoid accidental changes
-func (s *Spec) SchemaPatterns() map[string]string {
- return cloneStringMap(s.patterns.schemas)
-}
-
-// AllPatterns returns all the patterns found in the spec
-// the map is cloned to avoid accidental changes
-func (s *Spec) AllPatterns() map[string]string {
- return cloneStringMap(s.patterns.allPatterns)
-}
-
-// ParameterEnums returns all the enums found in parameters
-// the map is cloned to avoid accidental changes
-func (s *Spec) ParameterEnums() map[string][]interface{} {
- return cloneEnumMap(s.enums.parameters)
-}
-
-// HeaderEnums returns all the enums found in response headers
-// the map is cloned to avoid accidental changes
-func (s *Spec) HeaderEnums() map[string][]interface{} {
- return cloneEnumMap(s.enums.headers)
-}
-
-// ItemsEnums returns all the enums found in simple array items
-// the map is cloned to avoid accidental changes
-func (s *Spec) ItemsEnums() map[string][]interface{} {
- return cloneEnumMap(s.enums.items)
-}
-
-// SchemaEnums returns all the enums found in schemas
-// the map is cloned to avoid accidental changes
-func (s *Spec) SchemaEnums() map[string][]interface{} {
- return cloneEnumMap(s.enums.schemas)
-}
-
-// AllEnums returns all the enums found in the spec
-// the map is cloned to avoid accidental changes
-func (s *Spec) AllEnums() map[string][]interface{} {
- return cloneEnumMap(s.enums.allEnums)
-}
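The analyzer.go file removed above provided the analysis.Spec registry (New, ParamsFor, OperationIDs, SecurityRequirementsFor, ...). For context, a minimal sketch of how a consumer might use that API; this is not part of the removed vendor tree, it assumes the upstream go-openapi/loads and go-openapi/analysis modules, and the spec path and route are hypothetical:

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/loads"
)

func main() {
	// Load a hypothetical Swagger 2.0 document and build the analyzed registry.
	doc, err := loads.Spec("./swagger.yaml")
	if err != nil {
		log.Fatal(err)
	}
	an := analysis.New(doc.Spec())

	// List every operation id (or "METHOD path" when no id is set).
	for _, id := range an.OperationIDs() {
		fmt.Println("operation:", id)
	}

	// Aggregate path-level and operation-level parameters for one route.
	for name, param := range an.ParamsFor("GET", "/pets/{id}") {
		fmt.Printf("param %s (in %s)\n", name, param.In)
	}
}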
diff --git a/vendor/github.com/go-openapi/analysis/debug.go b/vendor/github.com/go-openapi/analysis/debug.go
deleted file mode 100644
index 33c15704ec..0000000000
--- a/vendor/github.com/go-openapi/analysis/debug.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import (
- "os"
-
- "github.com/go-openapi/analysis/internal/debug"
-)
-
-var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "")
diff --git a/vendor/github.com/go-openapi/analysis/doc.go b/vendor/github.com/go-openapi/analysis/doc.go
deleted file mode 100644
index e8d9f9b131..0000000000
--- a/vendor/github.com/go-openapi/analysis/doc.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/*
-Package analysis provides methods to work with a Swagger specification document from
-package go-openapi/spec.
-
-## Analyzing a specification
-
-An analyzed specification object (type Spec) provides methods to work with a swagger definition.
-
-## Flattening or expanding a specification
-
-Flattening a specification bundles all remote $ref in the main spec document.
-Depending on flattening options, additional preprocessing may take place:
- - full flattening: replacing all inline complex constructs by a named entry in #/definitions
- - expand: replace all $ref's in the document by their expanded content
-
-## Merging several specifications
-
-Mixing in several specifications merges all Swagger constructs and warns about any conflicts found.
-
-## Fixing a specification
-
-Unmarshalling a specification with Go's standard json unmarshalling may lead to
-unwanted results for fields that are present but empty.
-
-## Analyzing a Swagger schema
-
-Swagger schemas are analyzed to determine their complexity and qualify their content.
-*/
-package analysis
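The doc.go overview above also mentions merging several specifications. Upstream that is the Mixin helper; a hedged sketch (not part of this diff, assuming the Mixin(primary, mixins...) signature and reusing the loads/analysis imports from the sketch above; the file names are placeholders):

primary, _ := loads.Spec("./primary.yaml")
extra, _ := loads.Spec("./extra.yaml")

// Mixin folds the constructs from extra into primary and returns
// a list of human-readable collision warnings, if any.
for _, warning := range analysis.Mixin(primary.Spec(), extra.Spec()) {
	log.Println("mixin conflict:", warning)
}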
diff --git a/vendor/github.com/go-openapi/analysis/fixer.go b/vendor/github.com/go-openapi/analysis/fixer.go
deleted file mode 100644
index 7c2ca08416..0000000000
--- a/vendor/github.com/go-openapi/analysis/fixer.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import "github.com/go-openapi/spec"
-
-// FixEmptyResponseDescriptions replaces empty ("") response
-// descriptions in the input with "(empty)" to ensure that the
-// resulting Swagger stays valid. The problem appears to arise
-// from reading in valid specs that have an explicit response
-// description of "" (valid, since response.description is required), but
-// due to zero values being omitted upon re-serializing (omitempty) we
-// lose them unless we stick some chars in there.
-func FixEmptyResponseDescriptions(s *spec.Swagger) {
- for k, v := range s.Responses {
- FixEmptyDesc(&v) //#nosec
- s.Responses[k] = v
- }
-
- if s.Paths == nil {
- return
- }
-
- for _, v := range s.Paths.Paths {
- if v.Get != nil {
- FixEmptyDescs(v.Get.Responses)
- }
- if v.Put != nil {
- FixEmptyDescs(v.Put.Responses)
- }
- if v.Post != nil {
- FixEmptyDescs(v.Post.Responses)
- }
- if v.Delete != nil {
- FixEmptyDescs(v.Delete.Responses)
- }
- if v.Options != nil {
- FixEmptyDescs(v.Options.Responses)
- }
- if v.Head != nil {
- FixEmptyDescs(v.Head.Responses)
- }
- if v.Patch != nil {
- FixEmptyDescs(v.Patch.Responses)
- }
- }
-}
-
-// FixEmptyDescs adds "(empty)" as the description for any Response in
-// the given Responses object that doesn't already have one.
-func FixEmptyDescs(rs *spec.Responses) {
- FixEmptyDesc(rs.Default)
- for k, v := range rs.StatusCodeResponses {
- FixEmptyDesc(&v) //#nosec
- rs.StatusCodeResponses[k] = v
- }
-}
-
-// FixEmptyDesc adds "(empty)" as the description to the given
-// Response object if it doesn't already have one and isn't a
-// ref. No-op on nil input.
-func FixEmptyDesc(rs *spec.Response) {
- if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
- return
- }
- rs.Description = "(empty)"
-}
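A small sketch of how the fixer above was typically used before re-serializing a spec (hedged: assumes the same loads/analysis imports as the earlier sketch plus encoding/json; the input path is hypothetical):

doc, err := loads.Spec("./swagger.yaml")
if err != nil {
	log.Fatal(err)
}
sw := doc.Spec()

// Give empty response descriptions a "(empty)" placeholder so the
// document stays valid after re-marshalling with omitempty fields.
analysis.FixEmptyResponseDescriptions(sw)

out, err := json.MarshalIndent(sw, "", "  ")
if err != nil {
	log.Fatal(err)
}
fmt.Println(string(out))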
diff --git a/vendor/github.com/go-openapi/analysis/flatten.go b/vendor/github.com/go-openapi/analysis/flatten.go
deleted file mode 100644
index ebedcc9df3..0000000000
--- a/vendor/github.com/go-openapi/analysis/flatten.go
+++ /dev/null
@@ -1,814 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import (
- "fmt"
- "log"
- "path"
- "sort"
- "strings"
-
- "github.com/go-openapi/analysis/internal/flatten/normalize"
- "github.com/go-openapi/analysis/internal/flatten/operations"
- "github.com/go-openapi/analysis/internal/flatten/replace"
- "github.com/go-openapi/analysis/internal/flatten/schutils"
- "github.com/go-openapi/analysis/internal/flatten/sortref"
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
-)
-
-const definitionsPath = "#/definitions"
-
-// newRef stores information about refs created during the flattening process
-type newRef struct {
- key string
- newName string
- path string
- isOAIGen bool
- resolved bool
- schema *spec.Schema
- parents []string
-}
-
-// context stores intermediary results from flatten
-type context struct {
- newRefs map[string]*newRef
- warnings []string
- resolved map[string]string
-}
-
-func newContext() *context {
- return &context{
- newRefs: make(map[string]*newRef, 150),
- warnings: make([]string, 0),
- resolved: make(map[string]string, 50),
- }
-}
-
-// Flatten an analyzed spec and produce a self-contained spec bundle.
-//
-// There is a minimal and a full flattening mode.
-//
-// Minimally flattening a spec means:
-// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left
-// unscathed)
-// - Importing external (http, file) references so they become internal to the document
-// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers
-// like "$ref": "#/definitions/myObject/allOfs/1")
-//
-// A minimally flattened spec thus guarantees the following properties:
-// - all $refs point to a local definition (i.e. '#/definitions/...')
-// - definitions are unique
-//
-// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they
-// represent a complex schema or express commonality in the spec.
-// Otherwise, they are simply expanded.
-// Self-referencing JSON pointers cannot resolve to a type and trigger an error.
-//
-// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger.
-//
-// Fully flattening a spec means:
-// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion.
-//
-// By complex, we mean every JSON object with some properties.
-// Arrays, when they do not define a tuple,
-// or empty objects with or without additionalProperties, are not considered complex and remain inline.
-//
-// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions
-// have been created.
-//
-// Available flattening options:
-// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched
-//   - Expand: expand all $ref's in the document (inoperative if Minimal is set to true)
-// - Verbose: croaks about name conflicts detected
-// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening
-//
-// NOTE: expansion removes all $ref save circular $ref, which remain in place
-//
-// TODO: additional options
-//   - PropagateNameExtensions: ensure that created entries properly follow naming rules when their parents have set an
-//     x-go-name extension
-// - LiftAllOfs:
-// - limit the flattening of allOf members when simple objects
-// - merge allOf with validation only
-// - merge allOf with extensions only
-// - ...
-func Flatten(opts FlattenOpts) error {
- debugLog("FlattenOpts: %#v", opts)
-
- opts.flattenContext = newContext()
-
- // 1. Recursively expand responses, parameters, path items and items in simple schemas.
- //
- // This simplifies the spec and leaves only the $ref's in schema objects.
- if err := expand(&opts); err != nil {
- return err
- }
-
- // 2. Strip the current document from absolute $ref's that actually are in the root,
- // so we can recognize them as proper definitions
- //
- // In particular, this works around issue go-openapi/spec#76: leading absolute file in $ref is stripped
- if err := normalizeRef(&opts); err != nil {
- return err
- }
-
- // 3. Optionally remove shared parameters and responses already expanded (now unused).
- //
- // Operation parameters (i.e. under paths) remain.
- if opts.RemoveUnused {
- removeUnusedShared(&opts)
- }
-
- // 4. Import all remote references.
- if err := importReferences(&opts); err != nil {
- return err
- }
-
- // 5. full flattening: rewrite inline schemas (schemas that aren't simple types or arrays or maps)
- if !opts.Minimal && !opts.Expand {
- if err := nameInlinedSchemas(&opts); err != nil {
- return err
- }
- }
-
- // 6. Rewrite JSON pointers other than $ref to named definitions
- // and attempt to resolve conflicting names whenever possible.
- if err := stripPointersAndOAIGen(&opts); err != nil {
- return err
- }
-
- // 7. Strip the spec from unused definitions
- if opts.RemoveUnused {
- removeUnused(&opts)
- }
-
- // 8. Issue warning notifications, if any
- opts.croak()
-
- // TODO: simplify known schema patterns to flat objects with properties
- // examples:
- // - lift simple allOf object,
- // - empty allOf with validation only or extensions only
- // - rework allOf arrays
- // - rework allOf additionalProperties
-
- return nil
-}
-
-func expand(opts *FlattenOpts) error {
- if err := spec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil {
- return err
- }
-
- opts.Spec.reload() // re-analyze
-
- return nil
-}
-
-// normalizeRef strips the current file from any absolute file $ref. This works around issue go-openapi/spec#76:
-// leading absolute file in $ref is stripped
-func normalizeRef(opts *FlattenOpts) error {
- debugLog("normalizeRef")
-
- altered := false
- for k, w := range opts.Spec.references.allRefs {
- if !strings.HasPrefix(w.String(), opts.BasePath+definitionsPath) { // may be a mix of / and \, depending on OS
- continue
- }
-
- altered = true
- debugLog("stripping absolute path for: %s", w.String())
-
- // strip the base path from definition
- if err := replace.UpdateRef(opts.Swagger(), k,
- spec.MustCreateRef(path.Join(definitionsPath, path.Base(w.String())))); err != nil {
- return err
- }
- }
-
- if altered {
- opts.Spec.reload() // re-analyze
- }
-
- return nil
-}
-
-func removeUnusedShared(opts *FlattenOpts) {
- opts.Swagger().Parameters = nil
- opts.Swagger().Responses = nil
-
- opts.Spec.reload() // re-analyze
-}
-
-func importReferences(opts *FlattenOpts) error {
- var (
- imported bool
- err error
- )
-
- for !imported && err == nil {
- // iteratively import remote references until none left.
- // This inlining deals with name conflicts by introducing auto-generated names ("OAIGen")
- imported, err = importExternalReferences(opts)
-
- opts.Spec.reload() // re-analyze
- }
-
- return err
-}
-
-// nameInlinedSchemas replaces every complex inline construct by a named definition.
-func nameInlinedSchemas(opts *FlattenOpts) error {
- debugLog("nameInlinedSchemas")
-
- namer := &InlineSchemaNamer{
- Spec: opts.Swagger(),
- Operations: operations.AllOpRefsByRef(opts.Spec, nil),
- flattenContext: opts.flattenContext,
- opts: opts,
- }
-
- depthFirst := sortref.DepthFirst(opts.Spec.allSchemas)
- for _, key := range depthFirst {
- sch := opts.Spec.allSchemas[key]
- if sch.Schema == nil || sch.Schema.Ref.String() != "" || sch.TopLevel {
- continue
- }
-
- asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
- if err != nil {
- return fmt.Errorf("schema analysis [%s]: %w", key, err)
- }
-
- if asch.isAnalyzedAsComplex() { // move complex schemas to definitions
- if err := namer.Name(key, sch.Schema, asch); err != nil {
- return err
- }
- }
- }
-
- opts.Spec.reload() // re-analyze
-
- return nil
-}
-
-func removeUnused(opts *FlattenOpts) {
- for removeUnusedSinglePass(opts) {
- // continue until no unused definition remains
- }
-}
-
-func removeUnusedSinglePass(opts *FlattenOpts) (hasRemoved bool) {
- expected := make(map[string]struct{})
- for k := range opts.Swagger().Definitions {
- expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{}
- }
-
- for _, k := range opts.Spec.AllDefinitionReferences() {
- delete(expected, k)
- }
-
- for k := range expected {
- hasRemoved = true
- debugLog("removing unused definition %s", path.Base(k))
- if opts.Verbose {
- log.Printf("info: removing unused definition: %s", path.Base(k))
- }
- delete(opts.Swagger().Definitions, path.Base(k))
- }
-
- opts.Spec.reload() // re-analyze
-
- return hasRemoved
-}
-
-func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error {
- // rewrite ref with already resolved external ref (useful for cyclical refs):
- // rewrite external refs to local ones
- debugLog("resolving known ref [%s] to %s", refStr, newName)
-
- for _, key := range entry.Keys {
- if err := replace.UpdateRef(opts.Swagger(), key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) error {
- var (
- isOAIGen bool
- newName string
- )
-
- debugLog("resolving schema from remote $ref [%s]", refStr)
-
- sch, err := spec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false))
- if err != nil {
- return fmt.Errorf("could not resolve schema: %w", err)
- }
-
- // at this stage only $ref analysis matters
- partialAnalyzer := &Spec{
- references: referenceAnalysis{},
- patterns: patternAnalysis{},
- enums: enumAnalysis{},
- }
- partialAnalyzer.reset()
- partialAnalyzer.analyzeSchema("", sch, "/")
-
- // now rewrite those refs with rebase
- for key, ref := range partialAnalyzer.references.allRefs {
- if err := replace.UpdateRef(sch, key, spec.MustCreateRef(normalize.RebaseRef(entry.Ref.String(), ref.String()))); err != nil {
- return fmt.Errorf("failed to rewrite ref for key %q at %s: %w", key, entry.Ref.String(), err)
- }
- }
-
- // generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name
- newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref, opts))
- debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen)
-
- opts.flattenContext.resolved[refStr] = newName
-
- // rewrite the external refs to local ones
- for _, key := range entry.Keys {
- if err := replace.UpdateRef(opts.Swagger(), key,
- spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
- return err
- }
-
- // keep track of created refs
- resolved := false
- if _, ok := opts.flattenContext.newRefs[key]; ok {
- resolved = opts.flattenContext.newRefs[key].resolved
- }
-
- debugLog("keeping track of ref: %s (%s), resolved: %t", key, newName, resolved)
- opts.flattenContext.newRefs[key] = &newRef{
- key: key,
- newName: newName,
- path: path.Join(definitionsPath, newName),
- isOAIGen: isOAIGen,
- resolved: resolved,
- schema: sch,
- }
- }
-
- // add the resolved schema to the definitions
- schutils.Save(opts.Swagger(), newName, sch)
-
- return nil
-}
-
-// importExternalReferences iteratively digs remote references and imports them into the main schema.
-//
-// At every iteration, new remotes may be found when digging deeper: they are rebased to the current schema before being imported.
-//
-// This returns true when no more remote references can be found.
-func importExternalReferences(opts *FlattenOpts) (bool, error) {
- debugLog("importExternalReferences")
-
- groupedRefs := sortref.ReverseIndex(opts.Spec.references.schemas, opts.BasePath)
- sortedRefStr := make([]string, 0, len(groupedRefs))
- if opts.flattenContext == nil {
- opts.flattenContext = newContext()
- }
-
- // sort $ref resolution to ensure deterministic name conflict resolution
- for refStr := range groupedRefs {
- sortedRefStr = append(sortedRefStr, refStr)
- }
- sort.Strings(sortedRefStr)
-
- complete := true
-
- for _, refStr := range sortedRefStr {
- entry := groupedRefs[refStr]
- if entry.Ref.HasFragmentOnly {
- continue
- }
-
- complete = false
-
- newName := opts.flattenContext.resolved[refStr]
- if newName != "" {
- if err := importKnownRef(entry, refStr, newName, opts); err != nil {
- return false, err
- }
-
- continue
- }
-
- // resolve schemas
- if err := importNewRef(entry, refStr, opts); err != nil {
- return false, err
- }
- }
-
- // maintains ref index entries
- for k := range opts.flattenContext.newRefs {
- r := opts.flattenContext.newRefs[k]
-
- // update tracking with resolved schemas
- if r.schema.Ref.String() != "" {
- ref := spec.MustCreateRef(r.path)
- sch, err := spec.ResolveRefWithBase(opts.Swagger(), &ref, opts.ExpandOpts(false))
- if err != nil {
- return false, fmt.Errorf("could not resolve schema: %w", err)
- }
-
- r.schema = sch
- }
-
- if r.path == k {
- continue
- }
-
- // update tracking with renamed keys: got a cascade of refs
- renamed := *r
- renamed.key = r.path
- opts.flattenContext.newRefs[renamed.path] = &renamed
-
- // indirect ref
- r.newName = path.Base(k)
- r.schema = spec.RefSchema(r.path)
- r.path = k
- r.isOAIGen = strings.Contains(k, "OAIGen")
- }
-
- return complete, nil
-}
-
-// stripPointersAndOAIGen removes anonymous JSON pointers from the spec and chains this with the name-conflict handler.
-// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible.
-func stripPointersAndOAIGen(opts *FlattenOpts) error {
- // name all JSON pointers to anonymous documents
- if err := namePointers(opts); err != nil {
- return err
- }
-
- // remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts)
- hasIntroducedPointerOrInline, ers := stripOAIGen(opts)
- if ers != nil {
- return ers
- }
-
- // iterate as pointer or OAIGen resolution may introduce inline schemas or pointers
- for hasIntroducedPointerOrInline {
- if !opts.Minimal {
- opts.Spec.reload() // re-analyze
- if err := nameInlinedSchemas(opts); err != nil {
- return err
- }
- }
-
- if err := namePointers(opts); err != nil {
- return err
- }
-
- // restrip and re-analyze
- var err error
- if hasIntroducedPointerOrInline, err = stripOAIGen(opts); err != nil {
- return err
- }
- }
-
- return nil
-}
-
-// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions.
-//
-// A dedupe is deemed unnecessary whenever:
-// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining)
-//   - there is a conflict with multiple parents: merge OAIGen into the first parent, then rewrite the other parents to point to
-// the first parent.
-//
-// This function returns true whenever it re-inlined a complex schema, so the caller may choose to iterate
-// pointer and name resolution again.
-func stripOAIGen(opts *FlattenOpts) (bool, error) {
- debugLog("stripOAIGen")
- replacedWithComplex := false
-
- // figure out referrers of OAIGen definitions (do this before the refs start mutating)
- for _, r := range opts.flattenContext.newRefs {
- updateRefParents(opts.Spec.references.allRefs, r)
- }
-
- for k := range opts.flattenContext.newRefs {
- r := opts.flattenContext.newRefs[k]
- debugLog("newRefs[%s]: isOAIGen: %t, resolved: %t, name: %s, path:%s, #parents: %d, parents: %v, ref: %s",
- k, r.isOAIGen, r.resolved, r.newName, r.path, len(r.parents), r.parents, r.schema.Ref.String())
-
- if !r.isOAIGen || len(r.parents) == 0 {
- continue
- }
-
- hasReplacedWithComplex, err := stripOAIGenForRef(opts, k, r)
- if err != nil {
- return replacedWithComplex, err
- }
-
- replacedWithComplex = replacedWithComplex || hasReplacedWithComplex
- }
-
- debugLog("replacedWithComplex: %t", replacedWithComplex)
- opts.Spec.reload() // re-analyze
-
- return replacedWithComplex, nil
-}
-
-// updateRefParents updates all parents of an updated $ref
-func updateRefParents(allRefs map[string]spec.Ref, r *newRef) {
- if !r.isOAIGen || r.resolved { // bail on already resolved entries (avoid looping)
- return
- }
- for k, v := range allRefs {
- if r.path != v.String() {
- continue
- }
-
- found := false
- for _, p := range r.parents {
- if p == k {
- found = true
-
- break
- }
- }
- if !found {
- r.parents = append(r.parents, k)
- }
- }
-}
-
-func stripOAIGenForRef(opts *FlattenOpts, k string, r *newRef) (bool, error) {
- replacedWithComplex := false
-
- pr := sortref.TopmostFirst(r.parents)
-
- // rewrite first parent schema in hierarchical then lexicographical order
- debugLog("rewrite first parent %s with schema", pr[0])
- if err := replace.UpdateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil {
- return false, err
- }
-
- if pa, ok := opts.flattenContext.newRefs[pr[0]]; ok && pa.isOAIGen {
- // update parent in ref index entry
- debugLog("update parent entry: %s", pr[0])
- pa.schema = r.schema
- pa.resolved = false
- replacedWithComplex = true
- }
-
- // rewrite other parents to point to first parent
- if len(pr) > 1 {
- for _, p := range pr[1:] {
- replacingRef := spec.MustCreateRef(pr[0])
-
- // set complex when replacing ref is an anonymous jsonpointer: further processing may be required
- replacedWithComplex = replacedWithComplex || path.Dir(replacingRef.String()) != definitionsPath
- debugLog("rewrite parent with ref: %s", replacingRef.String())
-
- // NOTE: it is possible at this stage to introduce json pointers (to non-definitions places).
- // Those are stripped later on.
- if err := replace.UpdateRef(opts.Swagger(), p, replacingRef); err != nil {
- return false, err
- }
-
- if pa, ok := opts.flattenContext.newRefs[p]; ok && pa.isOAIGen {
- // update parent in ref index
- debugLog("update parent entry: %s", p)
- pa.schema = r.schema
- pa.resolved = false
- replacedWithComplex = true
- }
- }
- }
-
- // remove OAIGen definition
- debugLog("removing definition %s", path.Base(r.path))
- delete(opts.Swagger().Definitions, path.Base(r.path))
-
- // propagate changes in ref index for keys which have this one as a parent
- for kk, value := range opts.flattenContext.newRefs {
- if kk == k || !value.isOAIGen || value.resolved {
- continue
- }
-
- found := false
- newParents := make([]string, 0, len(value.parents))
- for _, parent := range value.parents {
- switch {
- case parent == r.path:
- found = true
- parent = pr[0]
- case strings.HasPrefix(parent, r.path+"/"):
- found = true
- parent = path.Join(pr[0], strings.TrimPrefix(parent, r.path))
- }
-
- newParents = append(newParents, parent)
- }
-
- if found {
- value.parents = newParents
- }
- }
-
- // mark naming conflict as resolved
- debugLog("marking naming conflict resolved for key: %s", r.key)
- opts.flattenContext.newRefs[r.key].isOAIGen = false
- opts.flattenContext.newRefs[r.key].resolved = true
-
- // determine if the previous substitution did inline a complex schema
- if r.schema != nil && r.schema.Ref.String() == "" { // inline schema
- asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath})
- if err != nil {
- return false, err
- }
-
- debugLog("re-inlined schema: parent: %s, %t", pr[0], asch.isAnalyzedAsComplex())
- replacedWithComplex = replacedWithComplex || !(path.Dir(pr[0]) == definitionsPath) && asch.isAnalyzedAsComplex()
- }
-
- return replacedWithComplex, nil
-}
-
-// namePointers replaces all JSON pointers to anonymous documents with a $ref to a new named definition.
-//
-// This is carried on depth-first. Pointers to $refs which are top level definitions are replaced by the $ref itself.
-// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used).
-func namePointers(opts *FlattenOpts) error {
- debugLog("name pointers")
-
- refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas))
- for k, ref := range opts.Spec.references.allRefs {
- debugLog("name pointers: %q => %#v", k, ref)
- if path.Dir(ref.String()) == definitionsPath {
- // this is a ref to a top-level definition: ok
- continue
- }
-
- result, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), ref)
- if err != nil {
- return fmt.Errorf("at %s, %w", k, err)
- }
-
- replacingRef := result.Ref
- sch := result.Schema
- if opts.flattenContext != nil {
- opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
- }
-
- debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String())
- refsToReplace[k] = SchemaRef{
- Name: k, // caller
- Ref: replacingRef, // called
- Schema: sch,
- TopLevel: path.Dir(replacingRef.String()) == definitionsPath,
- }
- }
-
- depthFirst := sortref.DepthFirst(refsToReplace)
- namer := &InlineSchemaNamer{
- Spec: opts.Swagger(),
- Operations: operations.AllOpRefsByRef(opts.Spec, nil),
- flattenContext: opts.flattenContext,
- opts: opts,
- }
-
- for _, key := range depthFirst {
- v := refsToReplace[key]
- // update current replacement, which may have been updated by previous changes of deeper elements
- result, erd := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), v.Ref)
- if erd != nil {
- return fmt.Errorf("at %s, %w", key, erd)
- }
-
- if opts.flattenContext != nil {
- opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
- }
-
- v.Ref = result.Ref
- v.Schema = result.Schema
- v.TopLevel = path.Dir(result.Ref.String()) == definitionsPath
- debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String())
-
- if v.TopLevel {
- debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String())
-
- // if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref
- if err := replace.UpdateRef(opts.Swagger(), key, v.Ref); err != nil {
- return err
- }
-
- continue
- }
-
- if err := flattenAnonPointer(key, v, refsToReplace, namer, opts); err != nil {
- return err
- }
- }
-
- opts.Spec.reload() // re-analyze
-
- return nil
-}
-
-func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]SchemaRef, namer *InlineSchemaNamer, opts *FlattenOpts) error {
- // this is a JSON pointer to an anonymous document (internal or external):
- // create a definition for this schema when:
- // - it is a complex schema
- // - or it is pointed by more than one $ref (i.e. expresses commonality)
- // otherwise, expand the pointer (single reference to a simple type)
- //
- // The named definition for this follows the target's key, not the caller's
- debugLog("namePointers at %s for %s", key, v.Ref.String())
-
- // qualify the expanded schema
- asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
- if ers != nil {
- return fmt.Errorf("schema analysis [%s]: %w", key, ers)
- }
- callers := make([]string, 0, 64)
-
- debugLog("looking for callers")
-
- an := New(opts.Swagger())
- for k, w := range an.references.allRefs {
- r, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), w)
- if err != nil {
- return fmt.Errorf("at %s, %w", key, err)
- }
-
- if opts.flattenContext != nil {
- opts.flattenContext.warnings = append(opts.flattenContext.warnings, r.Warnings...)
- }
-
- if r.Ref.String() == v.Ref.String() {
- callers = append(callers, k)
- }
- }
-
- debugLog("callers for %s: %d", v.Ref.String(), len(callers))
- if len(callers) == 0 {
- // has already been updated and resolved
- return nil
- }
-
- parts := sortref.KeyParts(v.Ref.String())
- debugLog("number of callers for %s: %d", v.Ref.String(), len(callers))
-
- // identifying edge case when the namer did nothing because we point to a non-schema object
- // no definition is created and we expand the $ref for all callers
- debugLog("decide what to do with the schema pointed to: asch.IsSimpleSchema=%t, len(callers)=%d, parts.IsSharedParam=%t, parts.IsSharedResponse=%t",
- asch.IsSimpleSchema, len(callers), parts.IsSharedParam(), parts.IsSharedResponse(),
- )
-
- if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() {
- debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String())
- if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil {
- return err
- }
-
- // regular case: we named the $ref as a definition, and we move all callers to this new $ref
- for _, caller := range callers {
- if caller == key {
- continue
- }
-
- // move $ref for next to resolve
- debugLog("identified caller of %s at [%s]", v.Ref.String(), caller)
- c := refsToReplace[caller]
- c.Ref = v.Ref
- refsToReplace[caller] = c
- }
-
- return nil
- }
-
- // everything that is a simple schema and not factorizable is expanded
- debugLog("expand JSON pointer for key=%s", key)
-
- if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil {
- return err
- }
- // NOTE: there is no other caller to update
-
- return nil
-}
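The flatten.go file removed above implemented analysis.Flatten, the entry point described in its doc comment. A hedged sketch of a minimal flattening run, assuming the upstream FlattenOpts fields (Spec, BasePath, Minimal, Expand, RemoveUnused) and the loads/analysis imports from the first sketch; the input path is hypothetical:

doc, err := loads.Spec("./swagger.yaml")
if err != nil {
	log.Fatal(err)
}

an := analysis.New(doc.Spec())
err = analysis.Flatten(analysis.FlattenOpts{
	Spec:         an,
	BasePath:     doc.SpecFilePath(),
	Minimal:      true,  // stop after minimal $ref processing
	Expand:       false, // do not fully expand the document
	RemoveUnused: true,  // drop definitions left unreferenced
})
if err != nil {
	log.Fatal(err)
}
// doc.Spec() now holds the self-contained, flattened document.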
diff --git a/vendor/github.com/go-openapi/analysis/flatten_name.go b/vendor/github.com/go-openapi/analysis/flatten_name.go
deleted file mode 100644
index c7d7938ebe..0000000000
--- a/vendor/github.com/go-openapi/analysis/flatten_name.go
+++ /dev/null
@@ -1,308 +0,0 @@
-package analysis
-
-import (
- "fmt"
- "path"
- "sort"
- "strings"
-
- "github.com/go-openapi/analysis/internal/flatten/operations"
- "github.com/go-openapi/analysis/internal/flatten/replace"
- "github.com/go-openapi/analysis/internal/flatten/schutils"
- "github.com/go-openapi/analysis/internal/flatten/sortref"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-// InlineSchemaNamer finds a new name for an inlined type
-type InlineSchemaNamer struct {
- Spec *spec.Swagger
- Operations map[string]operations.OpRef
- flattenContext *context
- opts *FlattenOpts
-}
-
-// Name yields a new name for the inline schema
-func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *AnalyzedSchema) error {
- debugLog("naming inlined schema at %s", key)
-
- parts := sortref.KeyParts(key)
- for _, name := range namesFromKey(parts, aschema, isn.Operations) {
- if name == "" {
- continue
- }
-
- // create unique name
- mangle := mangler(isn.opts)
- newName, isOAIGen := uniqifyName(isn.Spec.Definitions, mangle(name))
-
- // clone schema
- sch := schutils.Clone(schema)
-
- // replace values on schema
- debugLog("rewriting schema to ref: key=%s with new name: %s", key, newName)
- if err := replace.RewriteSchemaToRef(isn.Spec, key,
- spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
- return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err)
- }
-
- // rewrite any dependent $ref pointing to this place,
- // when not already pointing to a top-level definition.
- //
-	// NOTE: this is important if such referrers use arbitrary JSON pointers.
- an := New(isn.Spec)
- for k, v := range an.references.allRefs {
- r, erd := replace.DeepestRef(isn.opts.Swagger(), isn.opts.ExpandOpts(false), v)
- if erd != nil {
- return fmt.Errorf("at %s, %w", k, erd)
- }
-
- if isn.opts.flattenContext != nil {
- isn.opts.flattenContext.warnings = append(isn.opts.flattenContext.warnings, r.Warnings...)
- }
-
- if r.Ref.String() != key && (r.Ref.String() != path.Join(definitionsPath, newName) || path.Dir(v.String()) == definitionsPath) {
- continue
- }
-
- debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String())
-
- // rewrite $ref to the new target
- if err := replace.UpdateRef(isn.Spec, k,
- spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
- return err
- }
- }
-
- // NOTE: this extension is currently not used by go-swagger (provided for information only)
- sch.AddExtension("x-go-gen-location", GenLocation(parts))
-
- // save cloned schema to definitions
- schutils.Save(isn.Spec, newName, sch)
-
- // keep track of created refs
- if isn.flattenContext == nil {
- continue
- }
-
- debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen)
- resolved := false
-
- if _, ok := isn.flattenContext.newRefs[key]; ok {
- resolved = isn.flattenContext.newRefs[key].resolved
- }
-
- isn.flattenContext.newRefs[key] = &newRef{
- key: key,
- newName: newName,
- path: path.Join(definitionsPath, newName),
- isOAIGen: isOAIGen,
- resolved: resolved,
- schema: sch,
- }
- }
-
- return nil
-}
-
-// uniqifyName yields a unique name for a definition
-func uniqifyName(definitions spec.Definitions, name string) (string, bool) {
- isOAIGen := false
- if name == "" {
- name = "oaiGen"
- isOAIGen = true
- }
-
- if len(definitions) == 0 {
- return name, isOAIGen
- }
-
- unq := true
- for k := range definitions {
- if strings.EqualFold(k, name) {
- unq = false
-
- break
- }
- }
-
- if unq {
- return name, isOAIGen
- }
-
- name += "OAIGen"
- isOAIGen = true
- var idx int
- unique := name
- _, known := definitions[unique]
-
- for known {
- idx++
- unique = fmt.Sprintf("%s%d", name, idx)
- _, known = definitions[unique]
- }
-
- return unique, isOAIGen
-}
-
-func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations map[string]operations.OpRef) []string {
- var (
- baseNames [][]string
- startIndex int
- )
-
- switch {
- case parts.IsOperation():
- baseNames, startIndex = namesForOperation(parts, operations)
- case parts.IsDefinition():
- baseNames, startIndex = namesForDefinition(parts)
- default:
-		// this is a non-standard pointer: build a name by concatenating its parts
- baseNames = [][]string{parts}
- startIndex = len(baseNames) + 1
- }
-
- result := make([]string, 0, len(baseNames))
- for _, segments := range baseNames {
- nm := parts.BuildName(segments, startIndex, partAdder(aschema))
- if nm == "" {
- continue
- }
-
- result = append(result, nm)
- }
- sort.Strings(result)
-
- debugLog("names from parts: %v => %v", parts, result)
- return result
-}
-
-func namesForParam(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
- var (
- baseNames [][]string
- startIndex int
- )
-
- piref := parts.PathItemRef()
- if piref.String() != "" && parts.IsOperationParam() {
- if op, ok := operations[piref.String()]; ok {
- startIndex = 5
- baseNames = append(baseNames, []string{op.ID, "params", "body"})
- }
- } else if parts.IsSharedOperationParam() {
- pref := parts.PathRef()
- for k, v := range operations {
- if strings.HasPrefix(k, pref.String()) {
- startIndex = 4
- baseNames = append(baseNames, []string{v.ID, "params", "body"})
- }
- }
- }
-
- return baseNames, startIndex
-}
-
-func namesForOperation(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
- var (
- baseNames [][]string
- startIndex int
- )
-
- // params
- if parts.IsOperationParam() || parts.IsSharedOperationParam() {
- baseNames, startIndex = namesForParam(parts, operations)
- }
-
- // responses
- if parts.IsOperationResponse() {
- piref := parts.PathItemRef()
- if piref.String() != "" {
- if op, ok := operations[piref.String()]; ok {
- startIndex = 6
- baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
- }
- }
- }
-
- return baseNames, startIndex
-}
-
-func namesForDefinition(parts sortref.SplitKey) ([][]string, int) {
- nm := parts.DefinitionName()
- if nm != "" {
- return [][]string{{parts.DefinitionName()}}, 2
- }
-
- return [][]string{}, 0
-}
-
-// partAdder knows how to interpret a schema when it comes to build a name from parts
-func partAdder(aschema *AnalyzedSchema) sortref.PartAdder {
- return func(part string) []string {
- segments := make([]string, 0, 2)
-
- if part == "items" || part == "additionalItems" {
- if aschema.IsTuple || aschema.IsTupleWithExtra {
- segments = append(segments, "tuple")
- } else {
- segments = append(segments, "items")
- }
-
- if part == "additionalItems" {
- segments = append(segments, part)
- }
-
- return segments
- }
-
- segments = append(segments, part)
-
- return segments
- }
-}
-
-func mangler(o *FlattenOpts) func(string) string {
- if o.KeepNames {
- return func(in string) string { return in }
- }
-
- return swag.ToJSONName
-}
-
-func nameFromRef(ref spec.Ref, o *FlattenOpts) string {
- mangle := mangler(o)
-
- u := ref.GetURL()
- if u.Fragment != "" {
- return mangle(path.Base(u.Fragment))
- }
-
- if u.Path != "" {
- bn := path.Base(u.Path)
- if bn != "" && bn != "/" {
- ext := path.Ext(bn)
- if ext != "" {
- return mangle(bn[:len(bn)-len(ext)])
- }
-
- return mangle(bn)
- }
- }
-
- return mangle(strings.ReplaceAll(u.Host, ".", " "))
-}
-
-// GenLocation indicates from which section of the specification (models or operations) a definition has been created.
-//
-// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is provided
-// for information only.
-func GenLocation(parts sortref.SplitKey) string {
- switch {
- case parts.IsOperation():
- return "operations"
- case parts.IsDefinition():
- return "models"
- default:
- return ""
- }
-}
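For context, the removed uniqifyName helper resolves definition-name collisions by appending an "OAIGen" suffix and, if needed, a counter. A rough, illustrative sketch (the definitions map and names below are made up):

    defs := spec.Definitions{
        "Pet":       spec.Schema{},
        "petOAIGen": spec.Schema{},
    }
    name, generated := uniqifyName(defs, "pet")
    // name == "petOAIGen1", generated == true:
    // "pet" collides case-insensitively with "Pet", and the first fallback
    // "petOAIGen" is already taken, so a numeric suffix is appended.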
diff --git a/vendor/github.com/go-openapi/analysis/flatten_options.go b/vendor/github.com/go-openapi/analysis/flatten_options.go
deleted file mode 100644
index c943fe1e84..0000000000
--- a/vendor/github.com/go-openapi/analysis/flatten_options.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package analysis
-
-import (
- "log"
-
- "github.com/go-openapi/spec"
-)
-
-// FlattenOpts configuration for flattening a swagger specification.
-//
-// The BasePath parameter is used to locate remote relative $ref found in the specification.
-// This path is a file: it points to the location of the root document and may be either a local
-// file path or a URL.
-//
-// If none specified, relative references (e.g. "$ref": "folder/schema.yaml#/definitions/...")
-// found in the spec are searched from the current working directory.
-type FlattenOpts struct {
- Spec *Spec // The analyzed spec to work with
- flattenContext *context // Internal context to track flattening activity
-
- BasePath string // The location of the root document for this spec to resolve relative $ref
-
- // Flattening options
- Expand bool // When true, skip flattening the spec and expand it instead (if Minimal is false)
- Minimal bool // When true, do not decompose complex structures such as allOf
- Verbose bool // enable some reporting on possible name conflicts detected
- RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening
- ContinueOnError bool // Continue when spec expansion issues are found
- KeepNames bool // Do not attempt to jsonify names from references when flattening
-
- /* Extra keys */
- _ struct{} // require keys
-}
-
-// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
-func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *spec.ExpandOptions {
- return &spec.ExpandOptions{
- RelativeBase: f.BasePath,
- SkipSchemas: skipSchemas,
- ContinueOnError: f.ContinueOnError,
- }
-}
-
-// Swagger gets the swagger specification for this flatten operation
-func (f *FlattenOpts) Swagger() *spec.Swagger {
- return f.Spec.spec
-}
-
-// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting
-// from flattening a spec
-func (f *FlattenOpts) croak() {
- if !f.Verbose {
- return
- }
-
- reported := make(map[string]bool, len(f.flattenContext.newRefs))
- for _, v := range f.Spec.references.allRefs {
- // warns about duplicate handling
- for _, r := range f.flattenContext.newRefs {
- if r.isOAIGen && r.path == v.String() {
- reported[r.newName] = true
- }
- }
- }
-
- for k := range reported {
- log.Printf("warning: duplicate flattened definition name resolved as %s", k)
- }
-
- // warns about possible type mismatches
- uniqueMsg := make(map[string]bool)
- for _, msg := range f.flattenContext.warnings {
- if _, ok := uniqueMsg[msg]; ok {
- continue
- }
- log.Printf("warning: %s", msg)
- uniqueMsg[msg] = true
- }
-}
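For context, a hypothetical caller would configure the removed FlattenOpts and derive spec.ExpandOptions from it roughly as follows (doc stands for an *analysis.Spec obtained elsewhere; the paths are made up):

    opts := &analysis.FlattenOpts{
        Spec:     doc,
        BasePath: "/tmp/spec/swagger.yaml", // remote relative $ref are resolved against this location
        Minimal:  true,                     // keep allOf constructs as-is
        Verbose:  true,                     // let croak() report name conflicts
    }
    expand := opts.ExpandOpts(false) // SkipSchemas=false, same BasePath and ContinueOnError
    _ = expand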
diff --git a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go
deleted file mode 100644
index 39f55a97bf..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package debug
-
-import (
- "fmt"
- "log"
- "os"
- "path/filepath"
- "runtime"
-)
-
-var (
- output = os.Stdout
-)
-
-// GetLogger provides a prefix debug logger
-func GetLogger(prefix string, debug bool) func(string, ...interface{}) {
- if debug {
- logger := log.New(output, prefix+":", log.LstdFlags)
-
- return func(msg string, args ...interface{}) {
- _, file1, pos1, _ := runtime.Caller(1)
- logger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
- }
- }
-
- return func(_ string, _ ...interface{}) {}
-}
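For context, the sibling flatten packages in this vendor tree wire the removed logger up like this (see the replace package further down); output only appears when the environment variable is set:

    var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")

    func example() {
        debugLog("rewriting schema at %s", "#/definitions/Pet") // no-op unless SWAGGER_DEBUG is set
    }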
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go
deleted file mode 100644
index 8c9df0580d..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go
+++ /dev/null
@@ -1,87 +0,0 @@
-package normalize
-
-import (
- "net/url"
- "path"
- "path/filepath"
- "strings"
-
- "github.com/go-openapi/spec"
-)
-
-// RebaseRef rebases a remote ref relative to a base ref.
-//
-// NOTE: does not support JSONschema ID for $ref (we assume we are working with swagger specs here).
-//
-// NOTE(windows):
-// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
-// * "/" in paths may appear as escape sequences
-func RebaseRef(baseRef string, ref string) string {
- baseRef, _ = url.PathUnescape(baseRef)
- ref, _ = url.PathUnescape(ref)
-
- if baseRef == "" || baseRef == "." || strings.HasPrefix(baseRef, "#") {
- return ref
- }
-
- parts := strings.Split(ref, "#")
-
- baseParts := strings.Split(baseRef, "#")
- baseURL, _ := url.Parse(baseParts[0])
- if strings.HasPrefix(ref, "#") {
- if baseURL.Host == "" {
- return strings.Join([]string{baseParts[0], parts[1]}, "#")
- }
-
- return strings.Join([]string{baseParts[0], parts[1]}, "#")
- }
-
- refURL, _ := url.Parse(parts[0])
- if refURL.Host != "" || filepath.IsAbs(parts[0]) {
- // not rebasing an absolute path
- return ref
- }
-
- // there is a relative path
- var basePath string
- if baseURL.Host != "" {
- // when there is a host, standard URI rules apply (with "/")
- baseURL.Path = path.Dir(baseURL.Path)
- baseURL.Path = path.Join(baseURL.Path, "/"+parts[0])
-
- return baseURL.String()
- }
-
- // this is a local relative path
- // basePart[0] and parts[0] are local filesystem directories/files
- basePath = filepath.Dir(baseParts[0])
- relPath := filepath.Join(basePath, string(filepath.Separator)+parts[0])
- if len(parts) > 1 {
- return strings.Join([]string{relPath, parts[1]}, "#")
- }
-
- return relPath
-}
-
-// Path renders absolute path on remote file refs
-//
-// NOTE(windows):
-// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
-// * "/" in paths may appear as escape sequences
-func Path(ref spec.Ref, basePath string) string {
- uri, _ := url.PathUnescape(ref.String())
- if ref.HasFragmentOnly || filepath.IsAbs(uri) {
- return uri
- }
-
- refURL, _ := url.Parse(uri)
- if refURL.Host != "" {
- return uri
- }
-
- parts := strings.Split(uri, "#")
- // BasePath, parts[0] are local filesystem directories, guaranteed to be absolute at this stage
- parts[0] = filepath.Join(filepath.Dir(basePath), parts[0])
-
- return strings.Join(parts, "#")
-}
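For context, the removed RebaseRef behaves roughly as follows (illustrative values, Unix-style separators assumed):

    // RebaseRef("/tmp/spec/swagger.yaml", "pets.yaml#/definitions/Pet")
    //   -> "/tmp/spec/pets.yaml#/definitions/Pet"       relative file rebased next to the base document
    // RebaseRef("/tmp/spec/swagger.yaml", "#/definitions/Pet")
    //   -> "/tmp/spec/swagger.yaml#/definitions/Pet"    fragment-only ref attached to the base document
    // RebaseRef("/tmp/spec/swagger.yaml", "/abs/other.yaml#/definitions/Pet")
    //   -> "/abs/other.yaml#/definitions/Pet"           absolute paths are returned unchanged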
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go
deleted file mode 100644
index 7f3a2b8717..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package operations
-
-import (
- "path"
- "sort"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-// AllOpRefsByRef returns an index of sortable operations
-func AllOpRefsByRef(specDoc Provider, operationIDs []string) map[string]OpRef {
- return OpRefsByRef(GatherOperations(specDoc, operationIDs))
-}
-
-// OpRefsByRef indexes a map of sortable operations
-func OpRefsByRef(oprefs map[string]OpRef) map[string]OpRef {
- result := make(map[string]OpRef, len(oprefs))
- for _, v := range oprefs {
- result[v.Ref.String()] = v
- }
-
- return result
-}
-
-// OpRef is an indexable, sortable operation
-type OpRef struct {
- Method string
- Path string
- Key string
- ID string
- Op *spec.Operation
- Ref spec.Ref
-}
-
-// OpRefs is a sortable collection of operations
-type OpRefs []OpRef
-
-func (o OpRefs) Len() int { return len(o) }
-func (o OpRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
-func (o OpRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
-
-// Provider knows how to collect operations from a spec
-type Provider interface {
- Operations() map[string]map[string]*spec.Operation
-}
-
-// GatherOperations builds a map of sorted operations from a spec
-func GatherOperations(specDoc Provider, operationIDs []string) map[string]OpRef {
- var oprefs OpRefs
-
- for method, pathItem := range specDoc.Operations() {
- for pth, operation := range pathItem {
- vv := *operation
- oprefs = append(oprefs, OpRef{
- Key: swag.ToGoName(strings.ToLower(method) + " " + pth),
- Method: method,
- Path: pth,
- ID: vv.ID,
- Op: &vv,
- Ref: spec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
- })
- }
- }
-
- sort.Sort(oprefs)
-
- operations := make(map[string]OpRef)
- for _, opr := range oprefs {
- nm := opr.ID
- if nm == "" {
- nm = opr.Key
- }
-
- oo, found := operations[nm]
- if found && oo.Method != opr.Method && oo.Path != opr.Path {
- nm = opr.Key
- }
-
- if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
- opr.ID = nm
- opr.Op.ID = nm
- operations[nm] = opr
- }
- }
-
- return operations
-}
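For context, a minimal, hypothetical Provider is enough to exercise the removed helpers (the fakeProvider type and the operation below are made up for illustration):

    type fakeProvider struct{ ops map[string]map[string]*spec.Operation }

    func (f fakeProvider) Operations() map[string]map[string]*spec.Operation { return f.ops }

    func example() {
        p := fakeProvider{ops: map[string]map[string]*spec.Operation{
            "GET": {"/pets": &spec.Operation{OperationProps: spec.OperationProps{ID: "listPets"}}},
        }}
        byID := operations.GatherOperations(p, nil) // keyed by operation ID, e.g. "listPets"
        byRef := operations.AllOpRefsByRef(p, nil)  // keyed by ref, e.g. "#/paths/~1pets/GET"
        _, _ = byID, byRef
    }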
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go
deleted file mode 100644
index c0f43e728a..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go
+++ /dev/null
@@ -1,458 +0,0 @@
-package replace
-
-import (
- "encoding/json"
- "fmt"
- "net/url"
- "os"
- "path"
- "strconv"
-
- "github.com/go-openapi/analysis/internal/debug"
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
-)
-
-const definitionsPath = "#/definitions"
-
-var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")
-
-// RewriteSchemaToRef replaces a schema with a Ref
-func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error {
- debugLog("rewriting schema to ref for %s with %s", key, ref.String())
- _, value, err := getPointerFromKey(sp, key)
- if err != nil {
- return err
- }
-
- switch refable := value.(type) {
- case *spec.Schema:
- return rewriteParentRef(sp, key, ref)
-
- case spec.Schema:
- return rewriteParentRef(sp, key, ref)
-
- case *spec.SchemaOrArray:
- if refable.Schema != nil {
- refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
- }
-
- case *spec.SchemaOrBool:
- if refable.Schema != nil {
- refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
- }
- case map[string]interface{}: // this happens e.g. if a schema points to an extension unmarshaled as map[string]interface{}
- return rewriteParentRef(sp, key, ref)
- default:
- return fmt.Errorf("no schema with ref found at %s for %T", key, value)
- }
-
- return nil
-}
-
-func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error {
- parent, entry, pvalue, err := getParentFromKey(sp, key)
- if err != nil {
- return err
- }
-
- debugLog("rewriting holder for %T", pvalue)
- switch container := pvalue.(type) {
- case spec.Response:
- if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
- return err
- }
-
- case *spec.Response:
- container.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case *spec.Responses:
- statusCode, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", key[1:], err)
- }
- resp := container.StatusCodeResponses[statusCode]
- resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
- container.StatusCodeResponses[statusCode] = resp
-
- case map[string]spec.Response:
- resp := container[entry]
- resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
- container[entry] = resp
-
- case spec.Parameter:
- if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
- return err
- }
-
- case map[string]spec.Parameter:
- param := container[entry]
- param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
- container[entry] = param
-
- case []spec.Parameter:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", key[1:], err)
- }
- param := container[idx]
- param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
- container[idx] = param
-
- case spec.Definitions:
- container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case map[string]spec.Schema:
- container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case []spec.Schema:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", key[1:], err)
- }
- container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case *spec.SchemaOrArray:
- // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", key[1:], err)
- }
- container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case spec.SchemaProperties:
- container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case *interface{}:
- *container = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
-
- default:
- return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
- }
-
- return nil
-}
-
-// getPointerFromKey retrieves the content of the JSON pointer "key"
-func getPointerFromKey(sp interface{}, key string) (string, interface{}, error) {
- switch sp.(type) {
- case *spec.Schema:
- case *spec.Swagger:
- default:
- panic("unexpected type used in getPointerFromKey")
- }
- if key == "#/" {
- return "", sp, nil
- }
- // unescape chars in key, e.g. "{}" from path params
- pth, _ := url.PathUnescape(key[1:])
- ptr, err := jsonpointer.New(pth)
- if err != nil {
- return "", nil, err
- }
-
- value, _, err := ptr.Get(sp)
- if err != nil {
- debugLog("error when getting key: %s with path: %s", key, pth)
-
- return "", nil, err
- }
-
- return pth, value, nil
-}
-
-// getParentFromKey retrieves the container of the JSON pointer "key"
-func getParentFromKey(sp interface{}, key string) (string, string, interface{}, error) {
- switch sp.(type) {
- case *spec.Schema:
- case *spec.Swagger:
- default:
-		panic("unexpected type used in getParentFromKey")
- }
- // unescape chars in key, e.g. "{}" from path params
- pth, _ := url.PathUnescape(key[1:])
-
- parent, entry := path.Dir(pth), path.Base(pth)
- debugLog("getting schema holder at: %s, with entry: %s", parent, entry)
-
- pptr, err := jsonpointer.New(parent)
- if err != nil {
- return "", "", nil, err
- }
- pvalue, _, err := pptr.Get(sp)
- if err != nil {
- return "", "", nil, fmt.Errorf("can't get parent for %s: %w", parent, err)
- }
-
- return parent, entry, pvalue, nil
-}
-
-// UpdateRef replaces a ref by another one
-func UpdateRef(sp interface{}, key string, ref spec.Ref) error {
- switch sp.(type) {
- case *spec.Schema:
- case *spec.Swagger:
- default:
-		panic("unexpected type used in UpdateRef")
- }
- debugLog("updating ref for %s with %s", key, ref.String())
- pth, value, err := getPointerFromKey(sp, key)
- if err != nil {
- return err
- }
-
- switch refable := value.(type) {
- case *spec.Schema:
- refable.Ref = ref
- case *spec.SchemaOrArray:
- if refable.Schema != nil {
- refable.Schema.Ref = ref
- }
- case *spec.SchemaOrBool:
- if refable.Schema != nil {
- refable.Schema.Ref = ref
- }
- case spec.Schema:
- debugLog("rewriting holder for %T", refable)
- _, entry, pvalue, erp := getParentFromKey(sp, key)
- if erp != nil {
-			return erp
- }
- switch container := pvalue.(type) {
- case spec.Definitions:
- container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case map[string]spec.Schema:
- container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case []spec.Schema:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", pth, err)
- }
- container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case *spec.SchemaOrArray:
- // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", pth, err)
- }
- container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- case spec.SchemaProperties:
- container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-
- // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
-
- default:
- return fmt.Errorf("unhandled container type at %s: %T", key, value)
- }
-
- default:
- return fmt.Errorf("no schema with ref found at %s for %T", key, value)
- }
-
- return nil
-}
-
-// UpdateRefWithSchema replaces a ref with a schema (i.e. re-inline schema)
-func UpdateRefWithSchema(sp *spec.Swagger, key string, sch *spec.Schema) error {
- debugLog("updating ref for %s with schema", key)
- pth, value, err := getPointerFromKey(sp, key)
- if err != nil {
- return err
- }
-
- switch refable := value.(type) {
- case *spec.Schema:
- *refable = *sch
- case spec.Schema:
- _, entry, pvalue, erp := getParentFromKey(sp, key)
- if erp != nil {
-			return erp
- }
- switch container := pvalue.(type) {
- case spec.Definitions:
- container[entry] = *sch
-
- case map[string]spec.Schema:
- container[entry] = *sch
-
- case []spec.Schema:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", pth, err)
- }
- container[idx] = *sch
-
- case *spec.SchemaOrArray:
- // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %w", pth, err)
- }
- container.Schemas[idx] = *sch
-
- case spec.SchemaProperties:
- container[entry] = *sch
-
- // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
-
- default:
- return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value)
- }
- case *spec.SchemaOrArray:
- *refable.Schema = *sch
- // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
- case *spec.SchemaOrBool:
- *refable.Schema = *sch
- default:
- return fmt.Errorf("no schema with ref found at %s for %T", key, value)
- }
-
- return nil
-}
-
-// DeepestRefResult holds the results from DeepestRef analysis
-type DeepestRefResult struct {
- Ref spec.Ref
- Schema *spec.Schema
- Warnings []string
-}
-
-// DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions.
-// - if no definition is found, returns the deepest ref.
-// - pointers to external files are expanded
-//
-// NOTE: all external $ref's are assumed to be already expanded at this stage.
-func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) {
- if !ref.HasFragmentOnly {
- // we found an external $ref, which is odd at this stage:
- // do nothing on external $refs
- return &DeepestRefResult{Ref: ref}, nil
- }
-
- currentRef := ref
- visited := make(map[string]bool, 64)
- warnings := make([]string, 0, 2)
-
-DOWNREF:
- for currentRef.String() != "" {
- if path.Dir(currentRef.String()) == definitionsPath {
- // this is a top-level definition: stop here and return this ref
- return &DeepestRefResult{Ref: currentRef}, nil
- }
-
- if _, beenThere := visited[currentRef.String()]; beenThere {
- return nil,
- fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String())
- }
-
- visited[currentRef.String()] = true
- value, _, err := currentRef.GetPointer().Get(sp)
- if err != nil {
- return nil, err
- }
-
- switch refable := value.(type) {
- case *spec.Schema:
- if refable.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = refable.Ref
-
- case spec.Schema:
- if refable.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = refable.Ref
-
- case *spec.SchemaOrArray:
- if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = refable.Schema.Ref
-
- case *spec.SchemaOrBool:
- if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = refable.Schema.Ref
-
- case spec.Response:
- // a pointer points to a schema initially marshalled in responses section...
- // Attempt to convert this to a schema. If this fails, the spec is invalid
- asJSON, _ := refable.MarshalJSON()
- var asSchema spec.Schema
-
- err := asSchema.UnmarshalJSON(asJSON)
- if err != nil {
- return nil,
-				fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema, got: %T (%v)",
- currentRef.String(), value, err,
- )
- }
- warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String()))
-
- if asSchema.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = asSchema.Ref
-
- case spec.Parameter:
- // a pointer points to a schema initially marshalled in parameters section...
- // Attempt to convert this to a schema. If this fails, the spec is invalid
- asJSON, _ := refable.MarshalJSON()
- var asSchema spec.Schema
- if err := asSchema.UnmarshalJSON(asJSON); err != nil {
- return nil,
-				fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema, got: %T (%v)",
- currentRef.String(), value, err,
- )
- }
-
- warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String()))
-
- if asSchema.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = asSchema.Ref
-
- default:
- // fallback: attempts to resolve the pointer as a schema
- if refable == nil {
- break DOWNREF
- }
-
- asJSON, _ := json.Marshal(refable)
- var asSchema spec.Schema
- if err := asSchema.UnmarshalJSON(asJSON); err != nil {
- return nil,
- fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T (%v)",
- currentRef.String(), value, err,
- )
- }
- warnings = append(warnings, fmt.Sprintf("found $ref %q (%T) interpreted as schema", currentRef.String(), refable))
-
- if asSchema.Ref.String() == "" {
- break DOWNREF
- }
- currentRef = asSchema.Ref
- }
- }
-
- // assess what schema we're ending with
- sch, erv := spec.ResolveRefWithBase(sp, ¤tRef, opts)
- if erv != nil {
- return nil, erv
- }
-
- if sch == nil {
- return nil, fmt.Errorf("no schema found at %s", currentRef.String())
- }
-
- return &DeepestRefResult{Ref: currentRef, Schema: sch, Warnings: warnings}, nil
-}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go
deleted file mode 100644
index 4590236e68..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Package schutils provides tools to save or clone a schema
-// when flattening a spec.
-package schutils
-
-import (
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-// Save registers a schema as an entry in spec #/definitions
-func Save(sp *spec.Swagger, name string, schema *spec.Schema) {
- if schema == nil {
- return
- }
-
- if sp.Definitions == nil {
- sp.Definitions = make(map[string]spec.Schema, 150)
- }
-
- sp.Definitions[name] = *schema
-}
-
-// Clone deep-clones a schema
-func Clone(schema *spec.Schema) *spec.Schema {
- var sch spec.Schema
- _ = swag.FromDynamicJSON(schema, &sch)
-
- return &sch
-}
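For context, a short sketch of how the removed helpers are meant to be used together (names are illustrative):

    original := &spec.Schema{SchemaProps: spec.SchemaProps{Type: spec.StringOrArray{"object"}}}
    sp := &spec.Swagger{}

    copied := schutils.Clone(original) // deep copy via a dynamic JSON round-trip
    schutils.Save(sp, "Pet", copied)   // sp.Definitions["Pet"] now holds the clone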
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go
deleted file mode 100644
index ac80fc2e83..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go
+++ /dev/null
@@ -1,201 +0,0 @@
-package sortref
-
-import (
- "net/http"
- "path"
- "strconv"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
-)
-
-const (
- paths = "paths"
- responses = "responses"
- parameters = "parameters"
- definitions = "definitions"
-)
-
-var (
- ignoredKeys map[string]struct{}
- validMethods map[string]struct{}
-)
-
-func init() {
- ignoredKeys = map[string]struct{}{
- "schema": {},
- "properties": {},
- "not": {},
- "anyOf": {},
- "oneOf": {},
- }
-
- validMethods = map[string]struct{}{
- "GET": {},
- "HEAD": {},
- "OPTIONS": {},
- "PATCH": {},
- "POST": {},
- "PUT": {},
- "DELETE": {},
- }
-}
-
-// Key represent a key item constructed from /-separated segments
-type Key struct {
- Segments int
- Key string
-}
-
-// Keys is a sortable collection of Keys
-type Keys []Key
-
-func (k Keys) Len() int { return len(k) }
-func (k Keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
-func (k Keys) Less(i, j int) bool {
- return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
-}
-
-// KeyParts constructs a SplitKey with all its /-separated segments decomposed. It is sortable.
-func KeyParts(key string) SplitKey {
- var res []string
- for _, part := range strings.Split(key[1:], "/") {
- if part != "" {
- res = append(res, jsonpointer.Unescape(part))
- }
- }
-
- return res
-}
-
-// SplitKey holds the parts of a /-separated key, so that their location may be determined.
-type SplitKey []string
-
-// IsDefinition is true when the split key is in the #/definitions section of a spec
-func (s SplitKey) IsDefinition() bool {
- return len(s) > 1 && s[0] == definitions
-}
-
-// DefinitionName yields the name of the definition
-func (s SplitKey) DefinitionName() string {
- if !s.IsDefinition() {
- return ""
- }
-
- return s[1]
-}
-
-func (s SplitKey) isKeyName(i int) bool {
- if i <= 0 {
- return false
- }
-
- count := 0
- for idx := i - 1; idx > 0; idx-- {
- if s[idx] != "properties" {
- break
- }
- count++
- }
-
- return count%2 != 0
-}
-
-// PartAdder knows how to construct the components of a new name
-type PartAdder func(string) []string
-
-// BuildName builds a name from segments
-func (s SplitKey) BuildName(segments []string, startIndex int, adder PartAdder) string {
- for i, part := range s[startIndex:] {
- if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
- segments = append(segments, adder(part)...)
- }
- }
-
- return strings.Join(segments, " ")
-}
-
-// IsOperation is true when the split key is in the operations section
-func (s SplitKey) IsOperation() bool {
- return len(s) > 1 && s[0] == paths
-}
-
-// IsSharedOperationParam is true when the split key is in the parameters section of a path
-func (s SplitKey) IsSharedOperationParam() bool {
- return len(s) > 2 && s[0] == paths && s[2] == parameters
-}
-
-// IsSharedParam is true when the split key is in the #/parameters section of a spec
-func (s SplitKey) IsSharedParam() bool {
- return len(s) > 1 && s[0] == parameters
-}
-
-// IsOperationParam is true when the split key is in the parameters section of an operation
-func (s SplitKey) IsOperationParam() bool {
- return len(s) > 3 && s[0] == paths && s[3] == parameters
-}
-
-// IsOperationResponse is true when the split key is in the responses section of an operation
-func (s SplitKey) IsOperationResponse() bool {
- return len(s) > 3 && s[0] == paths && s[3] == responses
-}
-
-// IsSharedResponse is true when the split key is in the #/responses section of a spec
-func (s SplitKey) IsSharedResponse() bool {
- return len(s) > 1 && s[0] == responses
-}
-
-// IsDefaultResponse is true when the split key is the default response for an operation
-func (s SplitKey) IsDefaultResponse() bool {
- return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default"
-}
-
-// IsStatusCodeResponse is true when the split key is an operation response with a status code
-func (s SplitKey) IsStatusCodeResponse() bool {
- isInt := func() bool {
- _, err := strconv.Atoi(s[4])
-
- return err == nil
- }
-
- return len(s) > 4 && s[0] == paths && s[3] == responses && isInt()
-}
-
-// ResponseName yields either the status code or "Default" for a response
-func (s SplitKey) ResponseName() string {
- if s.IsStatusCodeResponse() {
- code, _ := strconv.Atoi(s[4])
-
- return http.StatusText(code)
- }
-
- if s.IsDefaultResponse() {
- return "Default"
- }
-
- return ""
-}
-
-// PathItemRef constructs a $ref object from a split key of the form /{path}/{method}
-func (s SplitKey) PathItemRef() spec.Ref {
- if len(s) < 3 {
- return spec.Ref{}
- }
-
- pth, method := s[1], s[2]
- if _, isValidMethod := validMethods[strings.ToUpper(method)]; !isValidMethod && !strings.HasPrefix(method, "x-") {
- return spec.Ref{}
- }
-
- return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method)))
-}
-
-// PathRef constructs a $ref object from a split key of the form /paths/{reference}
-func (s SplitKey) PathRef() spec.Ref {
- if !s.IsOperation() {
- return spec.Ref{}
- }
-
- return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(s[1])))
-}
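For context, the removed SplitKey helpers classify JSON pointers roughly like this (illustrative pointer):

    parts := sortref.KeyParts("#/paths/~1pets/get/parameters/0/schema")
    // parts == SplitKey{"paths", "/pets", "get", "parameters", "0", "schema"}
    parts.IsOperation()      // true: the pointer lives under /paths
    parts.IsOperationParam() // true: .../parameters/... inside an operation
    parts.PathItemRef()      // "#/paths/~1pets/GET"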
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go
deleted file mode 100644
index 73243df87f..0000000000
--- a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go
+++ /dev/null
@@ -1,141 +0,0 @@
-package sortref
-
-import (
- "reflect"
- "sort"
- "strings"
-
- "github.com/go-openapi/analysis/internal/flatten/normalize"
- "github.com/go-openapi/spec"
-)
-
-var depthGroupOrder = []string{
- "sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition",
-}
-
-type mapIterator struct {
- len int
- mapIter *reflect.MapIter
-}
-
-func (i *mapIterator) Next() bool {
- return i.mapIter.Next()
-}
-
-func (i *mapIterator) Len() int {
- return i.len
-}
-
-func (i *mapIterator) Key() string {
- return i.mapIter.Key().String()
-}
-
-func mustMapIterator(anyMap interface{}) *mapIterator {
- val := reflect.ValueOf(anyMap)
-
- return &mapIterator{mapIter: val.MapRange(), len: val.Len()}
-}
-
-// DepthFirst sorts the keys of a map of anything. It groups keys by category
-// (shared params, op params, status code responses, default responses, definitions),
-// sorts each group internally by number of key parts and lexical order,
-// then flattens the groups into a single list of keys.
-func DepthFirst(in interface{}) []string {
- iterator := mustMapIterator(in)
- sorted := make([]string, 0, iterator.Len())
- grouped := make(map[string]Keys, iterator.Len())
-
- for iterator.Next() {
- k := iterator.Key()
- split := KeyParts(k)
- var pk string
-
- if split.IsSharedOperationParam() {
- pk = "sharedOpParam"
- }
- if split.IsOperationParam() {
- pk = "opParam"
- }
- if split.IsStatusCodeResponse() {
- pk = "codeResponse"
- }
- if split.IsDefaultResponse() {
- pk = "defaultResponse"
- }
- if split.IsDefinition() {
- pk = "definition"
- }
- if split.IsSharedParam() {
- pk = "sharedParam"
- }
- if split.IsSharedResponse() {
- pk = "sharedResponse"
- }
- grouped[pk] = append(grouped[pk], Key{Segments: len(split), Key: k})
- }
-
- for _, pk := range depthGroupOrder {
- res := grouped[pk]
- sort.Sort(res)
-
- for _, v := range res {
- sorted = append(sorted, v.Key)
- }
- }
-
- return sorted
-}
-
-// topmostRefs sorts refs by hierarchical then lexicographic order,
-// yielding refs ordered breadth-first.
-type topmostRefs []string
-
-func (k topmostRefs) Len() int { return len(k) }
-func (k topmostRefs) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
-func (k topmostRefs) Less(i, j int) bool {
- li, lj := len(strings.Split(k[i], "/")), len(strings.Split(k[j], "/"))
- if li == lj {
- return k[i] < k[j]
- }
-
- return li < lj
-}
-
-// TopmostFirst sorts references by depth
-func TopmostFirst(refs []string) []string {
- res := topmostRefs(refs)
- sort.Sort(res)
-
- return res
-}
-
-// RefRevIdx is a reverse index for references
-type RefRevIdx struct {
- Ref spec.Ref
- Keys []string
-}
-
-// ReverseIndex builds a reverse index for references in schemas
-func ReverseIndex(schemas map[string]spec.Ref, basePath string) map[string]RefRevIdx {
- collected := make(map[string]RefRevIdx)
- for key, schRef := range schemas {
- // normalize paths before sorting,
- // so we get together keys that are from the same external file
- normalizedPath := normalize.Path(schRef, basePath)
-
- entry, ok := collected[normalizedPath]
- if ok {
- entry.Keys = append(entry.Keys, key)
- collected[normalizedPath] = entry
-
- continue
- }
-
- collected[normalizedPath] = RefRevIdx{
- Ref: schRef,
- Keys: []string{key},
- }
- }
-
- return collected
-}
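For context, the removed TopmostFirst orders refs breadth-first (illustrative refs):

    refs := []string{
        "#/definitions/pet/properties/category",
        "#/definitions/pet",
        "#/definitions/category",
    }
    sorted := sortref.TopmostFirst(refs)
    // -> ["#/definitions/category", "#/definitions/pet", "#/definitions/pet/properties/category"]
    // shallower pointers come first; ties are broken lexicographically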
diff --git a/vendor/github.com/go-openapi/analysis/mixin.go b/vendor/github.com/go-openapi/analysis/mixin.go
deleted file mode 100644
index 7785a29b27..0000000000
--- a/vendor/github.com/go-openapi/analysis/mixin.go
+++ /dev/null
@@ -1,515 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import (
- "fmt"
- "reflect"
-
- "github.com/go-openapi/spec"
-)
-
-// Mixin modifies the primary swagger spec by adding the paths and
-// definitions from the mixin specs. Top level parameters and
-// responses from the mixins are also carried over. Operation id
-// collisions are avoided by appending "Mixin" but only if
-// needed.
-//
-// The following parts of primary are subject to merge, filling empty details
-// - Info
-// - BasePath
-// - Host
-// - ExternalDocs
-//
-// Consider calling FixEmptyResponseDescriptions() on the modified primary
-// if you read them from storage and they are valid to start with.
-//
-// Entries in "paths", "definitions", "parameters" and "responses" are
-// added to the primary in the order of the given mixins. If the entry
-// already exists in primary it is skipped with a warning message.
-//
-// The count of skipped entries (from collisions) is returned so any
-// deviation from the number expected can flag a warning in your build
-// scripts. Carefully review the collisions before accepting them;
-// consider renaming things if possible.
-//
-// No key normalization takes place (paths, type defs,
-// etc). Ensure they are canonical if your downstream tools do
-// key normalization of any form.
-//
-// Merging schemes (http, https), and consumers/producers do not account for
-// collisions.
-func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
- skipped := make([]string, 0, len(mixins))
- opIDs := getOpIDs(primary)
- initPrimary(primary)
-
- for i, m := range mixins {
- skipped = append(skipped, mergeSwaggerProps(primary, m)...)
-
- skipped = append(skipped, mergeConsumes(primary, m)...)
-
- skipped = append(skipped, mergeProduces(primary, m)...)
-
- skipped = append(skipped, mergeTags(primary, m)...)
-
- skipped = append(skipped, mergeSchemes(primary, m)...)
-
- skipped = append(skipped, mergeSecurityDefinitions(primary, m)...)
-
- skipped = append(skipped, mergeSecurityRequirements(primary, m)...)
-
- skipped = append(skipped, mergeDefinitions(primary, m)...)
-
- // merging paths requires a map of operationIDs to work with
- skipped = append(skipped, mergePaths(primary, m, opIDs, i)...)
-
- skipped = append(skipped, mergeParameters(primary, m)...)
-
- skipped = append(skipped, mergeResponses(primary, m)...)
- }
-
- return skipped
-}
-
-// getOpIDs extracts all the paths..operationIds from the given
-// spec and returns them as the keys in a map with 'true' values.
-func getOpIDs(s *spec.Swagger) map[string]bool {
- rv := make(map[string]bool)
- if s.Paths == nil {
- return rv
- }
-
- for _, v := range s.Paths.Paths {
- piops := pathItemOps(v)
-
- for _, op := range piops {
- rv[op.ID] = true
- }
- }
-
- return rv
-}
-
-func pathItemOps(p spec.PathItem) []*spec.Operation {
- var rv []*spec.Operation
- rv = appendOp(rv, p.Get)
- rv = appendOp(rv, p.Put)
- rv = appendOp(rv, p.Post)
- rv = appendOp(rv, p.Delete)
- rv = appendOp(rv, p.Head)
- rv = appendOp(rv, p.Patch)
-
- return rv
-}
-
-func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
- if op == nil {
- return ops
- }
-
- return append(ops, op)
-}
-
-func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
- for k, v := range m.SecurityDefinitions {
- if _, exists := primary.SecurityDefinitions[k]; exists {
- warn := fmt.Sprintf(
- "SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
-
- continue
- }
-
- primary.SecurityDefinitions[k] = v
- }
-
- return
-}
-
-func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
- for _, v := range m.Security {
- found := false
- for _, vv := range primary.Security {
- if reflect.DeepEqual(v, vv) {
- found = true
-
- break
- }
- }
-
- if found {
- warn := fmt.Sprintf(
- "Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", v)
- skipped = append(skipped, warn)
-
- continue
- }
- primary.Security = append(primary.Security, v)
- }
-
- return
-}
-
-func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
- for k, v := range m.Definitions {
- // assume name collisions represent IDENTICAL type. careful.
- if _, exists := primary.Definitions[k]; exists {
- warn := fmt.Sprintf(
- "definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
-
- continue
- }
- primary.Definitions[k] = v
- }
-
- return
-}
-
-func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIDs map[string]bool, mixIndex int) (skipped []string) {
- if m.Paths != nil {
- for k, v := range m.Paths.Paths {
- if _, exists := primary.Paths.Paths[k]; exists {
- warn := fmt.Sprintf(
- "paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
-
- continue
- }
-
- // Swagger requires that operationIds be
- // unique within a spec. If we find a
- // collision we append "Mixin0" to the
-			// operationId we are adding, where 0 is the mixin
-			// index. We assume that operationIds within
-			// all the provided specs are already unique.
- piops := pathItemOps(v)
- for _, piop := range piops {
- if opIDs[piop.ID] {
- piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex)
- }
- opIDs[piop.ID] = true
- }
- primary.Paths.Paths[k] = v
- }
- }
-
- return
-}
-
-func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
- for k, v := range m.Parameters {
- // could try to rename on conflict but would
- // have to fix $refs in the mixin. Complain
- // for now
- if _, exists := primary.Parameters[k]; exists {
- warn := fmt.Sprintf(
- "top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
-
- continue
- }
- primary.Parameters[k] = v
- }
-
- return
-}
-
-func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
- for k, v := range m.Responses {
- // could try to rename on conflict but would
- // have to fix $refs in the mixin. Complain
- // for now
- if _, exists := primary.Responses[k]; exists {
- warn := fmt.Sprintf(
- "top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
-
- continue
- }
- primary.Responses[k] = v
- }
-
- return skipped
-}
-
-func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) []string {
- for _, v := range m.Consumes {
- found := false
- for _, vv := range primary.Consumes {
- if v == vv {
- found = true
-
- break
- }
- }
-
- if found {
- // no warning here: we just skip it
- continue
- }
- primary.Consumes = append(primary.Consumes, v)
- }
-
- return []string{}
-}
-
-func mergeProduces(primary *spec.Swagger, m *spec.Swagger) []string {
- for _, v := range m.Produces {
- found := false
- for _, vv := range primary.Produces {
- if v == vv {
- found = true
-
- break
- }
- }
-
- if found {
- // no warning here: we just skip it
- continue
- }
- primary.Produces = append(primary.Produces, v)
- }
-
- return []string{}
-}
-
-func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
- for _, v := range m.Tags {
- found := false
- for _, vv := range primary.Tags {
- if v.Name == vv.Name {
- found = true
-
- break
- }
- }
-
- if found {
- warn := fmt.Sprintf(
- "top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n",
- v.Name,
- )
- skipped = append(skipped, warn)
-
- continue
- }
-
- primary.Tags = append(primary.Tags, v)
- }
-
- return
-}
-
-func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) []string {
- for _, v := range m.Schemes {
- found := false
- for _, vv := range primary.Schemes {
- if v == vv {
- found = true
-
- break
- }
- }
-
- if found {
- // no warning here: we just skip it
- continue
- }
- primary.Schemes = append(primary.Schemes, v)
- }
-
- return []string{}
-}
-
-func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string {
- var skipped, skippedInfo, skippedDocs []string
-
- primary.Extensions, skipped = mergeExtensions(primary.Extensions, m.Extensions)
-
- // merging details in swagger top properties
- if primary.Host == "" {
- primary.Host = m.Host
- }
-
- if primary.BasePath == "" {
- primary.BasePath = m.BasePath
- }
-
- if primary.Info == nil {
- primary.Info = m.Info
- } else if m.Info != nil {
- skippedInfo = mergeInfo(primary.Info, m.Info)
- skipped = append(skipped, skippedInfo...)
- }
-
- if primary.ExternalDocs == nil {
- primary.ExternalDocs = m.ExternalDocs
-	} else if m.ExternalDocs != nil {
- skippedDocs = mergeExternalDocs(primary.ExternalDocs, m.ExternalDocs)
- skipped = append(skipped, skippedDocs...)
- }
-
- return skipped
-}
-
-//nolint:unparam
-func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string {
- if primary.Description == "" {
- primary.Description = m.Description
- }
-
- if primary.URL == "" {
- primary.URL = m.URL
- }
-
- return nil
-}
-
-func mergeInfo(primary *spec.Info, m *spec.Info) []string {
- var sk, skipped []string
-
- primary.Extensions, sk = mergeExtensions(primary.Extensions, m.Extensions)
- skipped = append(skipped, sk...)
-
- if primary.Description == "" {
- primary.Description = m.Description
- }
-
- if primary.Title == "" {
-		primary.Title = m.Title
- }
-
- if primary.TermsOfService == "" {
- primary.TermsOfService = m.TermsOfService
- }
-
- if primary.Version == "" {
- primary.Version = m.Version
- }
-
- if primary.Contact == nil {
- primary.Contact = m.Contact
- } else if m.Contact != nil {
- var csk []string
- primary.Contact.Extensions, csk = mergeExtensions(primary.Contact.Extensions, m.Contact.Extensions)
- skipped = append(skipped, csk...)
-
- if primary.Contact.Name == "" {
- primary.Contact.Name = m.Contact.Name
- }
-
- if primary.Contact.URL == "" {
- primary.Contact.URL = m.Contact.URL
- }
-
- if primary.Contact.Email == "" {
- primary.Contact.Email = m.Contact.Email
- }
- }
-
- if primary.License == nil {
- primary.License = m.License
- } else if m.License != nil {
- var lsk []string
- primary.License.Extensions, lsk = mergeExtensions(primary.License.Extensions, m.License.Extensions)
- skipped = append(skipped, lsk...)
-
- if primary.License.Name == "" {
- primary.License.Name = m.License.Name
- }
-
- if primary.License.URL == "" {
- primary.License.URL = m.License.URL
- }
- }
-
- return skipped
-}
-
-func mergeExtensions(primary spec.Extensions, m spec.Extensions) (result spec.Extensions, skipped []string) {
- if primary == nil {
- result = m
-
- return
- }
-
- if m == nil {
- result = primary
-
- return
- }
-
- result = primary
- for k, v := range m {
- if _, found := primary[k]; found {
- skipped = append(skipped, k)
-
- continue
- }
-
- primary[k] = v
- }
-
- return
-}
-
-func initPrimary(primary *spec.Swagger) {
- if primary.SecurityDefinitions == nil {
- primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme)
- }
-
- if primary.Security == nil {
- primary.Security = make([]map[string][]string, 0, 10)
- }
-
- if primary.Produces == nil {
- primary.Produces = make([]string, 0, 10)
- }
-
- if primary.Consumes == nil {
- primary.Consumes = make([]string, 0, 10)
- }
-
- if primary.Tags == nil {
- primary.Tags = make([]spec.Tag, 0, 10)
- }
-
- if primary.Schemes == nil {
- primary.Schemes = make([]string, 0, 10)
- }
-
- if primary.Paths == nil {
- primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
- }
-
- if primary.Paths.Paths == nil {
- primary.Paths.Paths = make(map[string]spec.PathItem)
- }
-
- if primary.Definitions == nil {
- primary.Definitions = make(spec.Definitions)
- }
-
- if primary.Parameters == nil {
- primary.Parameters = make(map[string]spec.Parameter)
- }
-
- if primary.Responses == nil {
- primary.Responses = make(map[string]spec.Response)
- }
-}
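For context, the removed Mixin merges secondary specs into a primary one and reports collisions as strings; a hypothetical caller (primary and extra loaded elsewhere) would use it like this:

    collisions := analysis.Mixin(primary, extra)
    for _, msg := range collisions {
        log.Printf("mixin skipped: %s", msg)
    }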
diff --git a/vendor/github.com/go-openapi/analysis/schema.go b/vendor/github.com/go-openapi/analysis/schema.go
deleted file mode 100644
index ab190db5b7..0000000000
--- a/vendor/github.com/go-openapi/analysis/schema.go
+++ /dev/null
@@ -1,256 +0,0 @@
-package analysis
-
-import (
- "errors"
-
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-// SchemaOpts configures the schema analyzer
-type SchemaOpts struct {
- Schema *spec.Schema
- Root interface{}
- BasePath string
- _ struct{}
-}
-
-// Schema analyzes the given schema and classifies it according to known
-// patterns.
-func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
- if opts.Schema == nil {
- return nil, errors.New("no schema to analyze")
- }
-
- a := &AnalyzedSchema{
- schema: opts.Schema,
- root: opts.Root,
- basePath: opts.BasePath,
- }
-
- a.initializeFlags()
- a.inferKnownType()
- a.inferEnum()
- a.inferBaseType()
-
- if err := a.inferMap(); err != nil {
- return nil, err
- }
- if err := a.inferArray(); err != nil {
- return nil, err
- }
-
- a.inferTuple()
-
- if err := a.inferFromRef(); err != nil {
- return nil, err
- }
-
- a.inferSimpleSchema()
-
- return a, nil
-}
-
-// AnalyzedSchema indicates what the schema represents
-type AnalyzedSchema struct {
- schema *spec.Schema
- root interface{}
- basePath string
-
- hasProps bool
- hasAllOf bool
- hasItems bool
- hasAdditionalProps bool
- hasAdditionalItems bool
- hasRef bool
-
- IsKnownType bool
- IsSimpleSchema bool
- IsArray bool
- IsSimpleArray bool
- IsMap bool
- IsSimpleMap bool
- IsExtendedObject bool
- IsTuple bool
- IsTupleWithExtra bool
- IsBaseType bool
- IsEnum bool
-}
-
-// inherits copies value fields from other onto this schema
-func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
- if other == nil {
- return
- }
- a.hasProps = other.hasProps
- a.hasAllOf = other.hasAllOf
- a.hasItems = other.hasItems
- a.hasAdditionalItems = other.hasAdditionalItems
- a.hasAdditionalProps = other.hasAdditionalProps
- a.hasRef = other.hasRef
-
- a.IsKnownType = other.IsKnownType
- a.IsSimpleSchema = other.IsSimpleSchema
- a.IsArray = other.IsArray
- a.IsSimpleArray = other.IsSimpleArray
- a.IsMap = other.IsMap
- a.IsSimpleMap = other.IsSimpleMap
- a.IsExtendedObject = other.IsExtendedObject
- a.IsTuple = other.IsTuple
- a.IsTupleWithExtra = other.IsTupleWithExtra
- a.IsBaseType = other.IsBaseType
- a.IsEnum = other.IsEnum
-}
-
-func (a *AnalyzedSchema) inferFromRef() error {
- if a.hasRef {
- sch := new(spec.Schema)
- sch.Ref = a.schema.Ref
- err := spec.ExpandSchema(sch, a.root, nil)
- if err != nil {
- return err
- }
- rsch, err := Schema(SchemaOpts{
- Schema: sch,
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- // NOTE(fredbi): currently the only cause for errors is
- // unresolved ref. Since spec.ExpandSchema() expands the
- // schema recursively, there is no chance to get there,
- // until we add more causes for error in this schema analysis.
- return err
- }
- a.inherits(rsch)
- }
-
- return nil
-}
-
-func (a *AnalyzedSchema) inferSimpleSchema() {
- a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
-}
-
-func (a *AnalyzedSchema) inferKnownType() {
- tpe := a.schema.Type
- format := a.schema.Format
- a.IsKnownType = tpe.Contains("boolean") ||
- tpe.Contains("integer") ||
- tpe.Contains("number") ||
- tpe.Contains("string") ||
- (format != "" && strfmt.Default.ContainsName(format)) ||
- (a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
-}
-
-func (a *AnalyzedSchema) inferMap() error {
- if !a.isObjectType() {
- return nil
- }
-
- hasExtra := a.hasProps || a.hasAllOf
- a.IsMap = a.hasAdditionalProps && !hasExtra
- a.IsExtendedObject = a.hasAdditionalProps && hasExtra
-
- if !a.IsMap {
- return nil
- }
-
- // maps
- if a.schema.AdditionalProperties.Schema != nil {
- msch, err := Schema(SchemaOpts{
- Schema: a.schema.AdditionalProperties.Schema,
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- return err
- }
- a.IsSimpleMap = msch.IsSimpleSchema
- } else if a.schema.AdditionalProperties.Allows {
- a.IsSimpleMap = true
- }
-
- return nil
-}
-
-func (a *AnalyzedSchema) inferArray() error {
- // an array has Items defined as an object schema, otherwise we qualify this JSON array as a tuple
- // (yes, even if the Items array contains only one element).
- // arrays in JSON schema may be unrestricted (i.e no Items specified).
- // Note that arrays in Swagger MUST have Items. Nonetheless, we analyze unrestricted arrays.
- //
- // NOTE: the spec package misses the distinction between:
- // items: [] and items: {}, so we consider both arrays here.
- a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil)
- if a.IsArray && a.hasItems {
- if a.schema.Items.Schema != nil {
- itsch, err := Schema(SchemaOpts{
- Schema: a.schema.Items.Schema,
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- return err
- }
-
- a.IsSimpleArray = itsch.IsSimpleSchema
- }
- }
-
- if a.IsArray && !a.hasItems {
- a.IsSimpleArray = true
- }
-
- return nil
-}
-
-func (a *AnalyzedSchema) inferTuple() {
- tuple := a.hasItems && a.schema.Items.Schemas != nil
- a.IsTuple = tuple && !a.hasAdditionalItems
- a.IsTupleWithExtra = tuple && a.hasAdditionalItems
-}
-
-func (a *AnalyzedSchema) inferBaseType() {
- if a.isObjectType() {
- a.IsBaseType = a.schema.Discriminator != ""
- }
-}
-
-func (a *AnalyzedSchema) inferEnum() {
- a.IsEnum = len(a.schema.Enum) > 0
-}
-
-func (a *AnalyzedSchema) initializeFlags() {
- a.hasProps = len(a.schema.Properties) > 0
- a.hasAllOf = len(a.schema.AllOf) > 0
- a.hasRef = a.schema.Ref.String() != ""
-
- a.hasItems = a.schema.Items != nil &&
- (a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
-
- a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
- (a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)
-
- a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
- (a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
-}
-
-func (a *AnalyzedSchema) isObjectType() bool {
- return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
-}
-
-func (a *AnalyzedSchema) isArrayType() bool {
- return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
-}
-
-// isAnalyzedAsComplex determines if an analyzed schema is eligible for flattening (i.e. it is "complex").
-//
-// Complex means the schema is none of:
-//  - a simple type (primitive)
-//  - an array of something (items are possibly complex; if this is the case, items will generate a definition)
-//  - a map of something (additionalProperties are possibly complex; if this is the case, additionalProperties will
-//    generate a definition)
-func (a *AnalyzedSchema) isAnalyzedAsComplex() bool {
- return !a.IsSimpleSchema && !a.IsArray && !a.IsMap
-}
diff --git a/vendor/github.com/go-openapi/errors/.gitattributes b/vendor/github.com/go-openapi/errors/.gitattributes
deleted file mode 100644
index a0717e4b3b..0000000000
--- a/vendor/github.com/go-openapi/errors/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-*.go text eol=lf
\ No newline at end of file
diff --git a/vendor/github.com/go-openapi/errors/.gitignore b/vendor/github.com/go-openapi/errors/.gitignore
deleted file mode 100644
index dd91ed6a04..0000000000
--- a/vendor/github.com/go-openapi/errors/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-secrets.yml
-coverage.out
diff --git a/vendor/github.com/go-openapi/errors/.golangci.yml b/vendor/github.com/go-openapi/errors/.golangci.yml
deleted file mode 100644
index ee8b9bd1f1..0000000000
--- a/vendor/github.com/go-openapi/errors/.golangci.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-linters-settings:
- gocyclo:
- min-complexity: 45
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- #- deadcode
- #- interfacer
- #- scopelint
- #- varcheck
- #- structcheck
- #- golint
- #- nosnakecase
- #- maligned
- #- goerr113
- #- ifshort
- #- gomnd
- #- exhaustivestruct
diff --git a/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/errors/LICENSE b/vendor/github.com/go-openapi/errors/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/errors/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/errors/README.md b/vendor/github.com/go-openapi/errors/README.md
deleted file mode 100644
index 6d57ea55c7..0000000000
--- a/vendor/github.com/go-openapi/errors/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-# OpenAPI errors [Build Status](https://github.com/go-openapi/errors/actions?query=workflow%3A"go+test") [Coverage](https://codecov.io/gh/go-openapi/errors)
-
-[Slack Status](https://slackin.goswagger.io)
-[License](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE)
-[Go Reference](https://pkg.go.dev/github.com/go-openapi/errors)
-[Go Report Card](https://goreportcard.com/report/github.com/go-openapi/errors)
-
-Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit.
diff --git a/vendor/github.com/go-openapi/errors/api.go b/vendor/github.com/go-openapi/errors/api.go
deleted file mode 100644
index d6f507f42d..0000000000
--- a/vendor/github.com/go-openapi/errors/api.go
+++ /dev/null
@@ -1,192 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
- "reflect"
- "strings"
-)
-
-// DefaultHTTPCode is used when the error Code cannot be used as an HTTP code.
-var DefaultHTTPCode = http.StatusUnprocessableEntity
-
-// Error represents an error interface that all swagger framework errors implement
-type Error interface {
- error
- Code() int32
-}
-
-type apiError struct {
- code int32
- message string
-}
-
-func (a *apiError) Error() string {
- return a.message
-}
-
-func (a *apiError) Code() int32 {
- return a.code
-}
-
-// MarshalJSON implements the JSON encoding interface
-func (a apiError) MarshalJSON() ([]byte, error) {
- return json.Marshal(map[string]interface{}{
- "code": a.code,
- "message": a.message,
- })
-}
-
-// New creates a new API error with a code and a message
-func New(code int32, message string, args ...interface{}) Error {
- if len(args) > 0 {
- return &apiError{
- code: code,
- message: fmt.Sprintf(message, args...),
- }
- }
- return &apiError{
- code: code,
- message: message,
- }
-}
-
-// NotFound creates a new not found error
-func NotFound(message string, args ...interface{}) Error {
- if message == "" {
- message = "Not found"
- }
- return New(http.StatusNotFound, fmt.Sprintf(message, args...))
-}
-
-// NotImplemented creates a new not implemented error
-func NotImplemented(message string) Error {
- return New(http.StatusNotImplemented, message)
-}
-
-// MethodNotAllowedError represents an error for when the path matches but the method doesn't
-type MethodNotAllowedError struct {
- code int32
- Allowed []string
- message string
-}
-
-func (m *MethodNotAllowedError) Error() string {
- return m.message
-}
-
-// Code the error code
-func (m *MethodNotAllowedError) Code() int32 {
- return m.code
-}
-
-// MarshalJSON implements the JSON encoding interface
-func (m MethodNotAllowedError) MarshalJSON() ([]byte, error) {
- return json.Marshal(map[string]interface{}{
- "code": m.code,
- "message": m.message,
- "allowed": m.Allowed,
- })
-}
-
-func errorAsJSON(err Error) []byte {
- //nolint:errchkjson
- b, _ := json.Marshal(struct {
- Code int32 `json:"code"`
- Message string `json:"message"`
- }{err.Code(), err.Error()})
- return b
-}
-
-func flattenComposite(errs *CompositeError) *CompositeError {
- var res []error
- for _, er := range errs.Errors {
- switch e := er.(type) {
- case *CompositeError:
- if e != nil && len(e.Errors) > 0 {
- flat := flattenComposite(e)
- if len(flat.Errors) > 0 {
- res = append(res, flat.Errors...)
- }
- }
- default:
- if e != nil {
- res = append(res, e)
- }
- }
- }
- return CompositeValidationError(res...)
-}
-
-// MethodNotAllowed creates a new method not allowed error
-func MethodNotAllowed(requested string, allow []string) Error {
- msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ","))
- return &MethodNotAllowedError{
- code: http.StatusMethodNotAllowed,
- Allowed: allow,
- message: msg,
- }
-}
-
-// ServeError implements the http error handler interface
-func ServeError(rw http.ResponseWriter, r *http.Request, err error) {
- rw.Header().Set("Content-Type", "application/json")
- switch e := err.(type) {
- case *CompositeError:
- er := flattenComposite(e)
- // strips composite errors to first element only
- if len(er.Errors) > 0 {
- ServeError(rw, r, er.Errors[0])
- } else {
- // guard against empty CompositeError (invalid construct)
- ServeError(rw, r, nil)
- }
- case *MethodNotAllowedError:
- rw.Header().Add("Allow", strings.Join(e.Allowed, ","))
- rw.WriteHeader(asHTTPCode(int(e.Code())))
- if r == nil || r.Method != http.MethodHead {
- _, _ = rw.Write(errorAsJSON(e))
- }
- case Error:
- value := reflect.ValueOf(e)
- if value.Kind() == reflect.Ptr && value.IsNil() {
- rw.WriteHeader(http.StatusInternalServerError)
- _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
- return
- }
- rw.WriteHeader(asHTTPCode(int(e.Code())))
- if r == nil || r.Method != http.MethodHead {
- _, _ = rw.Write(errorAsJSON(e))
- }
- case nil:
- rw.WriteHeader(http.StatusInternalServerError)
- _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
- default:
- rw.WriteHeader(http.StatusInternalServerError)
- if r == nil || r.Method != http.MethodHead {
- _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, err.Error())))
- }
- }
-}
-
-func asHTTPCode(input int) int {
- if input >= maximumValidHTTPCode {
- return DefaultHTTPCode
- }
- return input
-}
diff --git a/vendor/github.com/go-openapi/errors/auth.go b/vendor/github.com/go-openapi/errors/auth.go
deleted file mode 100644
index 0545b501bd..0000000000
--- a/vendor/github.com/go-openapi/errors/auth.go
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import "net/http"
-
-// Unauthenticated returns an unauthenticated error
-func Unauthenticated(scheme string) Error {
- return New(http.StatusUnauthorized, "unauthenticated for %s", scheme)
-}
diff --git a/vendor/github.com/go-openapi/errors/doc.go b/vendor/github.com/go-openapi/errors/doc.go
deleted file mode 100644
index af01190ce6..0000000000
--- a/vendor/github.com/go-openapi/errors/doc.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/*
-Package errors provides an Error interface and several concrete types
-implementing this interface to manage API errors and JSON-schema validation
-errors.
-
-A middleware handler ServeError() is provided to serve the errors types
-it defines.
-
-It is used throughout the various go-openapi toolkit libraries
-(https://github.com/go-openapi).
-*/
-package errors
diff --git a/vendor/github.com/go-openapi/errors/headers.go b/vendor/github.com/go-openapi/errors/headers.go
deleted file mode 100644
index 6ea1151f41..0000000000
--- a/vendor/github.com/go-openapi/errors/headers.go
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
-)
-
-// Validation represents a failure of a precondition
-type Validation struct { //nolint: errname
- code int32
- Name string
- In string
- Value interface{}
- message string
- Values []interface{}
-}
-
-func (e *Validation) Error() string {
- return e.message
-}
-
-// Code the error code
-func (e *Validation) Code() int32 {
- return e.code
-}
-
-// MarshalJSON implements the JSON encoding interface
-func (e Validation) MarshalJSON() ([]byte, error) {
- return json.Marshal(map[string]interface{}{
- "code": e.code,
- "message": e.message,
- "in": e.In,
- "name": e.Name,
- "value": e.Value,
- "values": e.Values,
- })
-}
-
-// ValidateName sets the name for a validation or updates it for a nested property
-func (e *Validation) ValidateName(name string) *Validation {
- if name != "" {
- if e.Name == "" {
- e.Name = name
- e.message = name + e.message
- } else {
- e.Name = name + "." + e.Name
- e.message = name + "." + e.message
- }
- }
- return e
-}
-
-const (
- contentTypeFail = `unsupported media type %q, only %v are allowed`
- responseFormatFail = `unsupported media type requested, only %v are available`
-)
-
-// InvalidContentType error for an invalid content type
-func InvalidContentType(value string, allowed []string) *Validation {
- values := make([]interface{}, 0, len(allowed))
- for _, v := range allowed {
- values = append(values, v)
- }
- return &Validation{
- code: http.StatusUnsupportedMediaType,
- Name: "Content-Type",
- In: "header",
- Value: value,
- Values: values,
- message: fmt.Sprintf(contentTypeFail, value, allowed),
- }
-}
-
-// InvalidResponseFormat error for an unacceptable response format request
-func InvalidResponseFormat(value string, allowed []string) *Validation {
- values := make([]interface{}, 0, len(allowed))
- for _, v := range allowed {
- values = append(values, v)
- }
- return &Validation{
- code: http.StatusNotAcceptable,
- Name: "Accept",
- In: "header",
- Value: value,
- Values: values,
- message: fmt.Sprintf(responseFormatFail, allowed),
- }
-}
diff --git a/vendor/github.com/go-openapi/errors/middleware.go b/vendor/github.com/go-openapi/errors/middleware.go
deleted file mode 100644
index 67f80386a2..0000000000
--- a/vendor/github.com/go-openapi/errors/middleware.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "bytes"
- "fmt"
- "strings"
-)
-
-// APIVerificationFailed is an error that contains all the missing info for a mismatched section
-// between the api registrations and the api spec
-type APIVerificationFailed struct { //nolint: errname
- Section string `json:"section,omitempty"`
- MissingSpecification []string `json:"missingSpecification,omitempty"`
- MissingRegistration []string `json:"missingRegistration,omitempty"`
-}
-
-func (v *APIVerificationFailed) Error() string {
- buf := bytes.NewBuffer(nil)
-
- hasRegMissing := len(v.MissingRegistration) > 0
- hasSpecMissing := len(v.MissingSpecification) > 0
-
- if hasRegMissing {
- buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section))
- }
-
- if hasRegMissing && hasSpecMissing {
- buf.WriteString("\n")
- }
-
- if hasSpecMissing {
- buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section))
- }
-
- return buf.String()
-}
diff --git a/vendor/github.com/go-openapi/errors/parsing.go b/vendor/github.com/go-openapi/errors/parsing.go
deleted file mode 100644
index ce1ef9cb67..0000000000
--- a/vendor/github.com/go-openapi/errors/parsing.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
-)
-
-// ParseError represents a parsing error
-type ParseError struct {
- code int32
- Name string
- In string
- Value string
- Reason error
- message string
-}
-
-func (e *ParseError) Error() string {
- return e.message
-}
-
-// Code returns the http status code for this error
-func (e *ParseError) Code() int32 {
- return e.code
-}
-
-// MarshalJSON implements the JSON encoding interface
-func (e ParseError) MarshalJSON() ([]byte, error) {
- var reason string
- if e.Reason != nil {
- reason = e.Reason.Error()
- }
- return json.Marshal(map[string]interface{}{
- "code": e.code,
- "message": e.message,
- "in": e.In,
- "name": e.Name,
- "value": e.Value,
- "reason": reason,
- })
-}
-
-const (
- parseErrorTemplContent = `parsing %s %s from %q failed, because %s`
- parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s`
-)
-
-// NewParseError creates a new parse error
-func NewParseError(name, in, value string, reason error) *ParseError {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason)
- } else {
- msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason)
- }
- return &ParseError{
- code: http.StatusBadRequest,
- Name: name,
- In: in,
- Value: value,
- Reason: reason,
- message: msg,
- }
-}
diff --git a/vendor/github.com/go-openapi/errors/schema.go b/vendor/github.com/go-openapi/errors/schema.go
deleted file mode 100644
index 8f3239dfd9..0000000000
--- a/vendor/github.com/go-openapi/errors/schema.go
+++ /dev/null
@@ -1,619 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
- "strings"
-)
-
-const (
- invalidType = "%s is an invalid type name"
- typeFail = "%s in %s must be of type %s"
- typeFailWithData = "%s in %s must be of type %s: %q"
- typeFailWithError = "%s in %s must be of type %s, because: %s"
- requiredFail = "%s in %s is required"
- readOnlyFail = "%s in %s is readOnly"
- tooLongMessage = "%s in %s should be at most %d chars long"
- tooShortMessage = "%s in %s should be at least %d chars long"
- patternFail = "%s in %s should match '%s'"
- enumFail = "%s in %s should be one of %v"
- multipleOfFail = "%s in %s should be a multiple of %v"
- maximumIncFail = "%s in %s should be less than or equal to %v"
- maximumExcFail = "%s in %s should be less than %v"
- minIncFail = "%s in %s should be greater than or equal to %v"
- minExcFail = "%s in %s should be greater than %v"
- uniqueFail = "%s in %s shouldn't contain duplicates"
- maximumItemsFail = "%s in %s should have at most %d items"
- minItemsFail = "%s in %s should have at least %d items"
- typeFailNoIn = "%s must be of type %s"
- typeFailWithDataNoIn = "%s must be of type %s: %q"
- typeFailWithErrorNoIn = "%s must be of type %s, because: %s"
- requiredFailNoIn = "%s is required"
- readOnlyFailNoIn = "%s is readOnly"
- tooLongMessageNoIn = "%s should be at most %d chars long"
- tooShortMessageNoIn = "%s should be at least %d chars long"
- patternFailNoIn = "%s should match '%s'"
- enumFailNoIn = "%s should be one of %v"
- multipleOfFailNoIn = "%s should be a multiple of %v"
- maximumIncFailNoIn = "%s should be less than or equal to %v"
- maximumExcFailNoIn = "%s should be less than %v"
- minIncFailNoIn = "%s should be greater than or equal to %v"
- minExcFailNoIn = "%s should be greater than %v"
- uniqueFailNoIn = "%s shouldn't contain duplicates"
- maximumItemsFailNoIn = "%s should have at most %d items"
- minItemsFailNoIn = "%s should have at least %d items"
- noAdditionalItems = "%s in %s can't have additional items"
- noAdditionalItemsNoIn = "%s can't have additional items"
- tooFewProperties = "%s in %s should have at least %d properties"
- tooFewPropertiesNoIn = "%s should have at least %d properties"
- tooManyProperties = "%s in %s should have at most %d properties"
- tooManyPropertiesNoIn = "%s should have at most %d properties"
- unallowedProperty = "%s.%s in %s is a forbidden property"
- unallowedPropertyNoIn = "%s.%s is a forbidden property"
- failedAllPatternProps = "%s.%s in %s failed all pattern properties"
- failedAllPatternPropsNoIn = "%s.%s failed all pattern properties"
- multipleOfMustBePositive = "factor MultipleOf declared for %s must be positive: %v"
-)
-
-const maximumValidHTTPCode = 600
-
-// All code responses can be used to differentiate errors for different handling
-// by the consuming program
-const (
- // CompositeErrorCode remains 422 for backwards-compatibility
- // and to separate it from validation errors with cause
- CompositeErrorCode = http.StatusUnprocessableEntity
-
- // InvalidTypeCode is used for any subclass of invalid types
- InvalidTypeCode = maximumValidHTTPCode + iota
- RequiredFailCode
- TooLongFailCode
- TooShortFailCode
- PatternFailCode
- EnumFailCode
- MultipleOfFailCode
- MaxFailCode
- MinFailCode
- UniqueFailCode
- MaxItemsFailCode
- MinItemsFailCode
- NoAdditionalItemsCode
- TooFewPropertiesCode
- TooManyPropertiesCode
- UnallowedPropertyCode
- FailedAllPatternPropsCode
- MultipleOfMustBePositiveCode
- ReadOnlyFailCode
-)
-
-// CompositeError is an error that groups several errors together
-type CompositeError struct {
- Errors []error
- code int32
- message string
-}
-
-// Code for this error
-func (c *CompositeError) Code() int32 {
- return c.code
-}
-
-func (c *CompositeError) Error() string {
- if len(c.Errors) > 0 {
- msgs := []string{c.message + ":"}
- for _, e := range c.Errors {
- msgs = append(msgs, e.Error())
- }
- return strings.Join(msgs, "\n")
- }
- return c.message
-}
-
-func (c *CompositeError) Unwrap() []error {
- return c.Errors
-}
-
-// MarshalJSON implements the JSON encoding interface
-func (c CompositeError) MarshalJSON() ([]byte, error) {
- return json.Marshal(map[string]interface{}{
- "code": c.code,
- "message": c.message,
- "errors": c.Errors,
- })
-}
-
-// CompositeValidationError an error to wrap a bunch of other errors
-func CompositeValidationError(errors ...error) *CompositeError {
- return &CompositeError{
- code: CompositeErrorCode,
- Errors: append(make([]error, 0, len(errors)), errors...),
- message: "validation failure list",
- }
-}
-
-// ValidateName recursively sets the name for all validations or updates them for nested properties
-func (c *CompositeError) ValidateName(name string) *CompositeError {
- for i, e := range c.Errors {
- if ve, ok := e.(*Validation); ok {
- c.Errors[i] = ve.ValidateName(name)
- } else if ce, ok := e.(*CompositeError); ok {
- c.Errors[i] = ce.ValidateName(name)
- }
- }
-
- return c
-}
-
-// FailedAllPatternProperties an error for when the property doesn't match a pattern
-func FailedAllPatternProperties(name, in, key string) *Validation {
- msg := fmt.Sprintf(failedAllPatternProps, name, key, in)
- if in == "" {
- msg = fmt.Sprintf(failedAllPatternPropsNoIn, name, key)
- }
- return &Validation{
- code: FailedAllPatternPropsCode,
- Name: name,
- In: in,
- Value: key,
- message: msg,
- }
-}
-
-// PropertyNotAllowed an error for when a property is not allowed
-func PropertyNotAllowed(name, in, key string) *Validation {
- msg := fmt.Sprintf(unallowedProperty, name, key, in)
- if in == "" {
- msg = fmt.Sprintf(unallowedPropertyNoIn, name, key)
- }
- return &Validation{
- code: UnallowedPropertyCode,
- Name: name,
- In: in,
- Value: key,
- message: msg,
- }
-}
-
-// TooFewProperties an error for an object with too few properties
-func TooFewProperties(name, in string, n int64) *Validation {
- msg := fmt.Sprintf(tooFewProperties, name, in, n)
- if in == "" {
- msg = fmt.Sprintf(tooFewPropertiesNoIn, name, n)
- }
- return &Validation{
- code: TooFewPropertiesCode,
- Name: name,
- In: in,
- Value: n,
- message: msg,
- }
-}
-
-// TooManyProperties an error for an object with too many properties
-func TooManyProperties(name, in string, n int64) *Validation {
- msg := fmt.Sprintf(tooManyProperties, name, in, n)
- if in == "" {
- msg = fmt.Sprintf(tooManyPropertiesNoIn, name, n)
- }
- return &Validation{
- code: TooManyPropertiesCode,
- Name: name,
- In: in,
- Value: n,
- message: msg,
- }
-}
-
-// AdditionalItemsNotAllowed an error for invalid additional items
-func AdditionalItemsNotAllowed(name, in string) *Validation {
- msg := fmt.Sprintf(noAdditionalItems, name, in)
- if in == "" {
- msg = fmt.Sprintf(noAdditionalItemsNoIn, name)
- }
- return &Validation{
- code: NoAdditionalItemsCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// InvalidCollectionFormat another flavor of invalid type error
-func InvalidCollectionFormat(name, in, format string) *Validation {
- return &Validation{
- code: InvalidTypeCode,
- Name: name,
- In: in,
- Value: format,
- message: fmt.Sprintf("the collection format %q is not supported for the %s param %q", format, in, name),
- }
-}
-
-// InvalidTypeName an error for when the type is invalid
-func InvalidTypeName(typeName string) *Validation {
- return &Validation{
- code: InvalidTypeCode,
- Value: typeName,
- message: fmt.Sprintf(invalidType, typeName),
- }
-}
-
-// InvalidType creates an error for when the type is invalid
-func InvalidType(name, in, typeName string, value interface{}) *Validation {
- var message string
-
- if in != "" {
- switch value.(type) {
- case string:
- message = fmt.Sprintf(typeFailWithData, name, in, typeName, value)
- case error:
- message = fmt.Sprintf(typeFailWithError, name, in, typeName, value)
- default:
- message = fmt.Sprintf(typeFail, name, in, typeName)
- }
- } else {
- switch value.(type) {
- case string:
- message = fmt.Sprintf(typeFailWithDataNoIn, name, typeName, value)
- case error:
- message = fmt.Sprintf(typeFailWithErrorNoIn, name, typeName, value)
- default:
- message = fmt.Sprintf(typeFailNoIn, name, typeName)
- }
- }
-
- return &Validation{
- code: InvalidTypeCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-
-}
-
-// DuplicateItems error for when an array contains duplicates
-func DuplicateItems(name, in string) *Validation {
- msg := fmt.Sprintf(uniqueFail, name, in)
- if in == "" {
- msg = fmt.Sprintf(uniqueFailNoIn, name)
- }
- return &Validation{
- code: UniqueFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// TooManyItems error for when an array contains too many items
-func TooManyItems(name, in string, maximum int64, value interface{}) *Validation {
- msg := fmt.Sprintf(maximumItemsFail, name, in, maximum)
- if in == "" {
- msg = fmt.Sprintf(maximumItemsFailNoIn, name, maximum)
- }
-
- return &Validation{
- code: MaxItemsFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// TooFewItems error for when an array contains too few items
-func TooFewItems(name, in string, minimum int64, value interface{}) *Validation {
- msg := fmt.Sprintf(minItemsFail, name, in, minimum)
- if in == "" {
- msg = fmt.Sprintf(minItemsFailNoIn, name, minimum)
- }
- return &Validation{
- code: MinItemsFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// ExceedsMaximumInt error for when maximum validation fails
-func ExceedsMaximumInt(name, in string, maximum int64, exclusive bool, value interface{}) *Validation {
- var message string
- if in == "" {
- m := maximumIncFailNoIn
- if exclusive {
- m = maximumExcFailNoIn
- }
- message = fmt.Sprintf(m, name, maximum)
- } else {
- m := maximumIncFail
- if exclusive {
- m = maximumExcFail
- }
- message = fmt.Sprintf(m, name, in, maximum)
- }
- return &Validation{
- code: MaxFailCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-}
-
-// ExceedsMaximumUint error for when maximum validation fails
-func ExceedsMaximumUint(name, in string, maximum uint64, exclusive bool, value interface{}) *Validation {
- var message string
- if in == "" {
- m := maximumIncFailNoIn
- if exclusive {
- m = maximumExcFailNoIn
- }
- message = fmt.Sprintf(m, name, maximum)
- } else {
- m := maximumIncFail
- if exclusive {
- m = maximumExcFail
- }
- message = fmt.Sprintf(m, name, in, maximum)
- }
- return &Validation{
- code: MaxFailCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-}
-
-// ExceedsMaximum error for when maximum validation fails
-func ExceedsMaximum(name, in string, maximum float64, exclusive bool, value interface{}) *Validation {
- var message string
- if in == "" {
- m := maximumIncFailNoIn
- if exclusive {
- m = maximumExcFailNoIn
- }
- message = fmt.Sprintf(m, name, maximum)
- } else {
- m := maximumIncFail
- if exclusive {
- m = maximumExcFail
- }
- message = fmt.Sprintf(m, name, in, maximum)
- }
- return &Validation{
- code: MaxFailCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-}
-
-// ExceedsMinimumInt error for when minimum validation fails
-func ExceedsMinimumInt(name, in string, minimum int64, exclusive bool, value interface{}) *Validation {
- var message string
- if in == "" {
- m := minIncFailNoIn
- if exclusive {
- m = minExcFailNoIn
- }
- message = fmt.Sprintf(m, name, minimum)
- } else {
- m := minIncFail
- if exclusive {
- m = minExcFail
- }
- message = fmt.Sprintf(m, name, in, minimum)
- }
- return &Validation{
- code: MinFailCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-}
-
-// ExceedsMinimumUint error for when minimum validation fails
-func ExceedsMinimumUint(name, in string, minimum uint64, exclusive bool, value interface{}) *Validation {
- var message string
- if in == "" {
- m := minIncFailNoIn
- if exclusive {
- m = minExcFailNoIn
- }
- message = fmt.Sprintf(m, name, minimum)
- } else {
- m := minIncFail
- if exclusive {
- m = minExcFail
- }
- message = fmt.Sprintf(m, name, in, minimum)
- }
- return &Validation{
- code: MinFailCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-}
-
-// ExceedsMinimum error for when minimum validation fails
-func ExceedsMinimum(name, in string, minimum float64, exclusive bool, value interface{}) *Validation {
- var message string
- if in == "" {
- m := minIncFailNoIn
- if exclusive {
- m = minExcFailNoIn
- }
- message = fmt.Sprintf(m, name, minimum)
- } else {
- m := minIncFail
- if exclusive {
- m = minExcFail
- }
- message = fmt.Sprintf(m, name, in, minimum)
- }
- return &Validation{
- code: MinFailCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-}
-
-// NotMultipleOf error for when multiple of validation fails
-func NotMultipleOf(name, in string, multiple, value interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(multipleOfFailNoIn, name, multiple)
- } else {
- msg = fmt.Sprintf(multipleOfFail, name, in, multiple)
- }
- return &Validation{
- code: MultipleOfFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// EnumFail error for when an enum validation fails
-func EnumFail(name, in string, value interface{}, values []interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(enumFailNoIn, name, values)
- } else {
- msg = fmt.Sprintf(enumFail, name, in, values)
- }
-
- return &Validation{
- code: EnumFailCode,
- Name: name,
- In: in,
- Value: value,
- Values: values,
- message: msg,
- }
-}
-
-// Required error for when a value is missing
-func Required(name, in string, value interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(requiredFailNoIn, name)
- } else {
- msg = fmt.Sprintf(requiredFail, name, in)
- }
- return &Validation{
- code: RequiredFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// ReadOnly error for when a value is present in request
-func ReadOnly(name, in string, value interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(readOnlyFailNoIn, name)
- } else {
- msg = fmt.Sprintf(readOnlyFail, name, in)
- }
- return &Validation{
- code: ReadOnlyFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// TooLong error for when a string is too long
-func TooLong(name, in string, maximum int64, value interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(tooLongMessageNoIn, name, maximum)
- } else {
- msg = fmt.Sprintf(tooLongMessage, name, in, maximum)
- }
- return &Validation{
- code: TooLongFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// TooShort error for when a string is too short
-func TooShort(name, in string, minimum int64, value interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(tooShortMessageNoIn, name, minimum)
- } else {
- msg = fmt.Sprintf(tooShortMessage, name, in, minimum)
- }
-
- return &Validation{
- code: TooShortFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// FailedPattern error for when a string fails a regex pattern match
-// the pattern that is returned is the ECMA syntax version of the pattern not the golang version.
-func FailedPattern(name, in, pattern string, value interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(patternFailNoIn, name, pattern)
- } else {
- msg = fmt.Sprintf(patternFail, name, in, pattern)
- }
-
- return &Validation{
- code: PatternFailCode,
- Name: name,
- In: in,
- Value: value,
- message: msg,
- }
-}
-
-// MultipleOfMustBePositive error for when a
-// multipleOf factor is negative
-func MultipleOfMustBePositive(name, in string, factor interface{}) *Validation {
- return &Validation{
- code: MultipleOfMustBePositiveCode,
- Name: name,
- In: in,
- Value: factor,
- message: fmt.Sprintf(multipleOfMustBePositive, name, factor),
- }
-}
diff --git a/vendor/github.com/go-openapi/jsonpointer/.editorconfig b/vendor/github.com/go-openapi/jsonpointer/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/jsonpointer/.gitignore b/vendor/github.com/go-openapi/jsonpointer/.gitignore
deleted file mode 100644
index 769c244007..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-secrets.yml
diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml
deleted file mode 100644
index d2fafb8a2b..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-linters-settings:
- gocyclo:
- min-complexity: 45
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - recvcheck
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- #- deadcode
- #- interfacer
- #- scopelint
- #- varcheck
- #- structcheck
- #- golint
- #- nosnakecase
- #- maligned
- #- goerr113
- #- ifshort
- #- gomnd
- #- exhaustivestruct
diff --git a/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/jsonpointer/LICENSE b/vendor/github.com/go-openapi/jsonpointer/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/jsonpointer/README.md b/vendor/github.com/go-openapi/jsonpointer/README.md
deleted file mode 100644
index 0108f1d572..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/README.md
+++ /dev/null
@@ -1,19 +0,0 @@
-# gojsonpointer [](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/jsonpointer)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/jsonpointer)
-[](https://goreportcard.com/report/github.com/go-openapi/jsonpointer)
-
-An implementation of JSON Pointer - Go language
-
-## Status
-Completed YES
-
-Tested YES
-
-## References
-http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
-
-### Note
-The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented.
diff --git a/vendor/github.com/go-openapi/jsonpointer/errors.go b/vendor/github.com/go-openapi/jsonpointer/errors.go
deleted file mode 100644
index b84343d9d7..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/errors.go
+++ /dev/null
@@ -1,18 +0,0 @@
-package jsonpointer
-
-type pointerError string
-
-func (e pointerError) Error() string {
- return string(e)
-}
-
-const (
- // ErrPointer is an error raised by the jsonpointer package
- ErrPointer pointerError = "JSON pointer error"
-
- // ErrInvalidStart states that a JSON pointer must start with a separator ("/")
- ErrInvalidStart pointerError = `JSON pointer must be empty or start with a "` + pointerSeparator
-
- // ErrUnsupportedValueType indicates that a value of the wrong type is being set
- ErrUnsupportedValueType pointerError = "only structs, pointers, maps and slices are supported for setting values"
-)
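
For context, the errors.go being removed above declares its sentinels as a string-backed type so they can be `const`, while still matching through `errors.Is` after wrapping. A minimal standalone sketch of that pattern, using only the standard library (names here are illustrative, not the vendored API):

```go
package main

import (
	"errors"
	"fmt"
)

// constError is a string-backed error type; values of it can be declared
// const, unlike values returned by errors.New.
type constError string

func (e constError) Error() string { return string(e) }

const ErrExample constError = "example error"

func main() {
	wrapped := fmt.Errorf("lookup failed: %w", ErrExample)
	fmt.Println(errors.Is(wrapped, ErrExample)) // true
}
```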
diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go
deleted file mode 100644
index a08cd68ac0..0000000000
--- a/vendor/github.com/go-openapi/jsonpointer/pointer.go
+++ /dev/null
@@ -1,530 +0,0 @@
-// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author sigu-399
-// author-github https://github.com/sigu-399
-// author-mail sigu.399@gmail.com
-//
-// repository-name jsonpointer
-// repository-desc An implementation of JSON Pointer - Go language
-//
-// description Main and unique file.
-//
-// created 25-02-2013
-
-package jsonpointer
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "reflect"
- "strconv"
- "strings"
-
- "github.com/go-openapi/swag"
-)
-
-const (
- emptyPointer = ``
- pointerSeparator = `/`
-)
-
-var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem()
-var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem()
-
-// JSONPointable is an interface for structs to implement when they need to customize the
-// json pointer process
-type JSONPointable interface {
- JSONLookup(string) (any, error)
-}
-
-// JSONSetable is an interface for structs to implement when they need to customize the
-// json pointer process
-type JSONSetable interface {
- JSONSet(string, any) error
-}
-
-// New creates a new json pointer for the given string
-func New(jsonPointerString string) (Pointer, error) {
-
- var p Pointer
- err := p.parse(jsonPointerString)
- return p, err
-
-}
-
-// Pointer the json pointer reprsentation
-type Pointer struct {
- referenceTokens []string
-}
-
-// "Constructor", parses the given string JSON pointer
-func (p *Pointer) parse(jsonPointerString string) error {
-
- var err error
-
- if jsonPointerString != emptyPointer {
- if !strings.HasPrefix(jsonPointerString, pointerSeparator) {
- err = errors.Join(ErrInvalidStart, ErrPointer)
- } else {
- referenceTokens := strings.Split(jsonPointerString, pointerSeparator)
- p.referenceTokens = append(p.referenceTokens, referenceTokens[1:]...)
- }
- }
-
- return err
-}
-
-// Get uses the pointer to retrieve a value from a JSON document
-func (p *Pointer) Get(document any) (any, reflect.Kind, error) {
- return p.get(document, swag.DefaultJSONNameProvider)
-}
-
-// Set uses the pointer to set a value from a JSON document
-func (p *Pointer) Set(document any, value any) (any, error) {
- return document, p.set(document, value, swag.DefaultJSONNameProvider)
-}
-
-// GetForToken gets a value for a json pointer token 1 level deep
-func GetForToken(document any, decodedToken string) (any, reflect.Kind, error) {
- return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider)
-}
-
-// SetForToken gets a value for a json pointer token 1 level deep
-func SetForToken(document any, decodedToken string, value any) (any, error) {
- return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider)
-}
-
-func isNil(input any) bool {
- if input == nil {
- return true
- }
-
- kind := reflect.TypeOf(input).Kind()
- switch kind { //nolint:exhaustive
- case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan:
- return reflect.ValueOf(input).IsNil()
- default:
- return false
- }
-}
-
-func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvider) (any, reflect.Kind, error) {
- rValue := reflect.Indirect(reflect.ValueOf(node))
- kind := rValue.Kind()
- if isNil(node) {
- return nil, kind, fmt.Errorf("nil value has no field %q: %w", decodedToken, ErrPointer)
- }
-
- switch typed := node.(type) {
- case JSONPointable:
- r, err := typed.JSONLookup(decodedToken)
- if err != nil {
- return nil, kind, err
- }
- return r, kind, nil
- case *any: // case of a pointer to interface, that is not resolved by reflect.Indirect
- return getSingleImpl(*typed, decodedToken, nameProvider)
- }
-
- switch kind { //nolint:exhaustive
- case reflect.Struct:
- nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
- if !ok {
- return nil, kind, fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer)
- }
- fld := rValue.FieldByName(nm)
- return fld.Interface(), kind, nil
-
- case reflect.Map:
- kv := reflect.ValueOf(decodedToken)
- mv := rValue.MapIndex(kv)
-
- if mv.IsValid() {
- return mv.Interface(), kind, nil
- }
- return nil, kind, fmt.Errorf("object has no key %q: %w", decodedToken, ErrPointer)
-
- case reflect.Slice:
- tokenIndex, err := strconv.Atoi(decodedToken)
- if err != nil {
- return nil, kind, err
- }
- sLength := rValue.Len()
- if tokenIndex < 0 || tokenIndex >= sLength {
- return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", sLength-1, tokenIndex, ErrPointer)
- }
-
- elem := rValue.Index(tokenIndex)
- return elem.Interface(), kind, nil
-
- default:
- return nil, kind, fmt.Errorf("invalid token reference %q: %w", decodedToken, ErrPointer)
- }
-
-}
-
-func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error {
- rValue := reflect.Indirect(reflect.ValueOf(node))
-
- if ns, ok := node.(JSONSetable); ok { // pointer impl
- return ns.JSONSet(decodedToken, data)
- }
-
- if rValue.Type().Implements(jsonSetableType) {
- return node.(JSONSetable).JSONSet(decodedToken, data)
- }
-
- switch rValue.Kind() { //nolint:exhaustive
- case reflect.Struct:
- nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
- if !ok {
- return fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer)
- }
- fld := rValue.FieldByName(nm)
- if fld.IsValid() {
- fld.Set(reflect.ValueOf(data))
- }
- return nil
-
- case reflect.Map:
- kv := reflect.ValueOf(decodedToken)
- rValue.SetMapIndex(kv, reflect.ValueOf(data))
- return nil
-
- case reflect.Slice:
- tokenIndex, err := strconv.Atoi(decodedToken)
- if err != nil {
- return err
- }
- sLength := rValue.Len()
- if tokenIndex < 0 || tokenIndex >= sLength {
- return fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", sLength, tokenIndex, ErrPointer)
- }
-
- elem := rValue.Index(tokenIndex)
- if !elem.CanSet() {
- return fmt.Errorf("can't set slice index %s to %v: %w", decodedToken, data, ErrPointer)
- }
- elem.Set(reflect.ValueOf(data))
- return nil
-
- default:
- return fmt.Errorf("invalid token reference %q: %w", decodedToken, ErrPointer)
- }
-
-}
-
-func (p *Pointer) get(node any, nameProvider *swag.NameProvider) (any, reflect.Kind, error) {
-
- if nameProvider == nil {
- nameProvider = swag.DefaultJSONNameProvider
- }
-
- kind := reflect.Invalid
-
- // Full document when empty
- if len(p.referenceTokens) == 0 {
- return node, kind, nil
- }
-
- for _, token := range p.referenceTokens {
- decodedToken := Unescape(token)
-
- r, knd, err := getSingleImpl(node, decodedToken, nameProvider)
- if err != nil {
- return nil, knd, err
- }
- node = r
- }
-
- rValue := reflect.ValueOf(node)
- kind = rValue.Kind()
-
- return node, kind, nil
-}
-
-func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error {
- knd := reflect.ValueOf(node).Kind()
-
- if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array {
- return errors.Join(
- ErrUnsupportedValueType,
- ErrPointer,
- )
- }
-
- if nameProvider == nil {
- nameProvider = swag.DefaultJSONNameProvider
- }
-
- // Full document when empty
- if len(p.referenceTokens) == 0 {
- return nil
- }
-
- lastI := len(p.referenceTokens) - 1
- for i, token := range p.referenceTokens {
- isLastToken := i == lastI
- decodedToken := Unescape(token)
-
- if isLastToken {
-
- return setSingleImpl(node, data, decodedToken, nameProvider)
- }
-
- rValue := reflect.Indirect(reflect.ValueOf(node))
- kind := rValue.Kind()
-
- if rValue.Type().Implements(jsonPointableType) {
- r, err := node.(JSONPointable).JSONLookup(decodedToken)
- if err != nil {
- return err
- }
- fld := reflect.ValueOf(r)
- if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
- node = fld.Addr().Interface()
- continue
- }
- node = r
- continue
- }
-
- switch kind { //nolint:exhaustive
- case reflect.Struct:
- nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
- if !ok {
- return fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer)
- }
- fld := rValue.FieldByName(nm)
- if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
- node = fld.Addr().Interface()
- continue
- }
- node = fld.Interface()
-
- case reflect.Map:
- kv := reflect.ValueOf(decodedToken)
- mv := rValue.MapIndex(kv)
-
- if !mv.IsValid() {
- return fmt.Errorf("object has no key %q: %w", decodedToken, ErrPointer)
- }
- if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr {
- node = mv.Addr().Interface()
- continue
- }
- node = mv.Interface()
-
- case reflect.Slice:
- tokenIndex, err := strconv.Atoi(decodedToken)
- if err != nil {
- return err
- }
- sLength := rValue.Len()
- if tokenIndex < 0 || tokenIndex >= sLength {
- return fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", sLength, tokenIndex, ErrPointer)
- }
-
- elem := rValue.Index(tokenIndex)
- if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr {
- node = elem.Addr().Interface()
- continue
- }
- node = elem.Interface()
-
- default:
- return fmt.Errorf("invalid token reference %q: %w", decodedToken, ErrPointer)
- }
-
- }
-
- return nil
-}
-
-// DecodedTokens returns the decoded tokens
-func (p *Pointer) DecodedTokens() []string {
- result := make([]string, 0, len(p.referenceTokens))
- for _, t := range p.referenceTokens {
- result = append(result, Unescape(t))
- }
- return result
-}
-
-// IsEmpty returns true if this is an empty json pointer
-// this indicates that it points to the root document
-func (p *Pointer) IsEmpty() bool {
- return len(p.referenceTokens) == 0
-}
-
-// Pointer to string representation function
-func (p *Pointer) String() string {
-
- if len(p.referenceTokens) == 0 {
- return emptyPointer
- }
-
- pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator)
-
- return pointerString
-}
-
-func (p *Pointer) Offset(document string) (int64, error) {
- dec := json.NewDecoder(strings.NewReader(document))
- var offset int64
- for _, ttk := range p.DecodedTokens() {
- tk, err := dec.Token()
- if err != nil {
- return 0, err
- }
- switch tk := tk.(type) {
- case json.Delim:
- switch tk {
- case '{':
- offset, err = offsetSingleObject(dec, ttk)
- if err != nil {
- return 0, err
- }
- case '[':
- offset, err = offsetSingleArray(dec, ttk)
- if err != nil {
- return 0, err
- }
- default:
- return 0, fmt.Errorf("invalid token %#v: %w", tk, ErrPointer)
- }
- default:
- return 0, fmt.Errorf("invalid token %#v: %w", tk, ErrPointer)
- }
- }
- return offset, nil
-}
-
-func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) {
- for dec.More() {
- offset := dec.InputOffset()
- tk, err := dec.Token()
- if err != nil {
- return 0, err
- }
- switch tk := tk.(type) {
- case json.Delim:
- switch tk {
- case '{':
- if err = drainSingle(dec); err != nil {
- return 0, err
- }
- case '[':
- if err = drainSingle(dec); err != nil {
- return 0, err
- }
- }
- case string:
- if tk == decodedToken {
- return offset, nil
- }
- default:
- return 0, fmt.Errorf("invalid token %#v: %w", tk, ErrPointer)
- }
- }
- return 0, fmt.Errorf("token reference %q not found: %w", decodedToken, ErrPointer)
-}
-
-func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) {
- idx, err := strconv.Atoi(decodedToken)
- if err != nil {
- return 0, fmt.Errorf("token reference %q is not a number: %v: %w", decodedToken, err, ErrPointer)
- }
- var i int
- for i = 0; i < idx && dec.More(); i++ {
- tk, err := dec.Token()
- if err != nil {
- return 0, err
- }
-
- if delim, isDelim := tk.(json.Delim); isDelim {
- switch delim {
- case '{':
- if err = drainSingle(dec); err != nil {
- return 0, err
- }
- case '[':
- if err = drainSingle(dec); err != nil {
- return 0, err
- }
- }
- }
- }
-
- if !dec.More() {
- return 0, fmt.Errorf("token reference %q not found: %w", decodedToken, ErrPointer)
- }
- return dec.InputOffset(), nil
-}
-
-// drainSingle drains a single level of object or array.
-// The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed.
-func drainSingle(dec *json.Decoder) error {
- for dec.More() {
- tk, err := dec.Token()
- if err != nil {
- return err
- }
- if delim, isDelim := tk.(json.Delim); isDelim {
- switch delim {
- case '{':
- if err = drainSingle(dec); err != nil {
- return err
- }
- case '[':
- if err = drainSingle(dec); err != nil {
- return err
- }
- }
- }
- }
-
- // Consumes the ending delim
- if _, err := dec.Token(); err != nil {
- return err
- }
- return nil
-}
-
-// Specific JSON pointer encoding here
-// ~0 => ~
-// ~1 => /
-// ... and vice versa
-
-const (
- encRefTok0 = `~0`
- encRefTok1 = `~1`
- decRefTok0 = `~`
- decRefTok1 = `/`
-)
-
-// Unescape unescapes a json pointer reference token string to the original representation
-func Unescape(token string) string {
- step1 := strings.ReplaceAll(token, encRefTok1, decRefTok1)
- step2 := strings.ReplaceAll(step1, encRefTok0, decRefTok0)
- return step2
-}
-
-// Escape escapes a pointer reference token string
-func Escape(token string) string {
- step1 := strings.ReplaceAll(token, decRefTok0, encRefTok0)
- step2 := strings.ReplaceAll(step1, decRefTok1, encRefTok1)
- return step2
-}
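
The pointer.go removed above encodes reference tokens per RFC 6901: `~` becomes `~0` and `/` becomes `~1`. A minimal standalone sketch of that escaping, assuming only the standard library (function names are illustrative, not the vendored API):

```go
package main

import (
	"fmt"
	"strings"
)

// escapeToken applies the JSON Pointer encoding: "~" -> "~0", "/" -> "~1".
// "~" is replaced first so the "~1" produced for "/" is not itself re-escaped.
func escapeToken(token string) string {
	step1 := strings.ReplaceAll(token, "~", "~0")
	return strings.ReplaceAll(step1, "/", "~1")
}

// unescapeToken reverses the encoding: "~1" -> "/", then "~0" -> "~".
func unescapeToken(token string) string {
	step1 := strings.ReplaceAll(token, "~1", "/")
	return strings.ReplaceAll(step1, "~0", "~")
}

func main() {
	raw := "a/b~c"
	enc := escapeToken(raw)
	fmt.Println(enc)                // a~1b~0c
	fmt.Println(unescapeToken(enc)) // a/b~c
}
```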
diff --git a/vendor/github.com/go-openapi/jsonreference/.gitignore b/vendor/github.com/go-openapi/jsonreference/.gitignore
deleted file mode 100644
index 769c244007..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-secrets.yml
diff --git a/vendor/github.com/go-openapi/jsonreference/.golangci.yml b/vendor/github.com/go-openapi/jsonreference/.golangci.yml
deleted file mode 100644
index 22f8d21cca..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/.golangci.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-linters-settings:
- govet:
- check-shadowing: true
- golint:
- min-confidence: 0
- gocyclo:
- min-complexity: 45
- maligned:
- suggest-new: true
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - maligned
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - gomnd
- - exhaustivestruct
- - goerr113
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - ifshort
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- - deadcode
- - interfacer
- - scopelint
- - varcheck
- - structcheck
- - golint
- - nosnakecase
diff --git a/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/jsonreference/LICENSE b/vendor/github.com/go-openapi/jsonreference/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/jsonreference/README.md b/vendor/github.com/go-openapi/jsonreference/README.md
deleted file mode 100644
index c7fc2049c1..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/README.md
+++ /dev/null
@@ -1,19 +0,0 @@
-# gojsonreference [](https://github.com/go-openapi/jsonreference/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/jsonreference)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/jsonreference)
-[](https://goreportcard.com/report/github.com/go-openapi/jsonreference)
-
-An implementation of JSON Reference - Go language
-
-## Status
-Feature complete. Stable API
-
-## Dependencies
-* https://github.com/go-openapi/jsonpointer
-
-## References
-
-* http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
-* http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03
diff --git a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go
deleted file mode 100644
index f0610cf1e5..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package internal
-
-import (
- "net/url"
- "regexp"
- "strings"
-)
-
-const (
- defaultHTTPPort = ":80"
- defaultHTTPSPort = ":443"
-)
-
-// Regular expressions used by the normalizations
-var rxPort = regexp.MustCompile(`(:\d+)/?$`)
-var rxDupSlashes = regexp.MustCompile(`/{2,}`)
-
-// NormalizeURL will normalize the specified URL
-// This was added to replace a previous call to the no longer maintained purell library:
-// The call that was used looked like the following:
-//
-// url.Parse(purell.NormalizeURL(parsed, purell.FlagsSafe|purell.FlagRemoveDuplicateSlashes))
-//
-// To explain all that was included in the call above, purell.FlagsSafe was really just the following:
-// - FlagLowercaseScheme
-// - FlagLowercaseHost
-// - FlagRemoveDefaultPort
-// - FlagRemoveDuplicateSlashes (and this was mixed in with the |)
-//
-// This also normalizes the URL into its urlencoded form by removing RawPath and RawFragment.
-func NormalizeURL(u *url.URL) {
- lowercaseScheme(u)
- lowercaseHost(u)
- removeDefaultPort(u)
- removeDuplicateSlashes(u)
-
- u.RawPath = ""
- u.RawFragment = ""
-}
-
-func lowercaseScheme(u *url.URL) {
- if len(u.Scheme) > 0 {
- u.Scheme = strings.ToLower(u.Scheme)
- }
-}
-
-func lowercaseHost(u *url.URL) {
- if len(u.Host) > 0 {
- u.Host = strings.ToLower(u.Host)
- }
-}
-
-func removeDefaultPort(u *url.URL) {
- if len(u.Host) > 0 {
- scheme := strings.ToLower(u.Scheme)
- u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string {
- if (scheme == "http" && val == defaultHTTPPort) || (scheme == "https" && val == defaultHTTPSPort) {
- return ""
- }
- return val
- })
- }
-}
-
-func removeDuplicateSlashes(u *url.URL) {
- if len(u.Path) > 0 {
- u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/")
- }
-}
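
The normalize_url.go removed above lower-cases the scheme and host, strips default http/https ports, and collapses duplicate slashes in the path. A minimal self-contained sketch of the same normalization steps, using only the standard library (not the vendored internal API):

```go
package main

import (
	"fmt"
	"net/url"
	"regexp"
	"strings"
)

var (
	rxPort       = regexp.MustCompile(`(:\d+)/?$`)
	rxDupSlashes = regexp.MustCompile(`/{2,}`)
)

// normalize lower-cases scheme and host, drops the default :80/:443 port,
// and collapses runs of slashes in the path.
func normalize(u *url.URL) {
	u.Scheme = strings.ToLower(u.Scheme)
	u.Host = strings.ToLower(u.Host)
	u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string {
		if (u.Scheme == "http" && val == ":80") || (u.Scheme == "https" && val == ":443") {
			return ""
		}
		return val
	})
	u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/")
}

func main() {
	u, _ := url.Parse("HTTPS://Example.COM:443//a//b")
	normalize(u)
	fmt.Println(u.String()) // https://example.com/a/b
}
```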
diff --git a/vendor/github.com/go-openapi/jsonreference/reference.go b/vendor/github.com/go-openapi/jsonreference/reference.go
deleted file mode 100644
index cfdef03e5d..0000000000
--- a/vendor/github.com/go-openapi/jsonreference/reference.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// author sigu-399
-// author-github https://github.com/sigu-399
-// author-mail sigu.399@gmail.com
-//
-// repository-name jsonreference
-// repository-desc An implementation of JSON Reference - Go language
-//
-// description Main and unique file.
-//
-// created 26-02-2013
-
-package jsonreference
-
-import (
- "errors"
- "net/url"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/jsonreference/internal"
-)
-
-const (
- fragmentRune = `#`
-)
-
-// New creates a new reference for the given string
-func New(jsonReferenceString string) (Ref, error) {
-
- var r Ref
- err := r.parse(jsonReferenceString)
- return r, err
-
-}
-
-// MustCreateRef parses the ref string and panics when it's invalid.
-// Use the New method for a version that returns an error
-func MustCreateRef(ref string) Ref {
- r, err := New(ref)
- if err != nil {
- panic(err)
- }
- return r
-}
-
-// Ref represents a json reference object
-type Ref struct {
- referenceURL *url.URL
- referencePointer jsonpointer.Pointer
-
- HasFullURL bool
- HasURLPathOnly bool
- HasFragmentOnly bool
- HasFileScheme bool
- HasFullFilePath bool
-}
-
-// GetURL gets the URL for this reference
-func (r *Ref) GetURL() *url.URL {
- return r.referenceURL
-}
-
-// GetPointer gets the json pointer for this reference
-func (r *Ref) GetPointer() *jsonpointer.Pointer {
- return &r.referencePointer
-}
-
-// String returns the best version of the url for this reference
-func (r *Ref) String() string {
-
- if r.referenceURL != nil {
- return r.referenceURL.String()
- }
-
- if r.HasFragmentOnly {
- return fragmentRune + r.referencePointer.String()
- }
-
- return r.referencePointer.String()
-}
-
-// IsRoot returns true if this reference is a root document
-func (r *Ref) IsRoot() bool {
- return r.referenceURL != nil &&
- !r.IsCanonical() &&
- !r.HasURLPathOnly &&
- r.referenceURL.Fragment == ""
-}
-
-// IsCanonical returns true when this pointer starts with http(s):// or file://
-func (r *Ref) IsCanonical() bool {
- return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullURL)
-}
-
-// "Constructor", parses the given string JSON reference
-func (r *Ref) parse(jsonReferenceString string) error {
-
- parsed, err := url.Parse(jsonReferenceString)
- if err != nil {
- return err
- }
-
- internal.NormalizeURL(parsed)
-
- r.referenceURL = parsed
- refURL := r.referenceURL
-
- if refURL.Scheme != "" && refURL.Host != "" {
- r.HasFullURL = true
- } else {
- if refURL.Path != "" {
- r.HasURLPathOnly = true
- } else if refURL.RawQuery == "" && refURL.Fragment != "" {
- r.HasFragmentOnly = true
- }
- }
-
- r.HasFileScheme = refURL.Scheme == "file"
- r.HasFullFilePath = strings.HasPrefix(refURL.Path, "/")
-
- // invalid json-pointer error means url has no json-pointer fragment. simply ignore error
- r.referencePointer, _ = jsonpointer.New(refURL.Fragment)
-
- return nil
-}
-
-// Inherits creates a new reference from a parent and a child
-// If the child cannot inherit from the parent, an error is returned
-func (r *Ref) Inherits(child Ref) (*Ref, error) {
- childURL := child.GetURL()
- parentURL := r.GetURL()
- if childURL == nil {
- return nil, errors.New("child url is nil")
- }
- if parentURL == nil {
- return &child, nil
- }
-
- ref, err := New(parentURL.ResolveReference(childURL).String())
- if err != nil {
- return nil, err
- }
- return &ref, nil
-}
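
The Inherits helper in the reference.go removed above combines a parent and child reference via net/url's ResolveReference. A small standalone illustration of that resolution step (the URLs are hypothetical examples, not from the vendored code):

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	parent, _ := url.Parse("https://example.com/api/swagger.json")
	child, _ := url.Parse("definitions.json#/definitions/User")

	// ResolveReference interprets the child relative to the parent document,
	// which is how the removed Inherits helper combined references.
	resolved := parent.ResolveReference(child)
	fmt.Println(resolved.String())
	// https://example.com/api/definitions.json#/definitions/User
}
```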
diff --git a/vendor/github.com/go-openapi/loads/.editorconfig b/vendor/github.com/go-openapi/loads/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/loads/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/loads/.gitignore b/vendor/github.com/go-openapi/loads/.gitignore
deleted file mode 100644
index e4f15f17bf..0000000000
--- a/vendor/github.com/go-openapi/loads/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-secrets.yml
-coverage.out
-profile.cov
-profile.out
diff --git a/vendor/github.com/go-openapi/loads/.golangci.yml b/vendor/github.com/go-openapi/loads/.golangci.yml
deleted file mode 100644
index 22f8d21cca..0000000000
--- a/vendor/github.com/go-openapi/loads/.golangci.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-linters-settings:
- govet:
- check-shadowing: true
- golint:
- min-confidence: 0
- gocyclo:
- min-complexity: 45
- maligned:
- suggest-new: true
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - maligned
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - gomnd
- - exhaustivestruct
- - goerr113
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - ifshort
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- - deadcode
- - interfacer
- - scopelint
- - varcheck
- - structcheck
- - golint
- - nosnakecase
diff --git a/vendor/github.com/go-openapi/loads/.travis.yml b/vendor/github.com/go-openapi/loads/.travis.yml
deleted file mode 100644
index cd4a7c331b..0000000000
--- a/vendor/github.com/go-openapi/loads/.travis.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-after_success:
-- bash <(curl -s https://codecov.io/bash)
-go:
-- 1.16.x
-- 1.x
-install:
-- go get gotest.tools/gotestsum
-language: go
-arch:
-- amd64
-- ppc64le
-jobs:
- include:
- # include linting job, but only for latest go version and amd64 arch
- - go: 1.x
- arch: amd64
- install:
- go get github.com/golangci/golangci-lint/cmd/golangci-lint
- script:
- - golangci-lint run --new-from-rev master
-notifications:
- slack:
- secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM=
-script:
-- gotestsum -f short-verbose -- -race -timeout=20m -coverprofile=coverage.txt -covermode=atomic ./...
diff --git a/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/loads/LICENSE b/vendor/github.com/go-openapi/loads/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/loads/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/loads/README.md b/vendor/github.com/go-openapi/loads/README.md
deleted file mode 100644
index f8bd440dfc..0000000000
--- a/vendor/github.com/go-openapi/loads/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-# Loads OAI specs [](https://github.com/go-openapi/loads/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/loads)
-
-[](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [](http://godoc.org/github.com/go-openapi/loads)
-[](https://goreportcard.com/report/github.com/go-openapi/loads)
-
-Loading of OAI specification documents from local or remote locations. Supports JSON and YAML documents.
diff --git a/vendor/github.com/go-openapi/loads/doc.go b/vendor/github.com/go-openapi/loads/doc.go
deleted file mode 100644
index 5bcaef5dbc..0000000000
--- a/vendor/github.com/go-openapi/loads/doc.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package loads provides document loading methods for swagger (OAI) specifications.
-//
-// It is used by other go-openapi packages to load and run analysis on local or remote spec documents.
-package loads
diff --git a/vendor/github.com/go-openapi/loads/loaders.go b/vendor/github.com/go-openapi/loads/loaders.go
deleted file mode 100644
index b2d1e034c5..0000000000
--- a/vendor/github.com/go-openapi/loads/loaders.go
+++ /dev/null
@@ -1,133 +0,0 @@
-package loads
-
-import (
- "encoding/json"
- "errors"
- "net/url"
-
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-var (
- // Default chain of loaders, defined at the package level.
- //
- // By default this matches json and yaml documents.
- //
- // May be altered with AddLoader().
- loaders *loader
-)
-
-func init() {
- jsonLoader := &loader{
- DocLoaderWithMatch: DocLoaderWithMatch{
- Match: func(_ string) bool {
- return true
- },
- Fn: JSONDoc,
- },
- }
-
- loaders = jsonLoader.WithHead(&loader{
- DocLoaderWithMatch: DocLoaderWithMatch{
- Match: swag.YAMLMatcher,
- Fn: swag.YAMLDoc,
- },
- })
-
- // sets the global default loader for go-openapi/spec
- spec.PathLoader = loaders.Load
-}
-
-// DocLoader represents a doc loader type
-type DocLoader func(string) (json.RawMessage, error)
-
-// DocMatcher represents a predicate to check if a loader matches
-type DocMatcher func(string) bool
-
-// DocLoaderWithMatch describes a loading function for a given extension match.
-type DocLoaderWithMatch struct {
- Fn DocLoader
- Match DocMatcher
-}
-
-// NewDocLoaderWithMatch builds a DocLoaderWithMatch to be used in load options
-func NewDocLoaderWithMatch(fn DocLoader, matcher DocMatcher) DocLoaderWithMatch {
- return DocLoaderWithMatch{
- Fn: fn,
- Match: matcher,
- }
-}
-
-type loader struct {
- DocLoaderWithMatch
- Next *loader
-}
-
-// WithHead adds a loader at the head of the current stack
-func (l *loader) WithHead(head *loader) *loader {
- if head == nil {
- return l
- }
- head.Next = l
- return head
-}
-
-// WithNext adds a loader at the trail of the current stack
-func (l *loader) WithNext(next *loader) *loader {
- l.Next = next
- return next
-}
-
-// Load the raw document from path
-func (l *loader) Load(path string) (json.RawMessage, error) {
- _, erp := url.Parse(path)
- if erp != nil {
- return nil, erp
- }
-
- lastErr := errors.New("no loader matched") // default error if no match was found
- for ldr := l; ldr != nil; ldr = ldr.Next {
- if ldr.Match != nil && !ldr.Match(path) {
- continue
- }
-
- // try then move to next one if there is an error
- b, err := ldr.Fn(path)
- if err == nil {
- return b, nil
- }
-
- lastErr = err
- }
-
- return nil, lastErr
-}
-
-// JSONDoc loads a json document from either a file or a remote url
-func JSONDoc(path string) (json.RawMessage, error) {
- data, err := swag.LoadFromFileOrHTTP(path)
- if err != nil {
- return nil, err
- }
- return json.RawMessage(data), nil
-}
-
-// AddLoader for a document, executed before other previously set loaders.
-//
-// This sets the configuration at the package level.
-//
-// NOTE:
-// - this updates the default loader used by github.com/go-openapi/spec
-// - since this sets package level globals, you shouldn't call this concurrently
-func AddLoader(predicate DocMatcher, load DocLoader) {
- loaders = loaders.WithHead(&loader{
- DocLoaderWithMatch: DocLoaderWithMatch{
- Match: predicate,
- Fn: load,
- },
- })
-
- // sets the global default loader for go-openapi/spec
- spec.PathLoader = loaders.Load
-}
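
For reference, a minimal sketch of how the loader chain removed above was typically used, assuming the go-openapi/loads API exactly as vendored here; the file name "swagger.json" is a placeholder:

package main

import (
	"log"
	"strings"

	"github.com/go-openapi/loads"
)

func main() {
	// Put a JSON-only loader at the head of the default chain; AddLoader
	// also updates the global PathLoader used by go-openapi/spec.
	loads.AddLoader(func(path string) bool {
		return strings.HasSuffix(path, ".json")
	}, loads.JSONDoc)

	// JSONDoc loads a raw document from a local file or a remote URL.
	raw, err := loads.JSONDoc("swagger.json") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("loaded %d bytes", len(raw))
}
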
diff --git a/vendor/github.com/go-openapi/loads/options.go b/vendor/github.com/go-openapi/loads/options.go
deleted file mode 100644
index f8305d5607..0000000000
--- a/vendor/github.com/go-openapi/loads/options.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package loads
-
-type options struct {
- loader *loader
-}
-
-func defaultOptions() *options {
- return &options{
- loader: loaders,
- }
-}
-
-func loaderFromOptions(options []LoaderOption) *loader {
- opts := defaultOptions()
- for _, apply := range options {
- apply(opts)
- }
-
- return opts.loader
-}
-
-// LoaderOption allows fine-tuning the spec loader behavior
-type LoaderOption func(*options)
-
-// WithDocLoader sets a custom loader for loading specs
-func WithDocLoader(l DocLoader) LoaderOption {
- return func(opt *options) {
- if l == nil {
- return
- }
- opt.loader = &loader{
- DocLoaderWithMatch: DocLoaderWithMatch{
- Fn: l,
- },
- }
- }
-}
-
-// WithDocLoaderMatches sets a chain of custom loaders for loading specs
-// for different extension matches.
-//
-// Loaders are executed in the order of provided DocLoaderWithMatch'es.
-func WithDocLoaderMatches(l ...DocLoaderWithMatch) LoaderOption {
- return func(opt *options) {
- var final, prev *loader
- for _, ldr := range l {
- if ldr.Fn == nil {
- continue
- }
-
- if prev == nil {
- final = &loader{DocLoaderWithMatch: ldr}
- prev = final
- continue
- }
-
- prev = prev.WithNext(&loader{DocLoaderWithMatch: ldr})
- }
- opt.loader = final
- }
-}
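
A minimal sketch of the loader options removed above, assuming the Spec entry point from spec.go (removed below); "petstore.json" is a placeholder path:

package main

import (
	"log"
	"strings"

	"github.com/go-openapi/loads"
)

func main() {
	// Build a loader chain that only matches *.json files and use it for one call.
	jsonOnly := loads.WithDocLoaderMatches(
		loads.NewDocLoaderWithMatch(loads.JSONDoc, func(path string) bool {
			return strings.HasSuffix(path, ".json")
		}),
	)

	doc, err := loads.Spec("petstore.json", jsonOnly) // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	log.Println("loaded spec version:", doc.Version())
}
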
diff --git a/vendor/github.com/go-openapi/loads/spec.go b/vendor/github.com/go-openapi/loads/spec.go
deleted file mode 100644
index c9039cd5d7..0000000000
--- a/vendor/github.com/go-openapi/loads/spec.go
+++ /dev/null
@@ -1,275 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package loads
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/json"
- "fmt"
-
- "github.com/go-openapi/analysis"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-func init() {
- gob.Register(map[string]interface{}{})
- gob.Register([]interface{}{})
-}
-
-// Document represents a swagger spec document
-type Document struct {
- // specAnalyzer
- Analyzer *analysis.Spec
- spec *spec.Swagger
- specFilePath string
- origSpec *spec.Swagger
- schema *spec.Schema
- pathLoader *loader
- raw json.RawMessage
-}
-
-// JSONSpec loads a spec from a json document
-func JSONSpec(path string, options ...LoaderOption) (*Document, error) {
- data, err := JSONDoc(path)
- if err != nil {
- return nil, err
- }
- // convert to json
- doc, err := Analyzed(data, "", options...)
- if err != nil {
- return nil, err
- }
-
- doc.specFilePath = path
-
- return doc, nil
-}
-
-// Embedded returns a Document based on embedded specs. No analysis is required
-func Embedded(orig, flat json.RawMessage, options ...LoaderOption) (*Document, error) {
- var origSpec, flatSpec spec.Swagger
- if err := json.Unmarshal(orig, &origSpec); err != nil {
- return nil, err
- }
- if err := json.Unmarshal(flat, &flatSpec); err != nil {
- return nil, err
- }
- return &Document{
- raw: orig,
- origSpec: &origSpec,
- spec: &flatSpec,
- pathLoader: loaderFromOptions(options),
- }, nil
-}
-
-// Spec loads a new spec document from a local or remote path
-func Spec(path string, options ...LoaderOption) (*Document, error) {
- ldr := loaderFromOptions(options)
-
- b, err := ldr.Load(path)
- if err != nil {
- return nil, err
- }
-
- document, err := Analyzed(b, "", options...)
- if err != nil {
- return nil, err
- }
-
- document.specFilePath = path
- document.pathLoader = ldr
-
- return document, nil
-}
-
-// Analyzed creates a new analyzed spec document for a root json.RawMessage.
-func Analyzed(data json.RawMessage, version string, options ...LoaderOption) (*Document, error) {
- if version == "" {
- version = "2.0"
- }
- if version != "2.0" {
- return nil, fmt.Errorf("spec version %q is not supported", version)
- }
-
- raw, err := trimData(data) // trim blanks, then convert yaml docs into json
- if err != nil {
- return nil, err
- }
-
- swspec := new(spec.Swagger)
- if err = json.Unmarshal(raw, swspec); err != nil {
- return nil, err
- }
-
- origsqspec, err := cloneSpec(swspec)
- if err != nil {
- return nil, err
- }
-
- d := &Document{
- Analyzer: analysis.New(swspec), // NOTE: at this moment, analysis does not follow $refs to documents outside the root doc
- schema: spec.MustLoadSwagger20Schema(),
- spec: swspec,
- raw: raw,
- origSpec: origsqspec,
- pathLoader: loaderFromOptions(options),
- }
-
- return d, nil
-}
-
-func trimData(in json.RawMessage) (json.RawMessage, error) {
- trimmed := bytes.TrimSpace(in)
- if len(trimmed) == 0 {
- return in, nil
- }
-
- if trimmed[0] == '{' || trimmed[0] == '[' {
- return trimmed, nil
- }
-
- // assume yaml doc: convert it to json
- yml, err := swag.BytesToYAMLDoc(trimmed)
- if err != nil {
- return nil, fmt.Errorf("analyzed: %v", err)
- }
-
- d, err := swag.YAMLToJSON(yml)
- if err != nil {
- return nil, fmt.Errorf("analyzed: %v", err)
- }
-
- return d, nil
-}
-
-// Expanded expands the $ref fields in the spec document and returns a new spec document
-func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {
- swspec := new(spec.Swagger)
- if err := json.Unmarshal(d.raw, swspec); err != nil {
- return nil, err
- }
-
- var expandOptions *spec.ExpandOptions
- if len(options) > 0 {
- expandOptions = options[0]
- if expandOptions.RelativeBase == "" {
- expandOptions.RelativeBase = d.specFilePath
- }
- } else {
- expandOptions = &spec.ExpandOptions{
- RelativeBase: d.specFilePath,
- }
- }
-
- if expandOptions.PathLoader == nil {
- if d.pathLoader != nil {
- // use loader from Document options
- expandOptions.PathLoader = d.pathLoader.Load
- } else {
- // use package level loader
- expandOptions.PathLoader = loaders.Load
- }
- }
-
- if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
- return nil, err
- }
-
- dd := &Document{
- Analyzer: analysis.New(swspec),
- spec: swspec,
- specFilePath: d.specFilePath,
- schema: spec.MustLoadSwagger20Schema(),
- raw: d.raw,
- origSpec: d.origSpec,
- }
- return dd, nil
-}
-
-// BasePath the base path for the API specified by this spec
-func (d *Document) BasePath() string {
- return d.spec.BasePath
-}
-
-// Version returns the version of this spec
-func (d *Document) Version() string {
- return d.spec.Swagger
-}
-
-// Schema returns the swagger 2.0 schema
-func (d *Document) Schema() *spec.Schema {
- return d.schema
-}
-
-// Spec returns the swagger spec object model
-func (d *Document) Spec() *spec.Swagger {
- return d.spec
-}
-
-// Host returns the host for the API
-func (d *Document) Host() string {
- return d.spec.Host
-}
-
-// Raw returns the raw swagger spec as json bytes
-func (d *Document) Raw() json.RawMessage {
- return d.raw
-}
-
-// OrigSpec yields the original spec
-func (d *Document) OrigSpec() *spec.Swagger {
- return d.origSpec
-}
-
-// ResetDefinitions gives a shallow copy with the models reset to the original spec
-func (d *Document) ResetDefinitions() *Document {
- defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
- for k, v := range d.origSpec.Definitions {
- defs[k] = v
- }
-
- d.spec.Definitions = defs
- return d
-}
-
-// Pristine creates a new pristine document instance based on the input data
-func (d *Document) Pristine() *Document {
- raw, _ := json.Marshal(d.Spec())
- dd, _ := Analyzed(raw, d.Version())
- dd.pathLoader = d.pathLoader
- dd.specFilePath = d.specFilePath
-
- return dd
-}
-
-// SpecFilePath returns the file path of the spec if one is defined
-func (d *Document) SpecFilePath() string {
- return d.specFilePath
-}
-
-func cloneSpec(src *spec.Swagger) (*spec.Swagger, error) {
- var b bytes.Buffer
- if err := gob.NewEncoder(&b).Encode(src); err != nil {
- return nil, err
- }
-
- var dst spec.Swagger
- if err := gob.NewDecoder(&b).Decode(&dst); err != nil {
- return nil, err
- }
- return &dst, nil
-}
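
A minimal sketch exercising the Document API removed above (Analyzed and Expanded), assuming an in-memory Swagger 2.0 document; the inline JSON is illustrative only:

package main

import (
	"encoding/json"
	"log"

	"github.com/go-openapi/loads"
)

func main() {
	// Analyze an in-memory Swagger 2.0 document, then expand its $ref pointers.
	raw := json.RawMessage(`{"swagger": "2.0", "info": {"title": "demo", "version": "1.0"}, "paths": {}}`)

	doc, err := loads.Analyzed(raw, "2.0")
	if err != nil {
		log.Fatal(err)
	}

	expanded, err := doc.Expanded()
	if err != nil {
		log.Fatal(err)
	}
	log.Println("host:", expanded.Host(), "base path:", expanded.BasePath())
}
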
diff --git a/vendor/github.com/go-openapi/runtime/.editorconfig b/vendor/github.com/go-openapi/runtime/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/runtime/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/runtime/.gitattributes b/vendor/github.com/go-openapi/runtime/.gitattributes
deleted file mode 100644
index d207b1802b..0000000000
--- a/vendor/github.com/go-openapi/runtime/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/runtime/.gitignore b/vendor/github.com/go-openapi/runtime/.gitignore
deleted file mode 100644
index fea8b84eca..0000000000
--- a/vendor/github.com/go-openapi/runtime/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-secrets.yml
-coverage.out
-*.cov
-*.out
-playground
diff --git a/vendor/github.com/go-openapi/runtime/.golangci.yml b/vendor/github.com/go-openapi/runtime/.golangci.yml
deleted file mode 100644
index 1c75557bac..0000000000
--- a/vendor/github.com/go-openapi/runtime/.golangci.yml
+++ /dev/null
@@ -1,62 +0,0 @@
-linters-settings:
- govet:
- check-shadowing: true
- golint:
- min-confidence: 0
- gocyclo:
- min-complexity: 45
- maligned:
- suggest-new: true
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - nilerr # nilerr crashes on this repo
- - maligned
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - gomnd
- - exhaustivestruct
- - goerr113
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - ifshort
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- - deadcode
- - interfacer
- - scopelint
- - varcheck
- - structcheck
- - golint
- - nosnakecase
diff --git a/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/runtime/LICENSE b/vendor/github.com/go-openapi/runtime/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/runtime/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/runtime/README.md b/vendor/github.com/go-openapi/runtime/README.md
deleted file mode 100644
index b07e0ad9d6..0000000000
--- a/vendor/github.com/go-openapi/runtime/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# runtime [](https://github.com/go-openapi/runtime/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/runtime)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/runtime)
-[](https://goreportcard.com/report/github.com/go-openapi/runtime)
-
-# go OpenAPI toolkit runtime
-
-The runtime component for use in code generation or as untyped usage.
diff --git a/vendor/github.com/go-openapi/runtime/bytestream.go b/vendor/github.com/go-openapi/runtime/bytestream.go
deleted file mode 100644
index f8fb482232..0000000000
--- a/vendor/github.com/go-openapi/runtime/bytestream.go
+++ /dev/null
@@ -1,222 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "bytes"
- "encoding"
- "errors"
- "fmt"
- "io"
- "reflect"
-
- "github.com/go-openapi/swag"
-)
-
-func defaultCloser() error { return nil }
-
-type byteStreamOpt func(opts *byteStreamOpts)
-
-// ClosesStream when the bytestream consumer or producer is finished
-func ClosesStream(opts *byteStreamOpts) {
- opts.Close = true
-}
-
-type byteStreamOpts struct {
- Close bool
-}
-
-// ByteStreamConsumer creates a consumer for byte streams.
-//
-// The consumer consumes from a provided reader into the data passed by reference.
-//
-// Supported output underlying types and interfaces, prioritized in this order:
-// - io.ReaderFrom (for maximum control)
-// - io.Writer (performs io.Copy)
-// - encoding.BinaryUnmarshaler
-// - *string
-// - *[]byte
-func ByteStreamConsumer(opts ...byteStreamOpt) Consumer {
- var vals byteStreamOpts
- for _, opt := range opts {
- opt(&vals)
- }
-
- return ConsumerFunc(func(reader io.Reader, data interface{}) error {
- if reader == nil {
- return errors.New("ByteStreamConsumer requires a reader") // early exit
- }
- if data == nil {
- return errors.New("nil destination for ByteStreamConsumer")
- }
-
- closer := defaultCloser
- if vals.Close {
- if cl, isReaderCloser := reader.(io.Closer); isReaderCloser {
- closer = cl.Close
- }
- }
- defer func() {
- _ = closer()
- }()
-
- if readerFrom, isReaderFrom := data.(io.ReaderFrom); isReaderFrom {
- _, err := readerFrom.ReadFrom(reader)
- return err
- }
-
- if writer, isDataWriter := data.(io.Writer); isDataWriter {
- _, err := io.Copy(writer, reader)
- return err
- }
-
- // buffers input before writing to data
- var buf bytes.Buffer
- _, err := buf.ReadFrom(reader)
- if err != nil {
- return err
- }
- b := buf.Bytes()
-
- switch destinationPointer := data.(type) {
- case encoding.BinaryUnmarshaler:
- return destinationPointer.UnmarshalBinary(b)
- case *any:
- switch (*destinationPointer).(type) {
- case string:
- *destinationPointer = string(b)
-
- return nil
-
- case []byte:
- *destinationPointer = b
-
- return nil
- }
- default:
- // check for the underlying type to be pointer to []byte or string,
- if ptr := reflect.TypeOf(data); ptr.Kind() != reflect.Ptr {
- return errors.New("destination must be a pointer")
- }
-
- v := reflect.Indirect(reflect.ValueOf(data))
- t := v.Type()
-
- switch {
- case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8:
- v.SetBytes(b)
- return nil
-
- case t.Kind() == reflect.String:
- v.SetString(string(b))
- return nil
- }
- }
-
- return fmt.Errorf("%v (%T) is not supported by the ByteStreamConsumer, %s",
- data, data, "can be resolved by supporting Writer/BinaryUnmarshaler interface")
- })
-}
-
-// ByteStreamProducer creates a producer for byte streams.
-//
-// The producer takes input data then writes to an output writer (essentially as a pipe).
-//
-// Supported input underlying types and interfaces, prioritized in this order:
-// - io.WriterTo (for maximum control)
-// - io.Reader (performs io.Copy). A ReadCloser is closed before exiting.
-// - encoding.BinaryMarshaler
-// - error (writes as a string)
-// - []byte
-// - string
-// - struct, other slices: writes as JSON
-func ByteStreamProducer(opts ...byteStreamOpt) Producer {
- var vals byteStreamOpts
- for _, opt := range opts {
- opt(&vals)
- }
-
- return ProducerFunc(func(writer io.Writer, data interface{}) error {
- if writer == nil {
- return errors.New("ByteStreamProducer requires a writer") // early exit
- }
- if data == nil {
- return errors.New("nil data for ByteStreamProducer")
- }
-
- closer := defaultCloser
- if vals.Close {
- if cl, isWriterCloser := writer.(io.Closer); isWriterCloser {
- closer = cl.Close
- }
- }
- defer func() {
- _ = closer()
- }()
-
- if rc, isDataCloser := data.(io.ReadCloser); isDataCloser {
- defer rc.Close()
- }
-
- switch origin := data.(type) {
- case io.WriterTo:
- _, err := origin.WriteTo(writer)
- return err
-
- case io.Reader:
- _, err := io.Copy(writer, origin)
- return err
-
- case encoding.BinaryMarshaler:
- bytes, err := origin.MarshalBinary()
- if err != nil {
- return err
- }
-
- _, err = writer.Write(bytes)
- return err
-
- case error:
- _, err := writer.Write([]byte(origin.Error()))
- return err
-
- default:
- v := reflect.Indirect(reflect.ValueOf(data))
- t := v.Type()
-
- switch {
- case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8:
- _, err := writer.Write(v.Bytes())
- return err
-
- case t.Kind() == reflect.String:
- _, err := writer.Write([]byte(v.String()))
- return err
-
- case t.Kind() == reflect.Struct || t.Kind() == reflect.Slice:
- b, err := swag.WriteJSON(data)
- if err != nil {
- return err
- }
-
- _, err = writer.Write(b)
- return err
- }
- }
-
- return fmt.Errorf("%v (%T) is not supported by the ByteStreamProducer, %s",
- data, data, "can be resolved by supporting Reader/BinaryMarshaler interface")
- })
-}
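
A minimal sketch of the byte-stream consumer and producer removed above; it assumes the runtime.Consumer and runtime.Producer interfaces (declared elsewhere in this package, not in this hunk) expose Consume and Produce, and the payload is a placeholder:

package main

import (
	"bytes"
	"log"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	// Consume a stream into a bytes.Buffer (it satisfies io.Writer, one of the
	// supported destinations listed above).
	var buf bytes.Buffer
	consumer := runtime.ByteStreamConsumer()
	if err := consumer.Consume(strings.NewReader("raw payload"), &buf); err != nil {
		log.Fatal(err)
	}

	// Produce the buffered bytes back out to another writer.
	var out bytes.Buffer
	producer := runtime.ByteStreamProducer()
	if err := producer.Produce(&out, buf.Bytes()); err != nil {
		log.Fatal(err)
	}
	log.Println(out.String())
}
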
diff --git a/vendor/github.com/go-openapi/runtime/client/auth_info.go b/vendor/github.com/go-openapi/runtime/client/auth_info.go
deleted file mode 100644
index 4f26e92347..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/auth_info.go
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package client
-
-import (
- "encoding/base64"
-
- "github.com/go-openapi/strfmt"
-
- "github.com/go-openapi/runtime"
-)
-
-// PassThroughAuth never manipulates the request
-var PassThroughAuth runtime.ClientAuthInfoWriter
-
-func init() {
- PassThroughAuth = runtime.ClientAuthInfoWriterFunc(func(_ runtime.ClientRequest, _ strfmt.Registry) error { return nil })
-}
-
-// BasicAuth provides a basic auth info writer
-func BasicAuth(username, password string) runtime.ClientAuthInfoWriter {
- return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error {
- encoded := base64.StdEncoding.EncodeToString([]byte(username + ":" + password))
- return r.SetHeaderParam(runtime.HeaderAuthorization, "Basic "+encoded)
- })
-}
-
-// APIKeyAuth provides an API key auth info writer
-func APIKeyAuth(name, in, value string) runtime.ClientAuthInfoWriter {
- if in == "query" {
- return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error {
- return r.SetQueryParam(name, value)
- })
- }
-
- if in == "header" {
- return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error {
- return r.SetHeaderParam(name, value)
- })
- }
- return nil
-}
-
-// BearerToken provides a header based oauth2 bearer access token auth info writer
-func BearerToken(token string) runtime.ClientAuthInfoWriter {
- return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error {
- return r.SetHeaderParam(runtime.HeaderAuthorization, "Bearer "+token)
- })
-}
-
-// Compose combines multiple ClientAuthInfoWriters into a single one.
-// Useful when multiple auth headers are needed.
-func Compose(auths ...runtime.ClientAuthInfoWriter) runtime.ClientAuthInfoWriter {
- return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error {
- for _, auth := range auths {
- if auth == nil {
- continue
- }
- if err := auth.AuthenticateRequest(r, nil); err != nil {
- return err
- }
- }
- return nil
- })
-}
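
A minimal sketch composing the auth writers removed above; the token and header values are placeholders:

package main

import (
	"github.com/go-openapi/runtime/client"
)

func main() {
	// Combine a bearer token with an extra API-key header.
	authInfo := client.Compose(
		client.BearerToken("example-token"),                      // placeholder token
		client.APIKeyAuth("X-API-Key", "header", "secret-value"), // placeholder header/value
	)
	_ = authInfo // passed as the auth writer when invoking a generated client operation
}
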
diff --git a/vendor/github.com/go-openapi/runtime/client/keepalive.go b/vendor/github.com/go-openapi/runtime/client/keepalive.go
deleted file mode 100644
index 7dd6b51c4d..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/keepalive.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package client
-
-import (
- "io"
- "net/http"
- "sync/atomic"
-)
-
-// KeepAliveTransport drains the remaining body from a response
-// so that go will reuse the TCP connections.
-// This is not enabled by default because there are servers where
-// the response never gets closed and that would make the code hang forever.
-// So instead it's provided as an HTTP client middleware that can be used to override
-// any request.
-func KeepAliveTransport(rt http.RoundTripper) http.RoundTripper {
- return &keepAliveTransport{wrapped: rt}
-}
-
-type keepAliveTransport struct {
- wrapped http.RoundTripper
-}
-
-func (k *keepAliveTransport) RoundTrip(r *http.Request) (*http.Response, error) {
- resp, err := k.wrapped.RoundTrip(r)
- if err != nil {
- return resp, err
- }
- resp.Body = &drainingReadCloser{rdr: resp.Body}
- return resp, nil
-}
-
-type drainingReadCloser struct {
- rdr io.ReadCloser
- seenEOF uint32
-}
-
-func (d *drainingReadCloser) Read(p []byte) (n int, err error) {
- n, err = d.rdr.Read(p)
- if err == io.EOF || n == 0 {
- atomic.StoreUint32(&d.seenEOF, 1)
- }
- return
-}
-
-func (d *drainingReadCloser) Close() error {
- // drain buffer
- if atomic.LoadUint32(&d.seenEOF) != 1 {
- // If the reader side (an HTTP server) is misbehaving, it may still send
- // some bytes, but the closer ignores them to keep the underlying
- // connection open.
- _, _ = io.Copy(io.Discard, d.rdr)
- }
- return d.rdr.Close()
-}
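
A minimal sketch of opting into the keep-alive transport removed above:

package main

import (
	"net/http"

	"github.com/go-openapi/runtime/client"
)

func main() {
	// Wrap the default transport so response bodies are drained and
	// TCP connections can be reused (opt-in, per the comment above).
	httpClient := &http.Client{
		Transport: client.KeepAliveTransport(http.DefaultTransport),
	}
	_ = httpClient
}
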
diff --git a/vendor/github.com/go-openapi/runtime/client/opentelemetry.go b/vendor/github.com/go-openapi/runtime/client/opentelemetry.go
deleted file mode 100644
index 256cd1b4fd..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/opentelemetry.go
+++ /dev/null
@@ -1,211 +0,0 @@
-package client
-
-import (
- "fmt"
- "net/http"
- "strings"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
- "go.opentelemetry.io/otel"
- "go.opentelemetry.io/otel/attribute"
- "go.opentelemetry.io/otel/codes"
- "go.opentelemetry.io/otel/propagation"
- semconv "go.opentelemetry.io/otel/semconv/v1.17.0"
- "go.opentelemetry.io/otel/semconv/v1.17.0/httpconv"
- "go.opentelemetry.io/otel/trace"
-)
-
-const (
- instrumentationVersion = "1.0.0"
- tracerName = "go-openapi"
-)
-
-type config struct {
- Tracer trace.Tracer
- Propagator propagation.TextMapPropagator
- SpanStartOptions []trace.SpanStartOption
- SpanNameFormatter func(*runtime.ClientOperation) string
- TracerProvider trace.TracerProvider
-}
-
-type OpenTelemetryOpt interface {
- apply(*config)
-}
-
-type optionFunc func(*config)
-
-func (o optionFunc) apply(c *config) {
- o(c)
-}
-
-// WithTracerProvider specifies a tracer provider to use for creating a tracer.
-// If none is specified, the global provider is used.
-func WithTracerProvider(provider trace.TracerProvider) OpenTelemetryOpt {
- return optionFunc(func(c *config) {
- if provider != nil {
- c.TracerProvider = provider
- }
- })
-}
-
-// WithPropagators configures specific propagators. If this
-// option isn't specified, then the global TextMapPropagator is used.
-func WithPropagators(ps propagation.TextMapPropagator) OpenTelemetryOpt {
- return optionFunc(func(c *config) {
- if ps != nil {
- c.Propagator = ps
- }
- })
-}
-
-// WithSpanOptions configures an additional set of
-// trace.SpanOptions, which are applied to each new span.
-func WithSpanOptions(opts ...trace.SpanStartOption) OpenTelemetryOpt {
- return optionFunc(func(c *config) {
- c.SpanStartOptions = append(c.SpanStartOptions, opts...)
- })
-}
-
-// WithSpanNameFormatter takes a function that will be called on every
-// request and the returned string will become the Span Name.
-func WithSpanNameFormatter(f func(op *runtime.ClientOperation) string) OpenTelemetryOpt {
- return optionFunc(func(c *config) {
- c.SpanNameFormatter = f
- })
-}
-
-func defaultTransportFormatter(op *runtime.ClientOperation) string {
- if op.ID != "" {
- return op.ID
- }
-
- return fmt.Sprintf("%s_%s", strings.ToLower(op.Method), op.PathPattern)
-}
-
-type openTelemetryTransport struct {
- transport runtime.ClientTransport
- host string
- tracer trace.Tracer
- config *config
-}
-
-func newOpenTelemetryTransport(transport runtime.ClientTransport, host string, opts []OpenTelemetryOpt) *openTelemetryTransport {
- tr := &openTelemetryTransport{
- transport: transport,
- host: host,
- }
-
- defaultOpts := []OpenTelemetryOpt{
- WithSpanOptions(trace.WithSpanKind(trace.SpanKindClient)),
- WithSpanNameFormatter(defaultTransportFormatter),
- WithPropagators(otel.GetTextMapPropagator()),
- WithTracerProvider(otel.GetTracerProvider()),
- }
-
- c := newConfig(append(defaultOpts, opts...)...)
- tr.config = c
-
- return tr
-}
-
-func (t *openTelemetryTransport) Submit(op *runtime.ClientOperation) (interface{}, error) {
- if op.Context == nil {
- return t.transport.Submit(op)
- }
-
- params := op.Params
- reader := op.Reader
-
- var span trace.Span
- defer func() {
- if span != nil {
- span.End()
- }
- }()
-
- op.Params = runtime.ClientRequestWriterFunc(func(req runtime.ClientRequest, reg strfmt.Registry) error {
- span = t.newOpenTelemetrySpan(op, req.GetHeaderParams())
- return params.WriteToRequest(req, reg)
- })
-
- op.Reader = runtime.ClientResponseReaderFunc(func(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- if span != nil {
- statusCode := response.Code()
- // NOTE: this is replaced by semconv.HTTPResponseStatusCode in semconv v1.21
- span.SetAttributes(semconv.HTTPStatusCode(statusCode))
- // NOTE: the conversion from HTTP status code to trace code is no longer available with
- // semconv v1.21
- span.SetStatus(httpconv.ServerStatus(statusCode))
- }
-
- return reader.ReadResponse(response, consumer)
- })
-
- submit, err := t.transport.Submit(op)
- if err != nil && span != nil {
- span.RecordError(err)
- span.SetStatus(codes.Error, err.Error())
- }
-
- return submit, err
-}
-
-func (t *openTelemetryTransport) newOpenTelemetrySpan(op *runtime.ClientOperation, header http.Header) trace.Span {
- ctx := op.Context
-
- tracer := t.tracer
- if tracer == nil {
- if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() {
- tracer = newTracer(span.TracerProvider())
- } else {
- tracer = newTracer(otel.GetTracerProvider())
- }
- }
-
- ctx, span := tracer.Start(ctx, t.config.SpanNameFormatter(op), t.config.SpanStartOptions...)
-
- var scheme string
- if len(op.Schemes) > 0 {
- scheme = op.Schemes[0]
- }
-
- span.SetAttributes(
- attribute.String("net.peer.name", t.host),
- attribute.String(string(semconv.HTTPRouteKey), op.PathPattern),
- attribute.String(string(semconv.HTTPMethodKey), op.Method),
- attribute.String("span.kind", trace.SpanKindClient.String()),
- attribute.String("http.scheme", scheme),
- )
-
- carrier := propagation.HeaderCarrier(header)
- t.config.Propagator.Inject(ctx, carrier)
-
- return span
-}
-
-func newTracer(tp trace.TracerProvider) trace.Tracer {
- return tp.Tracer(tracerName, trace.WithInstrumentationVersion(version()))
-}
-
-func newConfig(opts ...OpenTelemetryOpt) *config {
- c := &config{
- Propagator: otel.GetTextMapPropagator(),
- }
-
- for _, opt := range opts {
- opt.apply(c)
- }
-
- // Tracer is only initialized if manually specified. Otherwise, can be passed with the tracing context.
- if c.TracerProvider != nil {
- c.Tracer = newTracer(c.TracerProvider)
- }
-
- return c
-}
-
-// Version is the current release version of the go-runtime instrumentation.
-func version() string {
- return instrumentationVersion
-}
diff --git a/vendor/github.com/go-openapi/runtime/client/opentracing.go b/vendor/github.com/go-openapi/runtime/client/opentracing.go
deleted file mode 100644
index 627286d12f..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/opentracing.go
+++ /dev/null
@@ -1,99 +0,0 @@
-package client
-
-import (
- "fmt"
- "net/http"
-
- "github.com/go-openapi/strfmt"
- "github.com/opentracing/opentracing-go"
- "github.com/opentracing/opentracing-go/ext"
- "github.com/opentracing/opentracing-go/log"
-
- "github.com/go-openapi/runtime"
-)
-
-type tracingTransport struct {
- transport runtime.ClientTransport
- host string
- opts []opentracing.StartSpanOption
-}
-
-func newOpenTracingTransport(transport runtime.ClientTransport, host string, opts []opentracing.StartSpanOption,
-) runtime.ClientTransport {
- return &tracingTransport{
- transport: transport,
- host: host,
- opts: opts,
- }
-}
-
-func (t *tracingTransport) Submit(op *runtime.ClientOperation) (interface{}, error) {
- if op.Context == nil {
- return t.transport.Submit(op)
- }
-
- params := op.Params
- reader := op.Reader
-
- var span opentracing.Span
- defer func() {
- if span != nil {
- span.Finish()
- }
- }()
-
- op.Params = runtime.ClientRequestWriterFunc(func(req runtime.ClientRequest, reg strfmt.Registry) error {
- span = createClientSpan(op, req.GetHeaderParams(), t.host, t.opts)
- return params.WriteToRequest(req, reg)
- })
-
- op.Reader = runtime.ClientResponseReaderFunc(func(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- if span != nil {
- code := response.Code()
- ext.HTTPStatusCode.Set(span, uint16(code))
- if code >= 400 {
- ext.Error.Set(span, true)
- }
- }
- return reader.ReadResponse(response, consumer)
- })
-
- submit, err := t.transport.Submit(op)
- if err != nil && span != nil {
- ext.Error.Set(span, true)
- span.LogFields(log.Error(err))
- }
- return submit, err
-}
-
-func createClientSpan(op *runtime.ClientOperation, header http.Header, host string,
- opts []opentracing.StartSpanOption) opentracing.Span {
- ctx := op.Context
- span := opentracing.SpanFromContext(ctx)
-
- if span != nil {
- opts = append(opts, ext.SpanKindRPCClient)
- span, _ = opentracing.StartSpanFromContextWithTracer(
- ctx, span.Tracer(), operationName(op), opts...)
-
- ext.Component.Set(span, "go-openapi")
- ext.PeerHostname.Set(span, host)
- span.SetTag("http.path", op.PathPattern)
- ext.HTTPMethod.Set(span, op.Method)
-
- _ = span.Tracer().Inject(
- span.Context(),
- opentracing.HTTPHeaders,
- opentracing.HTTPHeadersCarrier(header))
-
- return span
- }
- return nil
-}
-
-func operationName(op *runtime.ClientOperation) string {
- if op.ID != "" {
- return op.ID
- }
- return fmt.Sprintf("%s_%s", op.Method, op.PathPattern)
-}
diff --git a/vendor/github.com/go-openapi/runtime/client/request.go b/vendor/github.com/go-openapi/runtime/client/request.go
deleted file mode 100644
index c4a891d0bc..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/request.go
+++ /dev/null
@@ -1,482 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package client
-
-import (
- "bytes"
- "context"
- "fmt"
- "io"
- "log"
- "mime/multipart"
- "net/http"
- "net/textproto"
- "net/url"
- "os"
- "path"
- "path/filepath"
- "strings"
- "time"
-
- "github.com/go-openapi/strfmt"
-
- "github.com/go-openapi/runtime"
-)
-
-// NewRequest creates a new swagger http client request
-func newRequest(method, pathPattern string, writer runtime.ClientRequestWriter) *request {
- return &request{
- pathPattern: pathPattern,
- method: method,
- writer: writer,
- header: make(http.Header),
- query: make(url.Values),
- timeout: DefaultTimeout,
- getBody: getRequestBuffer,
- }
-}
-
-// Request represents a swagger client request.
-//
-// This Request struct converts to an HTTP request.
-// There might be others that convert to other transports.
-// There is no error checking here; it is assumed to be used after a spec has been validated,
-// so impossible combinations should not arise (hopefully).
-//
-// The main purpose of this struct is to hide the machinery of adding params to a transport request.
-// The generated code only implements what is necessary to turn a param into a valid value for these methods.
-type request struct {
- pathPattern string
- method string
- writer runtime.ClientRequestWriter
-
- pathParams map[string]string
- header http.Header
- query url.Values
- formFields url.Values
- fileFields map[string][]runtime.NamedReadCloser
- payload interface{}
- timeout time.Duration
- buf *bytes.Buffer
-
- getBody func(r *request) []byte
-}
-
-var (
- // ensure interface compliance
- _ runtime.ClientRequest = new(request)
-)
-
-func (r *request) isMultipart(mediaType string) bool {
- if len(r.fileFields) > 0 {
- return true
- }
-
- return runtime.MultipartFormMime == mediaType
-}
-
-// BuildHTTP creates a new http request based on the data from the params
-func (r *request) BuildHTTP(mediaType, basePath string, producers map[string]runtime.Producer, registry strfmt.Registry) (*http.Request, error) {
- return r.buildHTTP(mediaType, basePath, producers, registry, nil)
-}
-func escapeQuotes(s string) string {
- return strings.NewReplacer("\\", "\\\\", `"`, "\\\"").Replace(s)
-}
-
-func logClose(err error, pw *io.PipeWriter) {
- log.Println(err)
- closeErr := pw.CloseWithError(err)
- if closeErr != nil {
- log.Println(closeErr)
- }
-}
-
-func (r *request) buildHTTP(mediaType, basePath string, producers map[string]runtime.Producer, registry strfmt.Registry, auth runtime.ClientAuthInfoWriter) (*http.Request, error) { //nolint:gocyclo,maintidx
- // build the data
- if err := r.writer.WriteToRequest(r, registry); err != nil {
- return nil, err
- }
-
- // Our body must be an io.Reader.
- // When we create the http.Request, if we pass it a
- // bytes.Buffer then it will wrap it in an io.ReadCloser
- // and set the content length automatically.
- var body io.Reader
- var pr *io.PipeReader
- var pw *io.PipeWriter
-
- r.buf = bytes.NewBuffer(nil)
- if r.payload != nil || len(r.formFields) > 0 || len(r.fileFields) > 0 {
- body = r.buf
- if r.isMultipart(mediaType) {
- pr, pw = io.Pipe()
- body = pr
- }
- }
-
- // check if this is a form type request
- if len(r.formFields) > 0 || len(r.fileFields) > 0 {
- if !r.isMultipart(mediaType) {
- r.header.Set(runtime.HeaderContentType, mediaType)
- formString := r.formFields.Encode()
- r.buf.WriteString(formString)
- goto DoneChoosingBodySource
- }
-
- mp := multipart.NewWriter(pw)
- r.header.Set(runtime.HeaderContentType, mangleContentType(mediaType, mp.Boundary()))
-
- go func() {
- defer func() {
- mp.Close()
- pw.Close()
- }()
-
- for fn, v := range r.formFields {
- for _, vi := range v {
- if err := mp.WriteField(fn, vi); err != nil {
- logClose(err, pw)
- return
- }
- }
- }
-
- defer func() {
- for _, ff := range r.fileFields {
- for _, ffi := range ff {
- ffi.Close()
- }
- }
- }()
- for fn, f := range r.fileFields {
- for _, fi := range f {
- var fileContentType string
- if p, ok := fi.(interface {
- ContentType() string
- }); ok {
- fileContentType = p.ContentType()
- } else {
- // Need to read the data so that we can detect the content type
- buf := make([]byte, 512)
- size, err := fi.Read(buf)
- if err != nil && err != io.EOF {
- logClose(err, pw)
- return
- }
- fileContentType = http.DetectContentType(buf)
- fi = runtime.NamedReader(fi.Name(), io.MultiReader(bytes.NewReader(buf[:size]), fi))
- }
-
- // Create the MIME headers for the new part
- h := make(textproto.MIMEHeader)
- h.Set("Content-Disposition",
- fmt.Sprintf(`form-data; name="%s"; filename="%s"`,
- escapeQuotes(fn), escapeQuotes(filepath.Base(fi.Name()))))
- h.Set("Content-Type", fileContentType)
-
- wrtr, err := mp.CreatePart(h)
- if err != nil {
- logClose(err, pw)
- return
- }
- if _, err := io.Copy(wrtr, fi); err != nil {
- logClose(err, pw)
- }
- }
- }
- }()
-
- goto DoneChoosingBodySource
- }
-
- // if there is payload, use the producer to write the payload, and then
- // set the header to the content-type appropriate for the payload produced
- if r.payload != nil {
- // TODO: infer most appropriate content type based on the producer used,
- // and the `consumers` section of the spec/operation
- r.header.Set(runtime.HeaderContentType, mediaType)
- if rdr, ok := r.payload.(io.ReadCloser); ok {
- body = rdr
- goto DoneChoosingBodySource
- }
-
- if rdr, ok := r.payload.(io.Reader); ok {
- body = rdr
- goto DoneChoosingBodySource
- }
-
- producer := producers[mediaType]
- if err := producer.Produce(r.buf, r.payload); err != nil {
- return nil, err
- }
- }
-
-DoneChoosingBodySource:
-
- if runtime.CanHaveBody(r.method) && body != nil && r.header.Get(runtime.HeaderContentType) == "" {
- r.header.Set(runtime.HeaderContentType, mediaType)
- }
-
- if auth != nil {
- // If we're not using r.buf as our http.Request's body,
- // either the payload is an io.Reader or io.ReadCloser,
- // or we're doing a multipart form/file.
- //
- // In those cases, if the AuthenticateRequest call asks for the body,
- // we must read it into a buffer and provide that, then use that buffer
- // as the body of our http.Request.
- //
- // This is done in-line with the GetBody() request rather than ahead
- // of time, because there's no way to know if the AuthenticateRequest
- // will even ask for the body of the request.
- //
- // If for some reason the copy fails, there's no way to return that
- // error to the GetBody() call, so return it afterwards.
- //
- // An error from the copy action is prioritized over any error
- // from the AuthenticateRequest call, because the mis-read
- // body may have interfered with the auth.
- //
- var copyErr error
- if buf, ok := body.(*bytes.Buffer); body != nil && (!ok || buf != r.buf) {
- var copied bool
- r.getBody = func(r *request) []byte {
- if copied {
- return getRequestBuffer(r)
- }
-
- defer func() {
- copied = true
- }()
-
- if _, copyErr = io.Copy(r.buf, body); copyErr != nil {
- return nil
- }
-
- if closer, ok := body.(io.ReadCloser); ok {
- if copyErr = closer.Close(); copyErr != nil {
- return nil
- }
- }
-
- body = r.buf
- return getRequestBuffer(r)
- }
- }
-
- authErr := auth.AuthenticateRequest(r, registry)
-
- if copyErr != nil {
- return nil, fmt.Errorf("error retrieving the response body: %v", copyErr)
- }
-
- if authErr != nil {
- return nil, authErr
- }
- }
-
- // In case the basePath or the request pathPattern include static query parameters,
- // parse those out before constructing the final path. The parameters themselves
- // will be merged with the ones set by the client, with the priority given first to
- // the ones set by the client, then the path pattern, and lastly the base path.
- basePathURL, err := url.Parse(basePath)
- if err != nil {
- return nil, err
- }
- staticQueryParams := basePathURL.Query()
-
- pathPatternURL, err := url.Parse(r.pathPattern)
- if err != nil {
- return nil, err
- }
- for name, values := range pathPatternURL.Query() {
- if _, present := staticQueryParams[name]; present {
- staticQueryParams.Del(name)
- }
- for _, value := range values {
- staticQueryParams.Add(name, value)
- }
- }
-
- // create http request
- var reinstateSlash bool
- if pathPatternURL.Path != "" && pathPatternURL.Path != "/" && pathPatternURL.Path[len(pathPatternURL.Path)-1] == '/' {
- reinstateSlash = true
- }
-
- urlPath := path.Join(basePathURL.Path, pathPatternURL.Path)
- for k, v := range r.pathParams {
- urlPath = strings.ReplaceAll(urlPath, "{"+k+"}", url.PathEscape(v))
- }
- if reinstateSlash {
- urlPath += "/"
- }
-
- req, err := http.NewRequestWithContext(context.Background(), r.method, urlPath, body)
- if err != nil {
- return nil, err
- }
-
- originalParams := r.GetQueryParams()
-
- // Merge the query parameters extracted from the basePath with the ones set by
- // the client in this struct. In case of conflict, the client wins.
- for k, v := range staticQueryParams {
- _, present := originalParams[k]
- if !present {
- if err = r.SetQueryParam(k, v...); err != nil {
- return nil, err
- }
- }
- }
-
- req.URL.RawQuery = r.query.Encode()
- req.Header = r.header
-
- return req, nil
-}
-
-func mangleContentType(mediaType, boundary string) string {
- if strings.ToLower(mediaType) == runtime.URLencodedFormMime {
- return fmt.Sprintf("%s; boundary=%s", mediaType, boundary)
- }
- return "multipart/form-data; boundary=" + boundary
-}
-
-func (r *request) GetMethod() string {
- return r.method
-}
-
-func (r *request) GetPath() string {
- path := r.pathPattern
- for k, v := range r.pathParams {
- path = strings.ReplaceAll(path, "{"+k+"}", v)
- }
- return path
-}
-
-func (r *request) GetBody() []byte {
- return r.getBody(r)
-}
-
-func getRequestBuffer(r *request) []byte {
- if r.buf == nil {
- return nil
- }
- return r.buf.Bytes()
-}
-
-// SetHeaderParam adds a header param to the request
-// when there is only 1 value provided for the varargs, it will set it.
-// when there are several values provided for the varargs it will add it (no overriding)
-func (r *request) SetHeaderParam(name string, values ...string) error {
- if r.header == nil {
- r.header = make(http.Header)
- }
- r.header[http.CanonicalHeaderKey(name)] = values
- return nil
-}
-
-// GetHeaderParams returns all the headers currently set for the request
-func (r *request) GetHeaderParams() http.Header {
- return r.header
-}
-
-// SetQueryParam adds a query param to the request
-// when there is only 1 value provided for the varargs, it will set it.
-// when there are several values provided for the varargs it will add it (no overriding)
-func (r *request) SetQueryParam(name string, values ...string) error {
- if r.query == nil {
- r.query = make(url.Values)
- }
- r.query[name] = values
- return nil
-}
-
-// GetQueryParams returns a copy of all query params currently set for the request
-func (r *request) GetQueryParams() url.Values {
- var result = make(url.Values)
- for key, value := range r.query {
- result[key] = append([]string{}, value...)
- }
- return result
-}
-
-// SetFormParam adds a form param to the request
-// when there is only 1 value provided for the varargs, it will set it.
-// when there are several values provided for the varargs it will add it (no overriding)
-func (r *request) SetFormParam(name string, values ...string) error {
- if r.formFields == nil {
- r.formFields = make(url.Values)
- }
- r.formFields[name] = values
- return nil
-}
-
-// SetPathParam adds a path param to the request
-func (r *request) SetPathParam(name string, value string) error {
- if r.pathParams == nil {
- r.pathParams = make(map[string]string)
- }
-
- r.pathParams[name] = value
- return nil
-}
-
-// SetFileParam adds a file param to the request
-func (r *request) SetFileParam(name string, files ...runtime.NamedReadCloser) error {
- for _, file := range files {
- if actualFile, ok := file.(*os.File); ok {
- fi, err := os.Stat(actualFile.Name())
- if err != nil {
- return err
- }
- if fi.IsDir() {
- return fmt.Errorf("%q is a directory, only files are supported", file.Name())
- }
- }
- }
-
- if r.fileFields == nil {
- r.fileFields = make(map[string][]runtime.NamedReadCloser)
- }
- if r.formFields == nil {
- r.formFields = make(url.Values)
- }
-
- r.fileFields[name] = files
- return nil
-}
-
-func (r *request) GetFileParam() map[string][]runtime.NamedReadCloser {
- return r.fileFields
-}
-
-// SetBodyParam sets a body parameter on the request.
-// This does not yet serialize the object; that happens as late as possible.
-func (r *request) SetBodyParam(payload interface{}) error {
- r.payload = payload
- return nil
-}
-
-func (r *request) GetBodyParam() interface{} {
- return r.payload
-}
-
-// SetTimeout sets the timeout for a request
-func (r *request) SetTimeout(timeout time.Duration) error {
- r.timeout = timeout
- return nil
-}
diff --git a/vendor/github.com/go-openapi/runtime/client/response.go b/vendor/github.com/go-openapi/runtime/client/response.go
deleted file mode 100644
index 0bbd388bc8..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/response.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package client
-
-import (
- "io"
- "net/http"
-
- "github.com/go-openapi/runtime"
-)
-
-var _ runtime.ClientResponse = response{}
-
-func newResponse(resp *http.Response) runtime.ClientResponse { return response{resp: resp} }
-
-type response struct {
- resp *http.Response
-}
-
-func (r response) Code() int {
- return r.resp.StatusCode
-}
-
-func (r response) Message() string {
- return r.resp.Status
-}
-
-func (r response) GetHeader(name string) string {
- return r.resp.Header.Get(name)
-}
-
-func (r response) GetHeaders(name string) []string {
- return r.resp.Header.Values(name)
-}
-
-func (r response) Body() io.ReadCloser {
- return r.resp.Body
-}
diff --git a/vendor/github.com/go-openapi/runtime/client/runtime.go b/vendor/github.com/go-openapi/runtime/client/runtime.go
deleted file mode 100644
index 5bd4d75d90..0000000000
--- a/vendor/github.com/go-openapi/runtime/client/runtime.go
+++ /dev/null
@@ -1,552 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package client
-
-import (
- "context"
- "crypto"
- "crypto/ecdsa"
- "crypto/rsa"
- "crypto/tls"
- "crypto/x509"
- "encoding/pem"
- "errors"
- "fmt"
- "mime"
- "net/http"
- "net/http/httputil"
- "os"
- "strings"
- "sync"
- "time"
-
- "github.com/go-openapi/strfmt"
- "github.com/opentracing/opentracing-go"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/runtime/logger"
- "github.com/go-openapi/runtime/middleware"
- "github.com/go-openapi/runtime/yamlpc"
-)
-
-const (
- schemeHTTP = "http"
- schemeHTTPS = "https"
-)
-
-// TLSClientOptions to configure client authentication with mutual TLS
-type TLSClientOptions struct {
- // Certificate is the path to a PEM-encoded certificate to be used for
- // client authentication. If set then Key must also be set.
- Certificate string
-
- // LoadedCertificate is the certificate to be used for client authentication.
- // This field is ignored if Certificate is set. If this field is set, LoadedKey
- // is also required.
- LoadedCertificate *x509.Certificate
-
- // Key is the path to an unencrypted PEM-encoded private key for client
- // authentication. This field is required if Certificate is set.
- Key string
-
- // LoadedKey is the key for client authentication. This field is required if
- // LoadedCertificate is set.
- LoadedKey crypto.PrivateKey
-
- // CA is a path to a PEM-encoded certificate that specifies the root certificate
- // to use when validating the TLS certificate presented by the server. If this field
- // (and LoadedCA) is not set, the system certificate pool is used. This field is ignored if LoadedCA
- // is set.
- CA string
-
- // LoadedCA specifies the root certificate to use when validating the server's TLS certificate.
- // If this field (and CA) is not set, the system certificate pool is used.
- LoadedCA *x509.Certificate
-
- // LoadedCAPool specifies a pool of RootCAs to use when validating the server's TLS certificate.
- // If set, it will be combined with the other loaded certificates (see LoadedCA and CA).
- // If neither LoadedCA nor CA is set, the provided pool will override the system
- // certificate pool.
- // The caller must not use the supplied pool after calling TLSClientAuth.
- LoadedCAPool *x509.CertPool
-
- // ServerName specifies the hostname to use when verifying the server certificate.
- // If this field is set then InsecureSkipVerify will be ignored and treated as
- // false.
- ServerName string
-
- // InsecureSkipVerify controls whether the certificate chain and hostname presented
- // by the server are validated. If true, any certificate is accepted.
- InsecureSkipVerify bool
-
- // VerifyPeerCertificate, if not nil, is called after normal
- // certificate verification. It receives the raw ASN.1 certificates
- // provided by the peer and also any verified chains that normal processing found.
- // If it returns a non-nil error, the handshake is aborted and that error results.
- //
- // If normal verification fails then the handshake will abort before
- // considering this callback. If normal verification is disabled by
- // setting InsecureSkipVerify then this callback will be considered but
- // the verifiedChains argument will always be nil.
- VerifyPeerCertificate func(rawCerts [][]byte, verifiedChains [][]*x509.Certificate) error
-
- // SessionTicketsDisabled may be set to true to disable session ticket and
- // PSK (resumption) support. Note that on clients, session ticket support is
- // also disabled if ClientSessionCache is nil.
- SessionTicketsDisabled bool
-
- // ClientSessionCache is a cache of ClientSessionState entries for TLS
- // session resumption. It is only used by clients.
- ClientSessionCache tls.ClientSessionCache
-
- // Prevents callers using unkeyed fields.
- _ struct{}
-}
-
-// TLSClientAuth creates a tls.Config for mutual auth
-func TLSClientAuth(opts TLSClientOptions) (*tls.Config, error) {
- // create client tls config
- cfg := &tls.Config{
- MinVersion: tls.VersionTLS12,
- }
-
- // load client cert if specified
- if opts.Certificate != "" {
- cert, err := tls.LoadX509KeyPair(opts.Certificate, opts.Key)
- if err != nil {
- return nil, fmt.Errorf("tls client cert: %v", err)
- }
- cfg.Certificates = []tls.Certificate{cert}
- } else if opts.LoadedCertificate != nil {
- block := pem.Block{Type: "CERTIFICATE", Bytes: opts.LoadedCertificate.Raw}
- certPem := pem.EncodeToMemory(&block)
-
- var keyBytes []byte
- switch k := opts.LoadedKey.(type) {
- case *rsa.PrivateKey:
- keyBytes = x509.MarshalPKCS1PrivateKey(k)
- case *ecdsa.PrivateKey:
- var err error
- keyBytes, err = x509.MarshalECPrivateKey(k)
- if err != nil {
- return nil, fmt.Errorf("tls client priv key: %v", err)
- }
- default:
- return nil, errors.New("tls client priv key: unsupported key type")
- }
-
- block = pem.Block{Type: "PRIVATE KEY", Bytes: keyBytes}
- keyPem := pem.EncodeToMemory(&block)
-
- cert, err := tls.X509KeyPair(certPem, keyPem)
- if err != nil {
- return nil, fmt.Errorf("tls client cert: %v", err)
- }
- cfg.Certificates = []tls.Certificate{cert}
- }
-
- cfg.InsecureSkipVerify = opts.InsecureSkipVerify
-
- cfg.VerifyPeerCertificate = opts.VerifyPeerCertificate
- cfg.SessionTicketsDisabled = opts.SessionTicketsDisabled
- cfg.ClientSessionCache = opts.ClientSessionCache
-
- // When no CA certificate is provided, default to the system cert pool
- // that way when a request is made to a server known by the system trust store,
- // the name is still verified
- switch {
- case opts.LoadedCA != nil:
- caCertPool := basePool(opts.LoadedCAPool)
- caCertPool.AddCert(opts.LoadedCA)
- cfg.RootCAs = caCertPool
- case opts.CA != "":
- // load ca cert
- caCert, err := os.ReadFile(opts.CA)
- if err != nil {
- return nil, fmt.Errorf("tls client ca: %v", err)
- }
- caCertPool := basePool(opts.LoadedCAPool)
- caCertPool.AppendCertsFromPEM(caCert)
- cfg.RootCAs = caCertPool
- case opts.LoadedCAPool != nil:
- cfg.RootCAs = opts.LoadedCAPool
- }
-
- // apply servername override
- if opts.ServerName != "" {
- cfg.InsecureSkipVerify = false
- cfg.ServerName = opts.ServerName
- }
-
- return cfg, nil
-}
-
-func basePool(pool *x509.CertPool) *x509.CertPool {
- if pool == nil {
- return x509.NewCertPool()
- }
- return pool
-}
-
-// TLSTransport creates a http client transport suitable for mutual tls auth
-func TLSTransport(opts TLSClientOptions) (http.RoundTripper, error) {
- cfg, err := TLSClientAuth(opts)
- if err != nil {
- return nil, err
- }
-
- return &http.Transport{TLSClientConfig: cfg}, nil
-}
-
-// TLSClient creates a http.Client for mutual auth
-func TLSClient(opts TLSClientOptions) (*http.Client, error) {
- transport, err := TLSTransport(opts)
- if err != nil {
- return nil, err
- }
- return &http.Client{Transport: transport}, nil
-}
-
-// DefaultTimeout the default request timeout
-var DefaultTimeout = 30 * time.Second
-
-// Runtime represents an API client that uses the transport
-// to make http requests based on a swagger specification.
-type Runtime struct {
- DefaultMediaType string
- DefaultAuthentication runtime.ClientAuthInfoWriter
- Consumers map[string]runtime.Consumer
- Producers map[string]runtime.Producer
-
- Transport http.RoundTripper
- Jar http.CookieJar
- // Spec *spec.Document
- Host string
- BasePath string
- Formats strfmt.Registry
- Context context.Context //nolint:containedctx // we precisely want this type to contain the request context
-
- Debug bool
- logger logger.Logger
-
- clientOnce *sync.Once
- client *http.Client
- schemes []string
- response ClientResponseFunc
-}
-
-// New creates a new default runtime for a swagger api runtime.Client
-func New(host, basePath string, schemes []string) *Runtime {
- var rt Runtime
- rt.DefaultMediaType = runtime.JSONMime
-
- // TODO: actually infer this stuff from the spec
- rt.Consumers = map[string]runtime.Consumer{
- runtime.YAMLMime: yamlpc.YAMLConsumer(),
- runtime.JSONMime: runtime.JSONConsumer(),
- runtime.XMLMime: runtime.XMLConsumer(),
- runtime.TextMime: runtime.TextConsumer(),
- runtime.HTMLMime: runtime.TextConsumer(),
- runtime.CSVMime: runtime.CSVConsumer(),
- runtime.DefaultMime: runtime.ByteStreamConsumer(),
- }
- rt.Producers = map[string]runtime.Producer{
- runtime.YAMLMime: yamlpc.YAMLProducer(),
- runtime.JSONMime: runtime.JSONProducer(),
- runtime.XMLMime: runtime.XMLProducer(),
- runtime.TextMime: runtime.TextProducer(),
- runtime.HTMLMime: runtime.TextProducer(),
- runtime.CSVMime: runtime.CSVProducer(),
- runtime.DefaultMime: runtime.ByteStreamProducer(),
- }
- rt.Transport = http.DefaultTransport
- rt.Jar = nil
- rt.Host = host
- rt.BasePath = basePath
- rt.Context = context.Background()
- rt.clientOnce = new(sync.Once)
- if !strings.HasPrefix(rt.BasePath, "/") {
- rt.BasePath = "/" + rt.BasePath
- }
-
- rt.Debug = logger.DebugEnabled()
- rt.logger = logger.StandardLogger{}
- rt.response = newResponse
-
- if len(schemes) > 0 {
- rt.schemes = schemes
- }
- return &rt
-}
-
-// NewWithClient allows you to create a new transport with a configured http.Client
-func NewWithClient(host, basePath string, schemes []string, client *http.Client) *Runtime {
- rt := New(host, basePath, schemes)
- if client != nil {
- rt.clientOnce.Do(func() {
- rt.client = client
- })
- }
- return rt
-}
-
-// WithOpenTracing adds opentracing support to the provided runtime.
-// A new client span is created for each request.
-// If the context of the client operation does not contain an active span, no span is created.
-// The provided opts are applied to each span - for example to add global tags.
-func (r *Runtime) WithOpenTracing(opts ...opentracing.StartSpanOption) runtime.ClientTransport {
- return newOpenTracingTransport(r, r.Host, opts)
-}
-
-// WithOpenTelemetry adds opentelemetry support to the provided runtime.
-// A new client span is created for each request.
-// If the context of the client operation does not contain an active span, no span is created.
-// The provided opts are applied to each span - for example to add global tags.
-func (r *Runtime) WithOpenTelemetry(opts ...OpenTelemetryOpt) runtime.ClientTransport {
- return newOpenTelemetryTransport(r, r.Host, opts)
-}
-
-func (r *Runtime) pickScheme(schemes []string) string {
- if v := r.selectScheme(r.schemes); v != "" {
- return v
- }
- if v := r.selectScheme(schemes); v != "" {
- return v
- }
- return schemeHTTP
-}
-
-func (r *Runtime) selectScheme(schemes []string) string {
- schLen := len(schemes)
- if schLen == 0 {
- return ""
- }
-
- scheme := schemes[0]
- // prefer https, but skip when not possible
- if scheme != schemeHTTPS && schLen > 1 {
- for _, sch := range schemes {
- if sch == schemeHTTPS {
- scheme = sch
- break
- }
- }
- }
- return scheme
-}
-
-func transportOrDefault(left, right http.RoundTripper) http.RoundTripper {
- if left == nil {
- return right
- }
- return left
-}
-
-// EnableConnectionReuse drains the remaining body from a response
-// so that go will reuse the TCP connections.
-//
-// This is not enabled by default because there are servers where
-// the response never gets closed and that would make the code hang forever.
-// So instead it's provided as a http client middleware that can be used to override
-// any request.
-func (r *Runtime) EnableConnectionReuse() {
- if r.client == nil {
- r.Transport = KeepAliveTransport(
- transportOrDefault(r.Transport, http.DefaultTransport),
- )
- return
- }
-
- r.client.Transport = KeepAliveTransport(
- transportOrDefault(r.client.Transport,
- transportOrDefault(r.Transport, http.DefaultTransport),
- ),
- )
-}
-
-// takes a client operation and creates an equivalent http.Request
-func (r *Runtime) createHttpRequest(operation *runtime.ClientOperation) (*request, *http.Request, error) { //nolint:revive,stylecheck
- params, _, auth := operation.Params, operation.Reader, operation.AuthInfo
-
- request := newRequest(operation.Method, operation.PathPattern, params)
-
- var accept []string
- accept = append(accept, operation.ProducesMediaTypes...)
- if err := request.SetHeaderParam(runtime.HeaderAccept, accept...); err != nil {
- return nil, nil, err
- }
-
- if auth == nil && r.DefaultAuthentication != nil {
- auth = runtime.ClientAuthInfoWriterFunc(func(req runtime.ClientRequest, reg strfmt.Registry) error {
- if req.GetHeaderParams().Get(runtime.HeaderAuthorization) != "" {
- return nil
- }
- return r.DefaultAuthentication.AuthenticateRequest(req, reg)
- })
- }
- // if auth != nil {
- // if err := auth.AuthenticateRequest(request, r.Formats); err != nil {
- // return nil, err
- // }
- //}
-
- // TODO: pick appropriate media type
- cmt := r.DefaultMediaType
- for _, mediaType := range operation.ConsumesMediaTypes {
- // Pick first non-empty media type
- if mediaType != "" {
- cmt = mediaType
- break
- }
- }
-
- if _, ok := r.Producers[cmt]; !ok && cmt != runtime.MultipartFormMime && cmt != runtime.URLencodedFormMime {
- return nil, nil, fmt.Errorf("none of producers: %v registered. try %s", r.Producers, cmt)
- }
-
- req, err := request.buildHTTP(cmt, r.BasePath, r.Producers, r.Formats, auth)
- if err != nil {
- return nil, nil, err
- }
- req.URL.Scheme = r.pickScheme(operation.Schemes)
- req.URL.Host = r.Host
- req.Host = r.Host
- return request, req, nil
-}
-
-func (r *Runtime) CreateHttpRequest(operation *runtime.ClientOperation) (req *http.Request, err error) { //nolint:revive,stylecheck
- _, req, err = r.createHttpRequest(operation)
- return
-}
-
-// Submit a request and when there is a body on success it will turn that into the result
-// all other things are turned into an api error for swagger which retains the status code
-func (r *Runtime) Submit(operation *runtime.ClientOperation) (interface{}, error) {
- _, readResponse, _ := operation.Params, operation.Reader, operation.AuthInfo
-
- request, req, err := r.createHttpRequest(operation)
- if err != nil {
- return nil, err
- }
-
- r.clientOnce.Do(func() {
- r.client = &http.Client{
- Transport: r.Transport,
- Jar: r.Jar,
- }
- })
-
- if r.Debug {
- b, err2 := httputil.DumpRequestOut(req, true)
- if err2 != nil {
- return nil, err2
- }
- r.logger.Debugf("%s\n", string(b))
- }
-
- var parentCtx context.Context
- switch {
- case operation.Context != nil:
- parentCtx = operation.Context
- case r.Context != nil:
- parentCtx = r.Context
- default:
- parentCtx = context.Background()
- }
-
- var (
- ctx context.Context
- cancel context.CancelFunc
- )
- if request.timeout == 0 {
- // There may be a deadline in the context passed to the operation.
- // Otherwise, there is no timeout set.
- ctx, cancel = context.WithCancel(parentCtx)
- } else {
- // Sets the timeout passed from request params (by default runtime.DefaultTimeout).
- // If there is already a deadline in the parent context, the shortest will
- // apply.
- ctx, cancel = context.WithTimeout(parentCtx, request.timeout)
- }
- defer cancel()
-
- var client *http.Client
- if operation.Client != nil {
- client = operation.Client
- } else {
- client = r.client
- }
- req = req.WithContext(ctx)
- res, err := client.Do(req) // make requests, by default follows 10 redirects before failing
- if err != nil {
- return nil, err
- }
- defer res.Body.Close()
-
- ct := res.Header.Get(runtime.HeaderContentType)
- if ct == "" { // this should really never occur
- ct = r.DefaultMediaType
- }
-
- if r.Debug {
- printBody := true
- if ct == runtime.DefaultMime {
- printBody = false // Spare the terminal from a binary blob.
- }
- b, err2 := httputil.DumpResponse(res, printBody)
- if err2 != nil {
- return nil, err2
- }
- r.logger.Debugf("%s\n", string(b))
- }
-
- mt, _, err := mime.ParseMediaType(ct)
- if err != nil {
- return nil, fmt.Errorf("parse content type: %s", err)
- }
-
- cons, ok := r.Consumers[mt]
- if !ok {
- if cons, ok = r.Consumers["*/*"]; !ok {
- // scream about not knowing what to do
- return nil, fmt.Errorf("no consumer: %q", ct)
- }
- }
- return readResponse.ReadResponse(r.response(res), cons)
-}
-
-// SetDebug changes the debug flag.
-// It ensures that client and middlewares have the set debug level.
-func (r *Runtime) SetDebug(debug bool) {
- r.Debug = debug
- middleware.Debug = debug
-}
-
-// SetLogger changes the logger stream.
-// It ensures that client and middlewares use the same logger.
-func (r *Runtime) SetLogger(logger logger.Logger) {
- r.logger = logger
- middleware.Logger = logger
-}
-
-type ClientResponseFunc = func(*http.Response) runtime.ClientResponse //nolint:revive
-
-// SetResponseReader changes the response reader implementation.
-func (r *Runtime) SetResponseReader(f ClientResponseFunc) {
- if f == nil {
- return
- }
- r.response = f
-}
diff --git a/vendor/github.com/go-openapi/runtime/client_auth_info.go b/vendor/github.com/go-openapi/runtime/client_auth_info.go
deleted file mode 100644
index c6c97d9a7c..0000000000
--- a/vendor/github.com/go-openapi/runtime/client_auth_info.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import "github.com/go-openapi/strfmt"
-
-// A ClientAuthInfoWriterFunc converts a function to a request writer interface
-type ClientAuthInfoWriterFunc func(ClientRequest, strfmt.Registry) error
-
-// AuthenticateRequest adds authentication data to the request
-func (fn ClientAuthInfoWriterFunc) AuthenticateRequest(req ClientRequest, reg strfmt.Registry) error {
- return fn(req, reg)
-}
-
-// A ClientAuthInfoWriter implementor knows how to write authentication info to a request
-type ClientAuthInfoWriter interface {
- AuthenticateRequest(ClientRequest, strfmt.Registry) error
-}
diff --git a/vendor/github.com/go-openapi/runtime/client_operation.go b/vendor/github.com/go-openapi/runtime/client_operation.go
deleted file mode 100644
index 5a5d63563a..0000000000
--- a/vendor/github.com/go-openapi/runtime/client_operation.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "context"
- "net/http"
-)
-
-// ClientOperation represents the context for a swagger operation to be submitted to the transport
-type ClientOperation struct {
- ID string
- Method string
- PathPattern string
- ProducesMediaTypes []string
- ConsumesMediaTypes []string
- Schemes []string
- AuthInfo ClientAuthInfoWriter
- Params ClientRequestWriter
- Reader ClientResponseReader
- Context context.Context //nolint:containedctx // we precisely want this type to contain the request context
- Client *http.Client
-}
-
-// A ClientTransport implementor knows how to submit Request objects to some destination
-type ClientTransport interface {
- // Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error)
- Submit(*ClientOperation) (interface{}, error)
-}
diff --git a/vendor/github.com/go-openapi/runtime/client_request.go b/vendor/github.com/go-openapi/runtime/client_request.go
deleted file mode 100644
index 4ebb2deabe..0000000000
--- a/vendor/github.com/go-openapi/runtime/client_request.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "io"
- "net/http"
- "net/url"
- "time"
-
- "github.com/go-openapi/strfmt"
-)
-
-// ClientRequestWriterFunc converts a function to a request writer interface
-type ClientRequestWriterFunc func(ClientRequest, strfmt.Registry) error
-
-// WriteToRequest adds data to the request
-func (fn ClientRequestWriterFunc) WriteToRequest(req ClientRequest, reg strfmt.Registry) error {
- return fn(req, reg)
-}
-
-// ClientRequestWriter is an interface for things that know how to write to a request
-type ClientRequestWriter interface {
- WriteToRequest(ClientRequest, strfmt.Registry) error
-}
-
-// ClientRequest is an interface for things that know how to
-// add information to a swagger client request.
-type ClientRequest interface { //nolint:interfacebloat // a swagger-capable request is quite rich, hence the many getter/setters
- SetHeaderParam(string, ...string) error
-
- GetHeaderParams() http.Header
-
- SetQueryParam(string, ...string) error
-
- SetFormParam(string, ...string) error
-
- SetPathParam(string, string) error
-
- GetQueryParams() url.Values
-
- SetFileParam(string, ...NamedReadCloser) error
-
- SetBodyParam(interface{}) error
-
- SetTimeout(time.Duration) error
-
- GetMethod() string
-
- GetPath() string
-
- GetBody() []byte
-
- GetBodyParam() interface{}
-
- GetFileParam() map[string][]NamedReadCloser
-}
-
-// NamedReadCloser represents a named ReadCloser interface
-type NamedReadCloser interface {
- io.ReadCloser
- Name() string
-}
-
-// NamedReader creates a NamedReadCloser for use as file upload
-func NamedReader(name string, rdr io.Reader) NamedReadCloser {
- rc, ok := rdr.(io.ReadCloser)
- if !ok {
- rc = io.NopCloser(rdr)
- }
- return &namedReadCloser{
- name: name,
- cr: rc,
- }
-}
-
-type namedReadCloser struct {
- name string
- cr io.ReadCloser
-}
-
-func (n *namedReadCloser) Close() error {
- return n.cr.Close()
-}
-func (n *namedReadCloser) Read(p []byte) (int, error) {
- return n.cr.Read(p)
-}
-func (n *namedReadCloser) Name() string {
- return n.name
-}
-
-type TestClientRequest struct {
- Headers http.Header
- Body interface{}
-}
-
-func (t *TestClientRequest) SetHeaderParam(name string, values ...string) error {
- if t.Headers == nil {
- t.Headers = make(http.Header)
- }
- t.Headers.Set(name, values[0])
- return nil
-}
-
-func (t *TestClientRequest) SetQueryParam(_ string, _ ...string) error { return nil }
-
-func (t *TestClientRequest) SetFormParam(_ string, _ ...string) error { return nil }
-
-func (t *TestClientRequest) SetPathParam(_ string, _ string) error { return nil }
-
-func (t *TestClientRequest) SetFileParam(_ string, _ ...NamedReadCloser) error { return nil }
-
-func (t *TestClientRequest) SetBodyParam(body interface{}) error {
- t.Body = body
- return nil
-}
-
-func (t *TestClientRequest) SetTimeout(time.Duration) error {
- return nil
-}
-
-func (t *TestClientRequest) GetQueryParams() url.Values { return nil }
-
-func (t *TestClientRequest) GetMethod() string { return "" }
-
-func (t *TestClientRequest) GetPath() string { return "" }
-
-func (t *TestClientRequest) GetBody() []byte { return nil }
-
-func (t *TestClientRequest) GetBodyParam() interface{} {
- return t.Body
-}
-
-func (t *TestClientRequest) GetFileParam() map[string][]NamedReadCloser {
- return nil
-}
-
-func (t *TestClientRequest) GetHeaderParams() http.Header {
- return t.Headers
-}
diff --git a/vendor/github.com/go-openapi/runtime/client_response.go b/vendor/github.com/go-openapi/runtime/client_response.go
deleted file mode 100644
index 0d1691149d..0000000000
--- a/vendor/github.com/go-openapi/runtime/client_response.go
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "encoding/json"
- "fmt"
- "io"
-)
-
-// A ClientResponse represents a client response
-// This bridges between responses obtained from different transports
-type ClientResponse interface {
- Code() int
- Message() string
- GetHeader(string) string
- GetHeaders(string) []string
- Body() io.ReadCloser
-}
-
-// A ClientResponseReaderFunc turns a function into a ClientResponseReader interface implementation
-type ClientResponseReaderFunc func(ClientResponse, Consumer) (interface{}, error)
-
-// ReadResponse reads the response
-func (read ClientResponseReaderFunc) ReadResponse(resp ClientResponse, consumer Consumer) (interface{}, error) {
- return read(resp, consumer)
-}
-
-// A ClientResponseReader is an interface for things want to read a response.
-// An application of this is to create structs from response values
-type ClientResponseReader interface {
- ReadResponse(ClientResponse, Consumer) (interface{}, error)
-}
-
-// NewAPIError creates a new API error
-func NewAPIError(opName string, payload interface{}, code int) *APIError {
- return &APIError{
- OperationName: opName,
- Response: payload,
- Code: code,
- }
-}
-
-// APIError wraps an error model and captures the status code
-type APIError struct {
- OperationName string
- Response interface{}
- Code int
-}
-
-func (o *APIError) Error() string {
- var resp []byte
- if err, ok := o.Response.(error); ok {
- resp = []byte("'" + err.Error() + "'")
- } else {
- resp, _ = json.Marshal(o.Response)
- }
- return fmt.Sprintf("%s (status %d): %s", o.OperationName, o.Code, resp)
-}
-
-func (o *APIError) String() string {
- return o.Error()
-}
-
-// IsSuccess returns true when this response returns a 2xx status code
-func (o *APIError) IsSuccess() bool {
- return o.Code/100 == 2
-}
-
-// IsRedirect returns true when this response returns a 3xx status code
-func (o *APIError) IsRedirect() bool {
- return o.Code/100 == 3
-}
-
-// IsClientError returns true when this response returns a 4xx status code
-func (o *APIError) IsClientError() bool {
- return o.Code/100 == 4
-}
-
-// IsServerError returns true when this response returns a 5xx status code
-func (o *APIError) IsServerError() bool {
- return o.Code/100 == 5
-}
-
-// IsCode returns true when this response returns the given status code
-func (o *APIError) IsCode(code int) bool {
- return o.Code == code
-}
-
-// A ClientResponseStatus is a common interface implemented by all responses on the generated code
-// You can use this to treat any client response based on status code
-type ClientResponseStatus interface {
- IsSuccess() bool
- IsRedirect() bool
- IsClientError() bool
- IsServerError() bool
- IsCode(int) bool
-}
diff --git a/vendor/github.com/go-openapi/runtime/constants.go b/vendor/github.com/go-openapi/runtime/constants.go
deleted file mode 100644
index 515969242c..0000000000
--- a/vendor/github.com/go-openapi/runtime/constants.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-const (
- // HeaderContentType represents a http content-type header, it's value is supposed to be a mime type
- HeaderContentType = "Content-Type"
-
- // HeaderTransferEncoding represents a http transfer-encoding header.
- HeaderTransferEncoding = "Transfer-Encoding"
-
- // HeaderAccept the Accept header
- HeaderAccept = "Accept"
- // HeaderAuthorization the Authorization header
- HeaderAuthorization = "Authorization"
-
- charsetKey = "charset"
-
- // DefaultMime the default fallback mime type
- DefaultMime = "application/octet-stream"
- // JSONMime the json mime type
- JSONMime = "application/json"
- // YAMLMime the yaml mime type
- YAMLMime = "application/x-yaml"
- // XMLMime the xml mime type
- XMLMime = "application/xml"
- // TextMime the text mime type
- TextMime = "text/plain"
- // HTMLMime the html mime type
- HTMLMime = "text/html"
- // CSVMime the csv mime type
- CSVMime = "text/csv"
- // MultipartFormMime the multipart form mime type
- MultipartFormMime = "multipart/form-data"
- // URLencodedFormMime the url encoded form mime type
- URLencodedFormMime = "application/x-www-form-urlencoded"
-)
diff --git a/vendor/github.com/go-openapi/runtime/csv.go b/vendor/github.com/go-openapi/runtime/csv.go
deleted file mode 100644
index c9597bcd6e..0000000000
--- a/vendor/github.com/go-openapi/runtime/csv.go
+++ /dev/null
@@ -1,350 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "bytes"
- "context"
- "encoding"
- "encoding/csv"
- "errors"
- "fmt"
- "io"
- "reflect"
-
- "golang.org/x/sync/errgroup"
-)
-
-// CSVConsumer creates a new CSV consumer.
-//
-// The consumer consumes CSV records from a provided reader into the data passed by reference.
-//
-// CSVOpts options may be specified to alter the default CSV behavior on the reader and the writer side (e.g. separator, skip header, ...).
-// The defaults are those of the standard library's csv.Reader and csv.Writer.
-//
-// Supported output underlying types and interfaces, prioritized in this order:
-// - *csv.Writer
-// - CSVWriter (writer options are ignored)
-// - io.Writer (as raw bytes)
-// - io.ReaderFrom (as raw bytes)
-// - encoding.BinaryUnmarshaler (as raw bytes)
-// - *[][]string (as a collection of records)
-// - *[]byte (as raw bytes)
-// - *string (as raw bytes)
-//
-// The consumer prioritizes situations where buffering the input is not required.
-func CSVConsumer(opts ...CSVOpt) Consumer {
- o := csvOptsWithDefaults(opts)
-
- return ConsumerFunc(func(reader io.Reader, data interface{}) error {
- if reader == nil {
- return errors.New("CSVConsumer requires a reader")
- }
- if data == nil {
- return errors.New("nil destination for CSVConsumer")
- }
-
- csvReader := csv.NewReader(reader)
- o.applyToReader(csvReader)
- closer := defaultCloser
- if o.closeStream {
- if cl, isReaderCloser := reader.(io.Closer); isReaderCloser {
- closer = cl.Close
- }
- }
- defer func() {
- _ = closer()
- }()
-
- switch destination := data.(type) {
- case *csv.Writer:
- csvWriter := destination
- o.applyToWriter(csvWriter)
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case CSVWriter:
- csvWriter := destination
- // no writer options available
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case io.Writer:
- csvWriter := csv.NewWriter(destination)
- o.applyToWriter(csvWriter)
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case io.ReaderFrom:
- var buf bytes.Buffer
- csvWriter := csv.NewWriter(&buf)
- o.applyToWriter(csvWriter)
- if err := bufferedCSV(csvWriter, csvReader, o); err != nil {
- return err
- }
- _, err := destination.ReadFrom(&buf)
-
- return err
-
- case encoding.BinaryUnmarshaler:
- var buf bytes.Buffer
- csvWriter := csv.NewWriter(&buf)
- o.applyToWriter(csvWriter)
- if err := bufferedCSV(csvWriter, csvReader, o); err != nil {
- return err
- }
-
- return destination.UnmarshalBinary(buf.Bytes())
-
- default:
- // support *[][]string, *[]byte, *string
- if ptr := reflect.TypeOf(data); ptr.Kind() != reflect.Ptr {
- return errors.New("destination must be a pointer")
- }
-
- v := reflect.Indirect(reflect.ValueOf(data))
- t := v.Type()
-
- switch {
- case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Slice && t.Elem().Elem().Kind() == reflect.String:
- csvWriter := &csvRecordsWriter{}
- // writer options are ignored
- if err := pipeCSV(csvWriter, csvReader, o); err != nil {
- return err
- }
-
- v.Grow(len(csvWriter.records))
- v.SetCap(len(csvWriter.records)) // in case Grow was unnessary, trim down the capacity
- v.SetLen(len(csvWriter.records))
- reflect.Copy(v, reflect.ValueOf(csvWriter.records))
-
- return nil
-
- case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8:
- var buf bytes.Buffer
- csvWriter := csv.NewWriter(&buf)
- o.applyToWriter(csvWriter)
- if err := bufferedCSV(csvWriter, csvReader, o); err != nil {
- return err
- }
- v.SetBytes(buf.Bytes())
-
- return nil
-
- case t.Kind() == reflect.String:
- var buf bytes.Buffer
- csvWriter := csv.NewWriter(&buf)
- o.applyToWriter(csvWriter)
- if err := bufferedCSV(csvWriter, csvReader, o); err != nil {
- return err
- }
- v.SetString(buf.String())
-
- return nil
-
- default:
- return fmt.Errorf("%v (%T) is not supported by the CSVConsumer, %s",
- data, data, "can be resolved by supporting CSVWriter/Writer/BinaryUnmarshaler interface",
- )
- }
- }
- })
-}
-
-// CSVProducer creates a new CSV producer.
-//
-// The producer takes input data then writes as CSV to an output writer (essentially as a pipe).
-//
-// Supported input underlying types and interfaces, prioritized in this order:
-// - *csv.Reader
-// - CSVReader (reader options are ignored)
-// - io.Reader
-// - io.WriterTo
-// - encoding.BinaryMarshaler
-// - [][]string
-// - []byte
-// - string
-//
-// The producer prioritizes situations where buffering the input is not required.
-func CSVProducer(opts ...CSVOpt) Producer {
- o := csvOptsWithDefaults(opts)
-
- return ProducerFunc(func(writer io.Writer, data interface{}) error {
- if writer == nil {
- return errors.New("CSVProducer requires a writer")
- }
- if data == nil {
- return errors.New("nil data for CSVProducer")
- }
-
- csvWriter := csv.NewWriter(writer)
- o.applyToWriter(csvWriter)
- closer := defaultCloser
- if o.closeStream {
- if cl, isWriterCloser := writer.(io.Closer); isWriterCloser {
- closer = cl.Close
- }
- }
- defer func() {
- _ = closer()
- }()
-
- if rc, isDataCloser := data.(io.ReadCloser); isDataCloser {
- defer rc.Close()
- }
-
- switch origin := data.(type) {
- case *csv.Reader:
- csvReader := origin
- o.applyToReader(csvReader)
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case CSVReader:
- csvReader := origin
- // no reader options available
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case io.Reader:
- csvReader := csv.NewReader(origin)
- o.applyToReader(csvReader)
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case io.WriterTo:
- // async piping of the writes performed by WriteTo
- r, w := io.Pipe()
- csvReader := csv.NewReader(r)
- o.applyToReader(csvReader)
-
- pipe, _ := errgroup.WithContext(context.Background())
- pipe.Go(func() error {
- _, err := origin.WriteTo(w)
- _ = w.Close()
- return err
- })
-
- pipe.Go(func() error {
- defer func() {
- _ = r.Close()
- }()
-
- return pipeCSV(csvWriter, csvReader, o)
- })
-
- return pipe.Wait()
-
- case encoding.BinaryMarshaler:
- buf, err := origin.MarshalBinary()
- if err != nil {
- return err
- }
- rdr := bytes.NewBuffer(buf)
- csvReader := csv.NewReader(rdr)
-
- return bufferedCSV(csvWriter, csvReader, o)
-
- default:
- // support [][]string, []byte, string (or pointers to those)
- v := reflect.Indirect(reflect.ValueOf(data))
- t := v.Type()
-
- switch {
- case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Slice && t.Elem().Elem().Kind() == reflect.String:
- csvReader := &csvRecordsWriter{
- records: make([][]string, v.Len()),
- }
- reflect.Copy(reflect.ValueOf(csvReader.records), v)
-
- return pipeCSV(csvWriter, csvReader, o)
-
- case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8:
- buf := bytes.NewBuffer(v.Bytes())
- csvReader := csv.NewReader(buf)
- o.applyToReader(csvReader)
-
- return bufferedCSV(csvWriter, csvReader, o)
-
- case t.Kind() == reflect.String:
- buf := bytes.NewBufferString(v.String())
- csvReader := csv.NewReader(buf)
- o.applyToReader(csvReader)
-
- return bufferedCSV(csvWriter, csvReader, o)
-
- default:
- return fmt.Errorf("%v (%T) is not supported by the CSVProducer, %s",
- data, data, "can be resolved by supporting CSVReader/Reader/BinaryMarshaler interface",
- )
- }
- }
- })
-}
-
-// pipeCSV copies CSV records from a CSV reader to a CSV writer
-func pipeCSV(csvWriter CSVWriter, csvReader CSVReader, opts csvOpts) error {
- for ; opts.skippedLines > 0; opts.skippedLines-- {
- _, err := csvReader.Read()
- if err != nil {
- if errors.Is(err, io.EOF) {
- return nil
- }
-
- return err
- }
- }
-
- for {
- record, err := csvReader.Read()
- if err != nil {
- if errors.Is(err, io.EOF) {
- break
- }
-
- return err
- }
-
- if err := csvWriter.Write(record); err != nil {
- return err
- }
- }
-
- csvWriter.Flush()
-
- return csvWriter.Error()
-}
-
-// bufferedCSV copies CSV records from a CSV reader to a CSV writer,
-// by first reading all records then writing them at once.
-func bufferedCSV(csvWriter *csv.Writer, csvReader *csv.Reader, opts csvOpts) error {
- for ; opts.skippedLines > 0; opts.skippedLines-- {
- _, err := csvReader.Read()
- if err != nil {
- if errors.Is(err, io.EOF) {
- return nil
- }
-
- return err
- }
- }
-
- records, err := csvReader.ReadAll()
- if err != nil {
- return err
- }
-
- return csvWriter.WriteAll(records)
-}
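For reference, the CSV consumer and producer above accept plain Go values such as `[][]string`, `[]byte`, and `string` in addition to the reader/writer interfaces listed in their doc comments. A minimal sketch of round-tripping records through them, assuming the `github.com/go-openapi/runtime` import path and a `CSVConsumer` constructor symmetric to the `CSVProducer` shown above (an illustration only, not part of the vendored code):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/go-openapi/runtime"
)

func main() {
	// Produce CSV from in-memory records; [][]string hits the reflection-based
	// default case and is piped through an internal csvRecordsWriter.
	records := [][]string{{"name", "id"}, {"alice", "1"}}
	var out bytes.Buffer
	if err := runtime.CSVProducer().Produce(&out, records); err != nil {
		panic(err)
	}
	fmt.Print(out.String()) // name,id\nalice,1\n

	// Consume the CSV back into a *[][]string destination.
	var parsed [][]string
	if err := runtime.CSVConsumer().Consume(&out, &parsed); err != nil {
		panic(err)
	}
	fmt.Println(len(parsed)) // 2
}
```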
diff --git a/vendor/github.com/go-openapi/runtime/csv_options.go b/vendor/github.com/go-openapi/runtime/csv_options.go
deleted file mode 100644
index c16464c578..0000000000
--- a/vendor/github.com/go-openapi/runtime/csv_options.go
+++ /dev/null
@@ -1,121 +0,0 @@
-package runtime
-
-import (
- "encoding/csv"
- "io"
-)
-
-// CSVOpts alter the behavior of the CSV consumer or producer.
-type CSVOpt func(*csvOpts)
-
-type csvOpts struct {
- csvReader csv.Reader
- csvWriter csv.Writer
- skippedLines int
- closeStream bool
-}
-
-// WithCSVReaderOpts specifies the options to csv.Reader
-// when reading CSV.
-func WithCSVReaderOpts(reader csv.Reader) CSVOpt {
- return func(o *csvOpts) {
- o.csvReader = reader
- }
-}
-
-// WithCSVWriterOpts specifies the options to csv.Writer
-// when writing CSV.
-func WithCSVWriterOpts(writer csv.Writer) CSVOpt {
- return func(o *csvOpts) {
- o.csvWriter = writer
- }
-}
-
-// WithCSVSkipLines will skip header lines.
-func WithCSVSkipLines(skipped int) CSVOpt {
- return func(o *csvOpts) {
- o.skippedLines = skipped
- }
-}
-
-func WithCSVClosesStream() CSVOpt {
- return func(o *csvOpts) {
- o.closeStream = true
- }
-}
-
-func (o csvOpts) applyToReader(in *csv.Reader) {
- if o.csvReader.Comma != 0 {
- in.Comma = o.csvReader.Comma
- }
- if o.csvReader.Comment != 0 {
- in.Comment = o.csvReader.Comment
- }
- if o.csvReader.FieldsPerRecord != 0 {
- in.FieldsPerRecord = o.csvReader.FieldsPerRecord
- }
-
- in.LazyQuotes = o.csvReader.LazyQuotes
- in.TrimLeadingSpace = o.csvReader.TrimLeadingSpace
- in.ReuseRecord = o.csvReader.ReuseRecord
-}
-
-func (o csvOpts) applyToWriter(in *csv.Writer) {
- if o.csvWriter.Comma != 0 {
- in.Comma = o.csvWriter.Comma
- }
- in.UseCRLF = o.csvWriter.UseCRLF
-}
-
-func csvOptsWithDefaults(opts []CSVOpt) csvOpts {
- var o csvOpts
- for _, apply := range opts {
- apply(&o)
- }
-
- return o
-}
-
-type CSVWriter interface {
- Write([]string) error
- Flush()
- Error() error
-}
-
-type CSVReader interface {
- Read() ([]string, error)
-}
-
-var (
- _ CSVWriter = &csvRecordsWriter{}
- _ CSVReader = &csvRecordsWriter{}
-)
-
-// csvRecordsWriter is an internal container to move CSV records back and forth
-type csvRecordsWriter struct {
- i int
- records [][]string
-}
-
-func (w *csvRecordsWriter) Write(record []string) error {
- w.records = append(w.records, record)
-
- return nil
-}
-
-func (w *csvRecordsWriter) Read() ([]string, error) {
- if w.i >= len(w.records) {
- return nil, io.EOF
- }
- defer func() {
- w.i++
- }()
-
- return w.records[w.i], nil
-}
-
-func (w *csvRecordsWriter) Flush() {}
-
-func (w *csvRecordsWriter) Error() error {
- return nil
-}
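Similarly, a hedged sketch of how the `CSVOpt` options above are wired in, assuming the same import path and constructors; only the fields copied by `applyToReader`/`applyToWriter` (Comma, Comment, FieldsPerRecord, LazyQuotes, TrimLeadingSpace, ReuseRecord, UseCRLF) take effect:

```go
package main

import (
	"encoding/csv"
	"os"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	// Producer writes semicolon-separated output.
	producer := runtime.CSVProducer(
		runtime.WithCSVWriterOpts(csv.Writer{Comma: ';'}),
	)
	_ = producer.Produce(os.Stdout, [][]string{{"name", "id"}, {"alice", "1"}})

	// Consumer skips the header row and reads semicolon-separated input.
	consumer := runtime.CSVConsumer(
		runtime.WithCSVReaderOpts(csv.Reader{Comma: ';'}),
		runtime.WithCSVSkipLines(1),
	)
	var rows [][]string
	_ = consumer.Consume(strings.NewReader("name;id\nalice;1\n"), &rows) // rows == [[alice 1]]
}
```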
diff --git a/vendor/github.com/go-openapi/runtime/discard.go b/vendor/github.com/go-openapi/runtime/discard.go
deleted file mode 100644
index 0d390cfd64..0000000000
--- a/vendor/github.com/go-openapi/runtime/discard.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package runtime
-
-import "io"
-
-// DiscardConsumer does absolutely nothing, it's a black hole.
-var DiscardConsumer = ConsumerFunc(func(_ io.Reader, _ interface{}) error { return nil })
-
-// DiscardProducer does absolutely nothing, it's a black hole.
-var DiscardProducer = ProducerFunc(func(_ io.Writer, _ interface{}) error { return nil })
diff --git a/vendor/github.com/go-openapi/runtime/file.go b/vendor/github.com/go-openapi/runtime/file.go
deleted file mode 100644
index 397d8a4593..0000000000
--- a/vendor/github.com/go-openapi/runtime/file.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import "github.com/go-openapi/swag"
-
-type File = swag.File
diff --git a/vendor/github.com/go-openapi/runtime/headers.go b/vendor/github.com/go-openapi/runtime/headers.go
deleted file mode 100644
index 4d111db4fe..0000000000
--- a/vendor/github.com/go-openapi/runtime/headers.go
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "mime"
- "net/http"
-
- "github.com/go-openapi/errors"
-)
-
-// ContentType parses a content type header
-func ContentType(headers http.Header) (string, string, error) {
- ct := headers.Get(HeaderContentType)
- orig := ct
- if ct == "" {
- ct = DefaultMime
- }
- if ct == "" {
- return "", "", nil
- }
-
- mt, opts, err := mime.ParseMediaType(ct)
- if err != nil {
- return "", "", errors.NewParseError(HeaderContentType, "header", orig, err)
- }
-
- if cs, ok := opts[charsetKey]; ok {
- return mt, cs, nil
- }
-
- return mt, "", nil
-}
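A small illustration of the header parsing above, assuming the package's `HeaderContentType` constant is the usual `Content-Type` header name (a sketch, not part of the vendored code):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-openapi/runtime"
)

func main() {
	h := http.Header{}
	h.Set("Content-Type", "application/json; charset=utf-8")

	// Returns the media type and the charset parameter separately.
	mediaType, charset, err := runtime.ContentType(h)
	fmt.Println(mediaType, charset, err) // application/json utf-8 <nil>
}
```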
diff --git a/vendor/github.com/go-openapi/runtime/interfaces.go b/vendor/github.com/go-openapi/runtime/interfaces.go
deleted file mode 100644
index e334128683..0000000000
--- a/vendor/github.com/go-openapi/runtime/interfaces.go
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "context"
- "io"
- "net/http"
-
- "github.com/go-openapi/strfmt"
-)
-
-// OperationHandlerFunc an adapter for a function to the OperationHandler interface
-type OperationHandlerFunc func(interface{}) (interface{}, error)
-
-// Handle implements the operation handler interface
-func (s OperationHandlerFunc) Handle(data interface{}) (interface{}, error) {
- return s(data)
-}
-
-// OperationHandler a handler for a swagger operation
-type OperationHandler interface {
- Handle(interface{}) (interface{}, error)
-}
-
-// ConsumerFunc represents a function that can be used as a consumer
-type ConsumerFunc func(io.Reader, interface{}) error
-
-// Consume consumes the reader into the data parameter
-func (fn ConsumerFunc) Consume(reader io.Reader, data interface{}) error {
- return fn(reader, data)
-}
-
-// Consumer implementations know how to bind the values on the provided interface to
-// data provided by the request body
-type Consumer interface {
- // Consume performs the binding of request values
- Consume(io.Reader, interface{}) error
-}
-
-// ProducerFunc represents a function that can be used as a producer
-type ProducerFunc func(io.Writer, interface{}) error
-
-// Produce produces the response for the provided data
-func (f ProducerFunc) Produce(writer io.Writer, data interface{}) error {
- return f(writer, data)
-}
-
-// Producer implementations know how to turn the provided interface into a valid
-// HTTP response
-type Producer interface {
- // Produce writes to the http response
- Produce(io.Writer, interface{}) error
-}
-
-// AuthenticatorFunc turns a function into an authenticator
-type AuthenticatorFunc func(interface{}) (bool, interface{}, error)
-
-// Authenticate authenticates the request with the provided data
-func (f AuthenticatorFunc) Authenticate(params interface{}) (bool, interface{}, error) {
- return f(params)
-}
-
-// Authenticator represents an authentication strategy
-// implementations of Authenticator know how to authenticate the
-// request data and translate that into a valid principal object or an error
-type Authenticator interface {
- Authenticate(interface{}) (bool, interface{}, error)
-}
-
-// AuthorizerFunc turns a function into an authorizer
-type AuthorizerFunc func(*http.Request, interface{}) error
-
-// Authorize authorizes the processing of the request for the principal
-func (f AuthorizerFunc) Authorize(r *http.Request, principal interface{}) error {
- return f(r, principal)
-}
-
-// Authorizer represents an authorization strategy
-// implementations of Authorizer know how to authorize the principal object
-// using the request data and return an error if unauthorized
-type Authorizer interface {
- Authorize(*http.Request, interface{}) error
-}
-
-// Validatable types implementing this interface allow customizing their validation
-// this will be used instead of the reflective validation based on the spec document.
-// the implementations are assumed to have been generated by the swagger tool so they should
-// contain all the validations obtained from the spec
-type Validatable interface {
- Validate(strfmt.Registry) error
-}
-
-// ContextValidatable types implementing this interface allow customizing their validation
-// this will be used instead of the reflective validation based on the spec document.
-// the implementations are assumed to have been generated by the swagger tool so they should
-// contain all the context validations obtained from the spec
-type ContextValidatable interface {
- ContextValidate(context.Context, strfmt.Registry) error
-}
diff --git a/vendor/github.com/go-openapi/runtime/json.go b/vendor/github.com/go-openapi/runtime/json.go
deleted file mode 100644
index 5a690559cc..0000000000
--- a/vendor/github.com/go-openapi/runtime/json.go
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "encoding/json"
- "io"
-)
-
-// JSONConsumer creates a new JSON consumer
-func JSONConsumer() Consumer {
- return ConsumerFunc(func(reader io.Reader, data interface{}) error {
- dec := json.NewDecoder(reader)
- dec.UseNumber() // preserve number formats
- return dec.Decode(data)
- })
-}
-
-// JSONProducer creates a new JSON producer
-func JSONProducer() Producer {
- return ProducerFunc(func(writer io.Writer, data interface{}) error {
- enc := json.NewEncoder(writer)
- enc.SetEscapeHTML(false)
- return enc.Encode(data)
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/logger/logger.go b/vendor/github.com/go-openapi/runtime/logger/logger.go
deleted file mode 100644
index 6f4debcc14..0000000000
--- a/vendor/github.com/go-openapi/runtime/logger/logger.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package logger
-
-import "os"
-
-type Logger interface {
- Printf(format string, args ...interface{})
- Debugf(format string, args ...interface{})
-}
-
-func DebugEnabled() bool {
- d := os.Getenv("SWAGGER_DEBUG")
- if d != "" && d != "false" && d != "0" {
- return true
- }
- d = os.Getenv("DEBUG")
- if d != "" && d != "false" && d != "0" {
- return true
- }
- return false
-}
diff --git a/vendor/github.com/go-openapi/runtime/logger/standard.go b/vendor/github.com/go-openapi/runtime/logger/standard.go
deleted file mode 100644
index 30035a7777..0000000000
--- a/vendor/github.com/go-openapi/runtime/logger/standard.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package logger
-
-import (
- "fmt"
- "os"
-)
-
-var _ Logger = StandardLogger{}
-
-type StandardLogger struct{}
-
-func (StandardLogger) Printf(format string, args ...interface{}) {
- if len(format) == 0 || format[len(format)-1] != '\n' {
- format += "\n"
- }
- fmt.Fprintf(os.Stderr, format, args...)
-}
-
-func (StandardLogger) Debugf(format string, args ...interface{}) {
- if len(format) == 0 || format[len(format)-1] != '\n' {
- format += "\n"
- }
- fmt.Fprintf(os.Stderr, format, args...)
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/context.go b/vendor/github.com/go-openapi/runtime/middleware/context.go
deleted file mode 100644
index 44cecf1181..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/context.go
+++ /dev/null
@@ -1,722 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- stdContext "context"
- "fmt"
- "net/http"
- "net/url"
- "path"
- "strings"
- "sync"
-
- "github.com/go-openapi/analysis"
- "github.com/go-openapi/errors"
- "github.com/go-openapi/loads"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/runtime/logger"
- "github.com/go-openapi/runtime/middleware/untyped"
- "github.com/go-openapi/runtime/security"
-)
-
-// Debug when true turns on verbose logging
-var Debug = logger.DebugEnabled()
-
-// Logger is the standard library logger used for printing debug messages
-var Logger logger.Logger = logger.StandardLogger{}
-
-func debugLogfFunc(lg logger.Logger) func(string, ...any) {
- if logger.DebugEnabled() {
- if lg == nil {
- return Logger.Debugf
- }
-
- return lg.Debugf
- }
-
- // muted logger
- return func(_ string, _ ...any) {}
-}
-
-// A Builder can create middlewares
-type Builder func(http.Handler) http.Handler
-
-// PassthroughBuilder returns the handler, aka the builder identity function
-func PassthroughBuilder(handler http.Handler) http.Handler { return handler }
-
-// RequestBinder is an interface for types to implement
-// when they want to be able to bind from a request
-type RequestBinder interface {
- BindRequest(*http.Request, *MatchedRoute) error
-}
-
-// Responder is an interface for types to implement
-// when they want to be considered for writing HTTP responses
-type Responder interface {
- WriteResponse(http.ResponseWriter, runtime.Producer)
-}
-
-// ResponderFunc wraps a func as a Responder interface
-type ResponderFunc func(http.ResponseWriter, runtime.Producer)
-
-// WriteResponse writes to the response
-func (fn ResponderFunc) WriteResponse(rw http.ResponseWriter, pr runtime.Producer) {
- fn(rw, pr)
-}
-
-// Context is a type safe wrapper around an untyped request context
-// used throughout to store request context with the standard context attached
-// to the http.Request
-type Context struct {
- spec *loads.Document
- analyzer *analysis.Spec
- api RoutableAPI
- router Router
- debugLogf func(string, ...any) // a logging function to debug context and all components using it
-}
-
-type routableUntypedAPI struct {
- api *untyped.API
- hlock *sync.Mutex
- handlers map[string]map[string]http.Handler
- defaultConsumes string
- defaultProduces string
-}
-
-func newRoutableUntypedAPI(spec *loads.Document, api *untyped.API, context *Context) *routableUntypedAPI {
- var handlers map[string]map[string]http.Handler
- if spec == nil || api == nil {
- return nil
- }
- analyzer := analysis.New(spec.Spec())
- for method, hls := range analyzer.Operations() {
- um := strings.ToUpper(method)
- for path, op := range hls {
- schemes := analyzer.SecurityRequirementsFor(op)
-
- if oh, ok := api.OperationHandlerFor(method, path); ok {
- if handlers == nil {
- handlers = make(map[string]map[string]http.Handler)
- }
- if b, ok := handlers[um]; !ok || b == nil {
- handlers[um] = make(map[string]http.Handler)
- }
-
- var handler http.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- // lookup route info in the context
- route, rCtx, _ := context.RouteInfo(r)
- if rCtx != nil {
- r = rCtx
- }
-
- // bind and validate the request using reflection
- var bound interface{}
- var validation error
- bound, r, validation = context.BindAndValidate(r, route)
- if validation != nil {
- context.Respond(w, r, route.Produces, route, validation)
- return
- }
-
- // actually handle the request
- result, err := oh.Handle(bound)
- if err != nil {
- // respond with failure
- context.Respond(w, r, route.Produces, route, err)
- return
- }
-
- // respond with success
- context.Respond(w, r, route.Produces, route, result)
- })
-
- if len(schemes) > 0 {
- handler = newSecureAPI(context, handler)
- }
- handlers[um][path] = handler
- }
- }
- }
-
- return &routableUntypedAPI{
- api: api,
- hlock: new(sync.Mutex),
- handlers: handlers,
- defaultProduces: api.DefaultProduces,
- defaultConsumes: api.DefaultConsumes,
- }
-}
-
-func (r *routableUntypedAPI) HandlerFor(method, path string) (http.Handler, bool) {
- r.hlock.Lock()
- paths, ok := r.handlers[strings.ToUpper(method)]
- if !ok {
- r.hlock.Unlock()
- return nil, false
- }
- handler, ok := paths[path]
- r.hlock.Unlock()
- return handler, ok
-}
-func (r *routableUntypedAPI) ServeErrorFor(_ string) func(http.ResponseWriter, *http.Request, error) {
- return r.api.ServeError
-}
-func (r *routableUntypedAPI) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
- return r.api.ConsumersFor(mediaTypes)
-}
-func (r *routableUntypedAPI) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
- return r.api.ProducersFor(mediaTypes)
-}
-func (r *routableUntypedAPI) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
- return r.api.AuthenticatorsFor(schemes)
-}
-func (r *routableUntypedAPI) Authorizer() runtime.Authorizer {
- return r.api.Authorizer()
-}
-func (r *routableUntypedAPI) Formats() strfmt.Registry {
- return r.api.Formats()
-}
-
-func (r *routableUntypedAPI) DefaultProduces() string {
- return r.defaultProduces
-}
-
-func (r *routableUntypedAPI) DefaultConsumes() string {
- return r.defaultConsumes
-}
-
-// NewRoutableContext creates a new context for a routable API.
-//
-// If a nil Router is provided, the DefaultRouter (denco-based) will be used.
-func NewRoutableContext(spec *loads.Document, routableAPI RoutableAPI, routes Router) *Context {
- var an *analysis.Spec
- if spec != nil {
- an = analysis.New(spec.Spec())
- }
-
- return NewRoutableContextWithAnalyzedSpec(spec, an, routableAPI, routes)
-}
-
-// NewRoutableContextWithAnalyzedSpec is like NewRoutableContext but takes as input an already analysed spec.
-//
-// If a nil Router is provided, the DefaultRouter (denco-based) will be used.
-func NewRoutableContextWithAnalyzedSpec(spec *loads.Document, an *analysis.Spec, routableAPI RoutableAPI, routes Router) *Context {
- // Either both the spec doc and its analysis are provided, or neither of them.
- if !((spec == nil && an == nil) || (spec != nil && an != nil)) {
- panic(errors.New(http.StatusInternalServerError, "routable context requires either both spec doc and analysis, or none of them"))
- }
-
- return &Context{
- spec: spec,
- api: routableAPI,
- analyzer: an,
- router: routes,
- debugLogf: debugLogfFunc(nil),
- }
-}
-
-// NewContext creates a new context wrapper.
-//
-// If a nil Router is provided, the DefaultRouter (denco-based) will be used.
-func NewContext(spec *loads.Document, api *untyped.API, routes Router) *Context {
- var an *analysis.Spec
- if spec != nil {
- an = analysis.New(spec.Spec())
- }
- ctx := &Context{
- spec: spec,
- analyzer: an,
- router: routes,
- debugLogf: debugLogfFunc(nil),
- }
- ctx.api = newRoutableUntypedAPI(spec, api, ctx)
-
- return ctx
-}
-
-// Serve serves the specified spec with the specified api registrations as a http.Handler
-func Serve(spec *loads.Document, api *untyped.API) http.Handler {
- return ServeWithBuilder(spec, api, PassthroughBuilder)
-}
-
-// ServeWithBuilder serves the specified spec with the specified api registrations as a http.Handler that is decorated
-// by the Builder
-func ServeWithBuilder(spec *loads.Document, api *untyped.API, builder Builder) http.Handler {
- context := NewContext(spec, api, nil)
- return context.APIHandler(builder)
-}
-
-type contextKey int8
-
-const (
- _ contextKey = iota
- ctxContentType
- ctxResponseFormat
- ctxMatchedRoute
- ctxBoundParams
- ctxSecurityPrincipal
- ctxSecurityScopes
-)
-
-// MatchedRouteFrom request context value.
-func MatchedRouteFrom(req *http.Request) *MatchedRoute {
- mr := req.Context().Value(ctxMatchedRoute)
- if mr == nil {
- return nil
- }
- if res, ok := mr.(*MatchedRoute); ok {
- return res
- }
- return nil
-}
-
-// SecurityPrincipalFrom request context value.
-func SecurityPrincipalFrom(req *http.Request) interface{} {
- return req.Context().Value(ctxSecurityPrincipal)
-}
-
-// SecurityScopesFrom request context value.
-func SecurityScopesFrom(req *http.Request) []string {
- rs := req.Context().Value(ctxSecurityScopes)
- if res, ok := rs.([]string); ok {
- return res
- }
- return nil
-}
-
-type contentTypeValue struct {
- MediaType string
- Charset string
-}
-
-// BasePath returns the base path for this API
-func (c *Context) BasePath() string {
- return c.spec.BasePath()
-}
-
-// SetLogger allows for injecting a logger to catch debug entries.
-//
-// The logger is enabled in DEBUG mode only.
-func (c *Context) SetLogger(lg logger.Logger) {
- c.debugLogf = debugLogfFunc(lg)
-}
-
-// RequiredProduces returns the accepted content types for responses
-func (c *Context) RequiredProduces() []string {
- return c.analyzer.RequiredProduces()
-}
-
-// BindValidRequest binds a params object to a request but only when the request is valid
-// if the request is not valid an error will be returned
-func (c *Context) BindValidRequest(request *http.Request, route *MatchedRoute, binder RequestBinder) error {
- var res []error
- var requestContentType string
-
- // check and validate content type, select consumer
- if runtime.HasBody(request) {
- ct, _, err := runtime.ContentType(request.Header)
- if err != nil {
- res = append(res, err)
- } else {
- c.debugLogf("validating content type for %q against [%s]", ct, strings.Join(route.Consumes, ", "))
- if err := validateContentType(route.Consumes, ct); err != nil {
- res = append(res, err)
- }
- if len(res) == 0 {
- cons, ok := route.Consumers[ct]
- if !ok {
- res = append(res, errors.New(500, "no consumer registered for %s", ct))
- } else {
- route.Consumer = cons
- requestContentType = ct
- }
- }
- }
- }
-
- // check and validate the response format
- if len(res) == 0 {
- // if the route does not provide Produces and a default contentType could not be identified
- // based on a body (typical for GET and DELETE requests), then default the contentType to */*.
- if len(route.Produces) == 0 && requestContentType == "" {
- requestContentType = "*/*"
- }
-
- if str := NegotiateContentType(request, route.Produces, requestContentType); str == "" {
- res = append(res, errors.InvalidResponseFormat(request.Header.Get(runtime.HeaderAccept), route.Produces))
- }
- }
-
- // now bind the request with the provided binder
- // it's assumed the binder will also validate the request and return an error if the
- // request is invalid
- if binder != nil && len(res) == 0 {
- if err := binder.BindRequest(request, route); err != nil {
- return err
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// ContentType gets the parsed value of a content type
-// Returns the media type, its charset and a shallow copy of the request
-// when its context doesn't contain the content type value, otherwise it returns
-// the same request
-// Returns any error that runtime.ContentType may return.
-func (c *Context) ContentType(request *http.Request) (string, string, *http.Request, error) {
- var rCtx = request.Context()
-
- if v, ok := rCtx.Value(ctxContentType).(*contentTypeValue); ok {
- return v.MediaType, v.Charset, request, nil
- }
-
- mt, cs, err := runtime.ContentType(request.Header)
- if err != nil {
- return "", "", nil, err
- }
- rCtx = stdContext.WithValue(rCtx, ctxContentType, &contentTypeValue{mt, cs})
- return mt, cs, request.WithContext(rCtx), nil
-}
-
-// LookupRoute looks a route up and returns true when it is found
-func (c *Context) LookupRoute(request *http.Request) (*MatchedRoute, bool) {
- if route, ok := c.router.Lookup(request.Method, request.URL.EscapedPath()); ok {
- return route, ok
- }
- return nil, false
-}
-
-// RouteInfo tries to match a route for this request
-// Returns the matched route, a shallow copy of the request if its context
-// did not already contain the matched route (otherwise the same request), and a bool
-// indicating whether the request matches one of the routes; if it doesn't,
-// it returns false and nil for the other two return values
-func (c *Context) RouteInfo(request *http.Request) (*MatchedRoute, *http.Request, bool) {
- var rCtx = request.Context()
-
- if v, ok := rCtx.Value(ctxMatchedRoute).(*MatchedRoute); ok {
- return v, request, ok
- }
-
- if route, ok := c.LookupRoute(request); ok {
- rCtx = stdContext.WithValue(rCtx, ctxMatchedRoute, route)
- return route, request.WithContext(rCtx), ok
- }
-
- return nil, nil, false
-}
-
-// ResponseFormat negotiates the response content type
-// Returns the response format and a shallow copy of the request if its context
-// doesn't contain the response format, otherwise the same request
-func (c *Context) ResponseFormat(r *http.Request, offers []string) (string, *http.Request) {
- var rCtx = r.Context()
-
- if v, ok := rCtx.Value(ctxResponseFormat).(string); ok {
- c.debugLogf("[%s %s] found response format %q in context", r.Method, r.URL.Path, v)
- return v, r
- }
-
- format := NegotiateContentType(r, offers, "")
- if format != "" {
- c.debugLogf("[%s %s] set response format %q in context", r.Method, r.URL.Path, format)
- r = r.WithContext(stdContext.WithValue(rCtx, ctxResponseFormat, format))
- }
- c.debugLogf("[%s %s] negotiated response format %q", r.Method, r.URL.Path, format)
- return format, r
-}
-
-// AllowedMethods gets the allowed methods for the path of this request
-func (c *Context) AllowedMethods(request *http.Request) []string {
- return c.router.OtherMethods(request.Method, request.URL.EscapedPath())
-}
-
-// ResetAuth removes the current principal from the request context
-func (c *Context) ResetAuth(request *http.Request) *http.Request {
- rctx := request.Context()
- rctx = stdContext.WithValue(rctx, ctxSecurityPrincipal, nil)
- rctx = stdContext.WithValue(rctx, ctxSecurityScopes, nil)
- return request.WithContext(rctx)
-}
-
-// Authorize authorizes the request
-// Returns the principal object and a shallow copy of the request when its
-// context doesn't contain the principal, otherwise the same request, or an error
-// if one of the authenticators returns one or an Unauthenticated error
-func (c *Context) Authorize(request *http.Request, route *MatchedRoute) (interface{}, *http.Request, error) {
- if route == nil || !route.HasAuth() {
- return nil, nil, nil
- }
-
- var rCtx = request.Context()
- if v := rCtx.Value(ctxSecurityPrincipal); v != nil {
- return v, request, nil
- }
-
- applies, usr, err := route.Authenticators.Authenticate(request, route)
- if !applies || err != nil || !route.Authenticators.AllowsAnonymous() && usr == nil {
- if err != nil {
- return nil, nil, err
- }
- return nil, nil, errors.Unauthenticated("invalid credentials")
- }
- if route.Authorizer != nil {
- if err := route.Authorizer.Authorize(request, usr); err != nil {
- if _, ok := err.(errors.Error); ok {
- return nil, nil, err
- }
-
- return nil, nil, errors.New(http.StatusForbidden, err.Error())
- }
- }
-
- rCtx = request.Context()
-
- rCtx = stdContext.WithValue(rCtx, ctxSecurityPrincipal, usr)
- rCtx = stdContext.WithValue(rCtx, ctxSecurityScopes, route.Authenticator.AllScopes())
- return usr, request.WithContext(rCtx), nil
-}
-
-// BindAndValidate binds and validates the request
-// Returns the validation map and a shallow copy of the request when its context
-// doesn't contain the validation, otherwise it returns the same request or a
-// CompositeValidationError
-func (c *Context) BindAndValidate(request *http.Request, matched *MatchedRoute) (interface{}, *http.Request, error) {
- var rCtx = request.Context()
-
- if v, ok := rCtx.Value(ctxBoundParams).(*validation); ok {
- c.debugLogf("got cached validation (valid: %t)", len(v.result) == 0)
- if len(v.result) > 0 {
- return v.bound, request, errors.CompositeValidationError(v.result...)
- }
- return v.bound, request, nil
- }
- result := validateRequest(c, request, matched)
- rCtx = stdContext.WithValue(rCtx, ctxBoundParams, result)
- request = request.WithContext(rCtx)
- if len(result.result) > 0 {
- return result.bound, request, errors.CompositeValidationError(result.result...)
- }
- c.debugLogf("no validation errors found")
- return result.bound, request, nil
-}
-
-// NotFound is the default not-found responder for when no route has been matched yet
-func (c *Context) NotFound(rw http.ResponseWriter, r *http.Request) {
- c.Respond(rw, r, []string{c.api.DefaultProduces()}, nil, errors.NotFound("not found"))
-}
-
-// Respond renders the response after doing some content negotiation
-func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []string, route *MatchedRoute, data interface{}) {
- c.debugLogf("responding to %s %s with produces: %v", r.Method, r.URL.Path, produces)
- offers := []string{}
- for _, mt := range produces {
- if mt != c.api.DefaultProduces() {
- offers = append(offers, mt)
- }
- }
- // the default producer is last so more specific producers take precedence
- offers = append(offers, c.api.DefaultProduces())
- c.debugLogf("offers: %v", offers)
-
- var format string
- format, r = c.ResponseFormat(r, offers)
- rw.Header().Set(runtime.HeaderContentType, format)
-
- if resp, ok := data.(Responder); ok {
- producers := route.Producers
- // producers contains keys with normalized formats; if a format has a MIME type parameter such as `text/plain; charset=utf-8`,
- // then you must provide `text/plain` to get the correct producer. HOWEVER, format here is not normalized.
- prod, ok := producers[normalizeOffer(format)]
- if !ok {
- prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()}))
- pr, ok := prods[c.api.DefaultProduces()]
- if !ok {
- panic(errors.New(http.StatusInternalServerError, cantFindProducer(format)))
- }
- prod = pr
- }
- resp.WriteResponse(rw, prod)
- return
- }
-
- if err, ok := data.(error); ok {
- if format == "" {
- rw.Header().Set(runtime.HeaderContentType, runtime.JSONMime)
- }
-
- if realm := security.FailedBasicAuth(r); realm != "" {
- rw.Header().Set("WWW-Authenticate", fmt.Sprintf("Basic realm=%q", realm))
- }
-
- if route == nil || route.Operation == nil {
- c.api.ServeErrorFor("")(rw, r, err)
- return
- }
- c.api.ServeErrorFor(route.Operation.ID)(rw, r, err)
- return
- }
-
- if route == nil || route.Operation == nil {
- rw.WriteHeader(http.StatusOK)
- if r.Method == http.MethodHead {
- return
- }
- producers := c.api.ProducersFor(normalizeOffers(offers))
- prod, ok := producers[format]
- if !ok {
- panic(errors.New(http.StatusInternalServerError, cantFindProducer(format)))
- }
- if err := prod.Produce(rw, data); err != nil {
- panic(err) // let the recovery middleware deal with this
- }
- return
- }
-
- if _, code, ok := route.Operation.SuccessResponse(); ok {
- rw.WriteHeader(code)
- if code == http.StatusNoContent || r.Method == http.MethodHead {
- return
- }
-
- producers := route.Producers
- prod, ok := producers[format]
- if !ok {
- if !ok {
- prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()}))
- pr, ok := prods[c.api.DefaultProduces()]
- if !ok {
- panic(errors.New(http.StatusInternalServerError, cantFindProducer(format)))
- }
- prod = pr
- }
- }
- if err := prod.Produce(rw, data); err != nil {
- panic(err) // let the recovery middleware deal with this
- }
- return
- }
-
- c.api.ServeErrorFor(route.Operation.ID)(rw, r, errors.New(http.StatusInternalServerError, "can't produce response"))
-}
-
-// APIHandlerSwaggerUI returns a handler to serve the API.
-//
-// This handler includes a swagger spec, router and the contract defined in the swagger spec.
-//
-// A spec UI (SwaggerUI) is served at {API base path}/docs and the spec document at /swagger.json
-// (these can be modified with uiOptions).
-func (c *Context) APIHandlerSwaggerUI(builder Builder, opts ...UIOption) http.Handler {
- b := builder
- if b == nil {
- b = PassthroughBuilder
- }
-
- specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts)
- var swaggerUIOpts SwaggerUIOpts
- fromCommonToAnyOptions(uiOpts, &swaggerUIOpts)
-
- return Spec(specPath, c.spec.Raw(), SwaggerUI(swaggerUIOpts, c.RoutesHandler(b)), specOpts...)
-}
-
-// APIHandlerRapiDoc returns a handler to serve the API.
-//
-// This handler includes a swagger spec, router and the contract defined in the swagger spec.
-//
-// A spec UI (RapiDoc) is served at {API base path}/docs and the spec document at /swagger.json
-// (these can be modified with uiOptions).
-func (c *Context) APIHandlerRapiDoc(builder Builder, opts ...UIOption) http.Handler {
- b := builder
- if b == nil {
- b = PassthroughBuilder
- }
-
- specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts)
- var rapidocUIOpts RapiDocOpts
- fromCommonToAnyOptions(uiOpts, &rapidocUIOpts)
-
- return Spec(specPath, c.spec.Raw(), RapiDoc(rapidocUIOpts, c.RoutesHandler(b)), specOpts...)
-}
-
-// APIHandler returns a handler to serve the API.
-//
-// This handler includes a swagger spec, router and the contract defined in the swagger spec.
-//
-// A spec UI (Redoc) is served at {API base path}/docs and the spec document at /swagger.json
-// (these can be modified with uiOptions).
-func (c *Context) APIHandler(builder Builder, opts ...UIOption) http.Handler {
- b := builder
- if b == nil {
- b = PassthroughBuilder
- }
-
- specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts)
- var redocOpts RedocOpts
- fromCommonToAnyOptions(uiOpts, &redocOpts)
-
- return Spec(specPath, c.spec.Raw(), Redoc(redocOpts, c.RoutesHandler(b)), specOpts...)
-}
-
-func (c Context) uiOptionsForHandler(opts []UIOption) (string, uiOptions, []SpecOption) {
- var title string
- sp := c.spec.Spec()
- if sp != nil && sp.Info != nil && sp.Info.Title != "" {
- title = sp.Info.Title
- }
-
- // default options (may be overridden)
- optsForContext := []UIOption{
- WithUIBasePath(c.BasePath()),
- WithUITitle(title),
- }
- optsForContext = append(optsForContext, opts...)
- uiOpts := uiOptionsWithDefaults(optsForContext)
-
- // If spec URL is provided, there is a non-default path to serve the spec.
- // This makes sure that the UI middleware is aligned with the Spec middleware.
- u, _ := url.Parse(uiOpts.SpecURL)
- var specPath string
- if u != nil {
- specPath = u.Path
- }
-
- pth, doc := path.Split(specPath)
- if pth == "." {
- pth = ""
- }
-
- return pth, uiOpts, []SpecOption{WithSpecDocument(doc)}
-}
-
-// RoutesHandler returns a handler to serve the API, just the routes and the contract defined in the swagger spec
-func (c *Context) RoutesHandler(builder Builder) http.Handler {
- b := builder
- if b == nil {
- b = PassthroughBuilder
- }
- return NewRouter(c, b(NewOperationExecutor(c)))
-}
-
-func cantFindProducer(format string) string {
- return "can't find a producer for " + format
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE b/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE
deleted file mode 100644
index e65039ad84..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2014 Naoya Inada
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/README.md b/vendor/github.com/go-openapi/runtime/middleware/denco/README.md
deleted file mode 100644
index 30109e17d5..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/denco/README.md
+++ /dev/null
@@ -1,180 +0,0 @@
-# Denco [build status](https://travis-ci.org/naoina/denco)
-
-The fast and flexible HTTP request router for [Go](http://golang.org).
-
-Denco is based on Double-Array implementation of [Kocha-urlrouter](https://github.com/naoina/kocha-urlrouter).
-However, Denco is optimized and has some added features.
-
-## Features
-
-* Fast (See [go-http-routing-benchmark](https://github.com/naoina/go-http-routing-benchmark))
-* [URL patterns](#url-patterns) (`/foo/:bar` and `/foo/*wildcard`)
-* Small (but enough) URL router API
-* HTTP request multiplexer like `http.ServeMux`
-
-## Installation
-
- go get -u github.com/go-openapi/runtime/middleware/denco
-
-## Using as HTTP request multiplexer
-
-```go
-package main
-
-import (
- "fmt"
- "log"
- "net/http"
-
- "github.com/go-openapi/runtime/middleware/denco"
-)
-
-func Index(w http.ResponseWriter, r *http.Request, params denco.Params) {
- fmt.Fprintf(w, "Welcome to Denco!\n")
-}
-
-func User(w http.ResponseWriter, r *http.Request, params denco.Params) {
- fmt.Fprintf(w, "Hello %s!\n", params.Get("name"))
-}
-
-func main() {
- mux := denco.NewMux()
- handler, err := mux.Build([]denco.Handler{
- mux.GET("/", Index),
- mux.GET("/user/:name", User),
- mux.POST("/user/:name", User),
- })
- if err != nil {
- panic(err)
- }
- log.Fatal(http.ListenAndServe(":8080", handler))
-}
-```
-
-## Using as URL router
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/go-openapi/runtime/middleware/denco"
-)
-
-type route struct {
- name string
-}
-
-func main() {
- router := denco.New()
- router.Build([]denco.Record{
- {"/", &route{"root"}},
- {"/user/:id", &route{"user"}},
- {"/user/:name/:id", &route{"username"}},
- {"/static/*filepath", &route{"static"}},
- })
-
- data, params, found := router.Lookup("/")
- // print `&main.route{name:"root"}, denco.Params(nil), true`.
- fmt.Printf("%#v, %#v, %#v\n", data, params, found)
-
- data, params, found = router.Lookup("/user/hoge")
- // print `&main.route{name:"user"}, denco.Params{denco.Param{Name:"id", Value:"hoge"}}, true`.
- fmt.Printf("%#v, %#v, %#v\n", data, params, found)
-
- data, params, found = router.Lookup("/user/hoge/7")
- // print `&main.route{name:"username"}, denco.Params{denco.Param{Name:"name", Value:"hoge"}, denco.Param{Name:"id", Value:"7"}}, true`.
- fmt.Printf("%#v, %#v, %#v\n", data, params, found)
-
- data, params, found = router.Lookup("/static/path/to/file")
- // print `&main.route{name:"static"}, denco.Params{denco.Param{Name:"filepath", Value:"path/to/file"}}, true`.
- fmt.Printf("%#v, %#v, %#v\n", data, params, found)
-}
-```
-
-See [Godoc](http://godoc.org/github.com/go-openapi/runtime/middleware/denco) for more details.
-
-## Getting the value of a path parameter
-
-You can get the value of a path parameter in two ways.
-
-1. Using [`denco.Params.Get`](http://godoc.org/github.com/go-openapi/runtime/middleware/denco#Params.Get) method
-2. Find by loop
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/go-openapi/runtime/middleware/denco"
-)
-
-func main() {
- router := denco.New()
- if err := router.Build([]denco.Record{
- {"/user/:name/:id", "route1"},
- }); err != nil {
- panic(err)
- }
-
- // 1. Using denco.Params.Get method.
- _, params, _ := router.Lookup("/user/alice/1")
- name := params.Get("name")
- if name != "" {
- fmt.Printf("Hello %s.\n", name) // prints "Hello alice.".
- }
-
- // 2. Find by loop.
- for _, param := range params {
- if param.Name == "name" {
- fmt.Printf("Hello %s.\n", name) // prints "Hello alice.".
- }
- }
-}
-```
-
-## URL patterns
-
-Denco's route matching strategy is "most nearly matching".
-
-When routes `/:name` and `/alice` have been built, the URI `/alice` matches the route `/alice`, not `/:name`,
-because `/alice` is the closer match.
-
-For a longer example, when the routes below have been built:
-
-```
-/user/alice
-/user/:name
-/user/:name/:id
-/user/alice/:id
-/user/:id/bob
-```
-
-The routes match as follows:
-
-```
-/user/alice => "/user/alice" (no match with "/user/:name")
-/user/bob => "/user/:name"
-/user/naoina/1 => "/user/:name/1"
-/user/alice/1 => "/user/alice/:id" (no match with "/user/:name/:id")
-/user/1/bob => "/user/:id/bob" (no match with "/user/:name/:id")
-/user/alice/bob => "/user/alice/:id" (no match with "/user/:name/:id" and "/user/:id/bob")
-```
-
-## Limitation
-
-Denco has the following limitations:
-
-* Number of param records (such as `/:name`) must be less than 2^22
-* Number of elements of internal slice must be less than 2^22
-
-## Benchmarks
-
- cd $GOPATH/github.com/go-openapi/runtime/middleware/denco
- go test -bench . -benchmem
-
-## License
-
-Denco is licensed under the MIT License.
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/router.go b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go
deleted file mode 100644
index 4377f77a46..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/denco/router.go
+++ /dev/null
@@ -1,467 +0,0 @@
-// Package denco provides a fast URL router.
-package denco
-
-import (
- "errors"
- "fmt"
- "sort"
- "strings"
-)
-
-const (
- // ParamCharacter is a special character for path parameter.
- ParamCharacter = ':'
-
- // WildcardCharacter is a special character for wildcard path parameter.
- WildcardCharacter = '*'
-
- // TerminationCharacter is a special character for end of path.
- TerminationCharacter = '#'
-
- // SeparatorCharacter separates path segments.
- SeparatorCharacter = '/'
-
- // PathParamCharacter indicates a RESTCONF path param
- PathParamCharacter = '='
-
- // MaxSize is max size of records and internal slice.
- MaxSize = (1 << 22) - 1
-)
-
-// Router represents a URL router.
-type Router struct {
- param *doubleArray
- // SizeHint is the maximum number of path parameters expected in the records passed to Build.
- // SizeHint will be used to determine the capacity of the memory to allocate.
- // By default, SizeHint will be determined from the records given to Build.
- SizeHint int
-
- static map[string]interface{}
-}
-
-// New returns a new Router.
-func New() *Router {
- return &Router{
- SizeHint: -1,
- static: make(map[string]interface{}),
- param: newDoubleArray(),
- }
-}
-
-// Lookup returns data and path parameters that associated with path.
-// params is a slice of the Param that arranged in the order in which parameters appeared.
-// e.g. when built routing path is "/path/to/:id/:name" and given path is "/path/to/1/alice". params order is [{"id": "1"}, {"name": "alice"}], not [{"name": "alice"}, {"id": "1"}].
-func (rt *Router) Lookup(path string) (data interface{}, params Params, found bool) {
- if data, found = rt.static[path]; found {
- return data, nil, true
- }
- if len(rt.param.node) == 1 {
- return nil, nil, false
- }
- nd, params, found := rt.param.lookup(path, make([]Param, 0, rt.SizeHint), 1)
- if !found {
- return nil, nil, false
- }
- for i := 0; i < len(params); i++ {
- params[i].Name = nd.paramNames[i]
- }
- return nd.data, params, true
-}
-
-// Build builds URL router from records.
-func (rt *Router) Build(records []Record) error {
- statics, params := makeRecords(records)
- if len(params) > MaxSize {
- return errors.New("denco: too many records")
- }
- if rt.SizeHint < 0 {
- rt.SizeHint = 0
- for _, p := range params {
- size := 0
- for _, k := range p.Key {
- if k == ParamCharacter || k == WildcardCharacter {
- size++
- }
- }
- if size > rt.SizeHint {
- rt.SizeHint = size
- }
- }
- }
- for _, r := range statics {
- rt.static[r.Key] = r.Value
- }
- if err := rt.param.build(params, 1, 0, make(map[int]struct{})); err != nil {
- return err
- }
- return nil
-}
-
-// Param represents name and value of path parameter.
-type Param struct {
- Name string
- Value string
-}
-
-// Params represents the name and value of path parameters.
-type Params []Param
-
-// Get gets the first value associated with the given name.
-// If there are no values associated with the key, Get returns "".
-func (ps Params) Get(name string) string {
- for _, p := range ps {
- if p.Name == name {
- return p.Value
- }
- }
- return ""
-}
-
-type doubleArray struct {
- bc []baseCheck
- node []*node
-}
-
-func newDoubleArray() *doubleArray {
- return &doubleArray{
- bc: []baseCheck{0},
- node: []*node{nil}, // The start index is adjusted to 1 because 0 is used as a mark for a non-existent node.
- }
-}
-
-// baseCheck contains BASE, CHECK and Extra flags.
-// From the top, 22bits of BASE, 2bits of Extra flags and 8bits of CHECK.
-//
-// BASE (22bit) | Extra flags (2bit) | CHECK (8bit)
-//
-// |----------------------|--|--------|
-// 32 10 8 0
-type baseCheck uint32
-
-func (bc baseCheck) Base() int {
- return int(bc >> 10)
-}
-
-func (bc *baseCheck) SetBase(base int) {
- *bc |= baseCheck(base) << 10
-}
-
-func (bc baseCheck) Check() byte {
- return byte(bc)
-}
-
-func (bc *baseCheck) SetCheck(check byte) {
- *bc |= baseCheck(check)
-}
-
-func (bc baseCheck) IsEmpty() bool {
- return bc&0xfffffcff == 0
-}
-
-func (bc baseCheck) IsSingleParam() bool {
- return bc&paramTypeSingle == paramTypeSingle
-}
-
-func (bc baseCheck) IsWildcardParam() bool {
- return bc&paramTypeWildcard == paramTypeWildcard
-}
-
-func (bc baseCheck) IsAnyParam() bool {
- return bc&paramTypeAny != 0
-}
-
-func (bc *baseCheck) SetSingleParam() {
- *bc |= (1 << 8)
-}
-
-func (bc *baseCheck) SetWildcardParam() {
- *bc |= (1 << 9)
-}
-
-const (
- paramTypeSingle = 0x0100
- paramTypeWildcard = 0x0200
- paramTypeAny = 0x0300
-)
-
-func (da *doubleArray) lookup(path string, params []Param, idx int) (*node, []Param, bool) {
- indices := make([]uint64, 0, 1)
- for i := 0; i < len(path); i++ {
- if da.bc[idx].IsAnyParam() {
- indices = append(indices, (uint64(i)<<32)|(uint64(idx)&0xffffffff))
- }
- c := path[i]
- if idx = nextIndex(da.bc[idx].Base(), c); idx >= len(da.bc) || da.bc[idx].Check() != c {
- goto BACKTRACKING
- }
- }
- if next := nextIndex(da.bc[idx].Base(), TerminationCharacter); next < len(da.bc) && da.bc[next].Check() == TerminationCharacter {
- return da.node[da.bc[next].Base()], params, true
- }
-
-BACKTRACKING:
- for j := len(indices) - 1; j >= 0; j-- {
- i, idx := int(indices[j]>>32), int(indices[j]&0xffffffff)
- if da.bc[idx].IsSingleParam() {
- nextIdx := nextIndex(da.bc[idx].Base(), ParamCharacter)
- if nextIdx >= len(da.bc) {
- break
- }
-
- next := NextSeparator(path, i)
- nextParams := params
- nextParams = append(nextParams, Param{Value: path[i:next]})
- if nd, nextNextParams, found := da.lookup(path[next:], nextParams, nextIdx); found {
- return nd, nextNextParams, true
- }
- }
-
- if da.bc[idx].IsWildcardParam() {
- nextIdx := nextIndex(da.bc[idx].Base(), WildcardCharacter)
- nextParams := params
- nextParams = append(nextParams, Param{Value: path[i:]})
- return da.node[da.bc[nextIdx].Base()], nextParams, true
- }
- }
- return nil, nil, false
-}
-
-// build builds double-array from records.
-func (da *doubleArray) build(srcs []*record, idx, depth int, usedBase map[int]struct{}) error {
- sort.Stable(recordSlice(srcs))
- base, siblings, leaf, err := da.arrange(srcs, idx, depth, usedBase)
- if err != nil {
- return err
- }
- if leaf != nil {
- nd, err := makeNode(leaf)
- if err != nil {
- return err
- }
- da.bc[idx].SetBase(len(da.node))
- da.node = append(da.node, nd)
- }
- for _, sib := range siblings {
- da.setCheck(nextIndex(base, sib.c), sib.c)
- }
- for _, sib := range siblings {
- records := srcs[sib.start:sib.end]
- switch sib.c {
- case ParamCharacter:
- for _, r := range records {
- next := NextSeparator(r.Key, depth+1)
- name := r.Key[depth+1 : next]
- r.paramNames = append(r.paramNames, name)
- r.Key = r.Key[next:]
- }
- da.bc[idx].SetSingleParam()
- if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil {
- return err
- }
- case WildcardCharacter:
- r := records[0]
- name := r.Key[depth+1 : len(r.Key)-1]
- r.paramNames = append(r.paramNames, name)
- r.Key = ""
- da.bc[idx].SetWildcardParam()
- if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil {
- return err
- }
- default:
- if err := da.build(records, nextIndex(base, sib.c), depth+1, usedBase); err != nil {
- return err
- }
- }
- }
- return nil
-}
-
-// setBase sets BASE.
-func (da *doubleArray) setBase(i, base int) {
- da.bc[i].SetBase(base)
-}
-
-// setCheck sets CHECK.
-func (da *doubleArray) setCheck(i int, check byte) {
- da.bc[i].SetCheck(check)
-}
-
-// findEmptyIndex returns an index of unused BASE/CHECK node.
-func (da *doubleArray) findEmptyIndex(start int) int {
- i := start
- for ; i < len(da.bc); i++ {
- if da.bc[i].IsEmpty() {
- break
- }
- }
- return i
-}
-
-// findBase returns good BASE.
-func (da *doubleArray) findBase(siblings []sibling, start int, usedBase map[int]struct{}) (base int) {
- for idx, firstChar := start+1, siblings[0].c; ; idx = da.findEmptyIndex(idx + 1) {
- base = nextIndex(idx, firstChar)
- if _, used := usedBase[base]; used {
- continue
- }
- i := 0
- for ; i < len(siblings); i++ {
- next := nextIndex(base, siblings[i].c)
- if len(da.bc) <= next {
- da.bc = append(da.bc, make([]baseCheck, next-len(da.bc)+1)...)
- }
- if !da.bc[next].IsEmpty() {
- break
- }
- }
- if i == len(siblings) {
- break
- }
- }
- usedBase[base] = struct{}{}
- return base
-}
-
-func (da *doubleArray) arrange(records []*record, idx, depth int, usedBase map[int]struct{}) (base int, siblings []sibling, leaf *record, err error) {
- siblings, leaf, err = makeSiblings(records, depth)
- if err != nil {
- return -1, nil, nil, err
- }
- if len(siblings) < 1 {
- return -1, nil, leaf, nil
- }
- base = da.findBase(siblings, idx, usedBase)
- if base > MaxSize {
- return -1, nil, nil, errors.New("denco: too many elements of internal slice")
- }
- da.setBase(idx, base)
- return base, siblings, leaf, err
-}
-
-// node represents a node of Double-Array.
-type node struct {
- data interface{}
-
- // Names of path parameters.
- paramNames []string
-}
-
-// makeNode returns a new node from record.
-func makeNode(r *record) (*node, error) {
- dups := make(map[string]bool)
- for _, name := range r.paramNames {
- if dups[name] {
- return nil, fmt.Errorf("denco: path parameter `%v' is duplicated in the key `%v'", name, r.Key)
- }
- dups[name] = true
- }
- return &node{data: r.Value, paramNames: r.paramNames}, nil
-}
-
-// sibling represents an intermediate data of build for Double-Array.
-type sibling struct {
- // An index of start of duplicated characters.
- start int
-
- // An index of end of duplicated characters.
- end int
-
- // A character of sibling.
- c byte
-}
-
-// nextIndex returns the next index in the BASE/CHECK array.
-func nextIndex(base int, c byte) int {
- return base ^ int(c)
-}
-
-// makeSiblings returns slice of sibling.
-func makeSiblings(records []*record, depth int) (sib []sibling, leaf *record, err error) {
- var (
- pc byte
- n int
- )
- for i, r := range records {
- if len(r.Key) <= depth {
- leaf = r
- continue
- }
- c := r.Key[depth]
- switch {
- case pc < c:
- sib = append(sib, sibling{start: i, c: c})
- case pc == c:
- continue
- default:
- return nil, nil, errors.New("denco: BUG: routing table hasn't been sorted")
- }
- if n > 0 {
- sib[n-1].end = i
- }
- pc = c
- n++
- }
- if n == 0 {
- return nil, leaf, nil
- }
- sib[n-1].end = len(records)
- return sib, leaf, nil
-}
-
-// Record represents a record data for router construction.
-type Record struct {
- // Key for router construction.
- Key string
-
- // Result value for Key.
- Value interface{}
-}
-
-// NewRecord returns a new Record.
-func NewRecord(key string, value interface{}) Record {
- return Record{
- Key: key,
- Value: value,
- }
-}
-
-// record represents a record used to build the Double-Array.
-type record struct {
- Record
- paramNames []string
-}
-
-// makeRecords returns the records used to build the Double-Arrays.
-func makeRecords(srcs []Record) (statics, params []*record) {
- termChar := string(TerminationCharacter)
- paramPrefix := string(SeparatorCharacter) + string(ParamCharacter)
- wildcardPrefix := string(SeparatorCharacter) + string(WildcardCharacter)
- restconfPrefix := string(PathParamCharacter) + string(ParamCharacter)
- for _, r := range srcs {
- if strings.Contains(r.Key, paramPrefix) || strings.Contains(r.Key, wildcardPrefix) || strings.Contains(r.Key, restconfPrefix) {
- r.Key += termChar
- params = append(params, &record{Record: r})
- } else {
- statics = append(statics, &record{Record: r})
- }
- }
- return statics, params
-}
-
-// recordSlice represents a slice of Record for sort and implements the sort.Interface.
-type recordSlice []*record
-
-// Len implements the sort.Interface.Len.
-func (rs recordSlice) Len() int {
- return len(rs)
-}
-
-// Less implements the sort.Interface.Less.
-func (rs recordSlice) Less(i, j int) bool {
- return rs[i].Key < rs[j].Key
-}
-
-// Swap implements the sort.Interface.Swap.
-func (rs recordSlice) Swap(i, j int) {
- rs[i], rs[j] = rs[j], rs[i]
-}
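For orientation, the builder removed above is the construction half of denco's Double-Array router. A minimal sketch of how these records were consumed follows; the New, Build and Lookup shapes are assumed from their usage in the server.go hunk below, since the package's router.go is not part of this hunk.

package main

import (
	"fmt"

	"github.com/go-openapi/runtime/middleware/denco"
)

func main() {
	records := []denco.Record{
		denco.NewRecord("/users/:id", "users show"),
		denco.NewRecord("/healthz", "health check"),
	}
	router := denco.New()
	if err := router.Build(records); err != nil {
		panic(err)
	}
	// Lookup returns the stored value, the captured path parameters and a found flag.
	data, params, found := router.Lookup("/users/42")
	fmt.Println(data, params, found)
}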
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/server.go b/vendor/github.com/go-openapi/runtime/middleware/denco/server.go
deleted file mode 100644
index 0886713c18..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/denco/server.go
+++ /dev/null
@@ -1,106 +0,0 @@
-package denco
-
-import (
- "net/http"
-)
-
-// Mux represents a multiplexer for HTTP requests.
-type Mux struct{}
-
-// NewMux returns a new Mux.
-func NewMux() *Mux {
- return &Mux{}
-}
-
-// GET is shorthand of Mux.Handler("GET", path, handler).
-func (m *Mux) GET(path string, handler HandlerFunc) Handler {
- return m.Handler("GET", path, handler)
-}
-
-// POST is shorthand of Mux.Handler("POST", path, handler).
-func (m *Mux) POST(path string, handler HandlerFunc) Handler {
- return m.Handler("POST", path, handler)
-}
-
-// PUT is shorthand of Mux.Handler("PUT", path, handler).
-func (m *Mux) PUT(path string, handler HandlerFunc) Handler {
- return m.Handler("PUT", path, handler)
-}
-
-// HEAD is shorthand of Mux.Handler("HEAD", path, handler).
-func (m *Mux) HEAD(path string, handler HandlerFunc) Handler {
- return m.Handler("HEAD", path, handler)
-}
-
-// Handler returns a handler for the given HTTP method.
-func (m *Mux) Handler(method, path string, handler HandlerFunc) Handler {
- return Handler{
- Method: method,
- Path: path,
- Func: handler,
- }
-}
-
-// Build builds an http.Handler.
-func (m *Mux) Build(handlers []Handler) (http.Handler, error) {
- recordMap := make(map[string][]Record)
- for _, h := range handlers {
- recordMap[h.Method] = append(recordMap[h.Method], NewRecord(h.Path, h.Func))
- }
- mux := newServeMux()
- for m, records := range recordMap {
- router := New()
- if err := router.Build(records); err != nil {
- return nil, err
- }
- mux.routers[m] = router
- }
- return mux, nil
-}
-
-// Handler represents an HTTP request handler.
-type Handler struct {
- // Method is an HTTP method.
- Method string
-
- // Path is a routing path for handler.
- Path string
-
-	// Func is the handler function for the HTTP request.
- Func HandlerFunc
-}
-
-// HandlerFunc is the type of a request handler function.
-type HandlerFunc func(w http.ResponseWriter, r *http.Request, params Params)
-
-type serveMux struct {
- routers map[string]*Router
-}
-
-func newServeMux() *serveMux {
- return &serveMux{
- routers: make(map[string]*Router),
- }
-}
-
-// ServeHTTP implements http.Handler interface.
-func (mux *serveMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
- handler, params := mux.handler(r.Method, r.URL.Path)
- handler(w, r, params)
-}
-
-func (mux *serveMux) handler(method, path string) (HandlerFunc, []Param) {
- if router, found := mux.routers[method]; found {
- if handler, params, found := router.Lookup(path); found {
- return handler.(HandlerFunc), params
- }
- }
- return NotFound, nil
-}
-
-// NotFound replies to the request with an HTTP 404 not found error.
-// NotFound is called when the HTTP method is unknown or no handler is found.
-// To use your own NotFound handler, overwrite this variable.
-var NotFound = func(w http.ResponseWriter, r *http.Request, _ Params) {
- http.NotFound(w, r)
-}
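The Mux deleted above wraps one denco Router per HTTP method. A small usage sketch; the Params.Get accessor is assumed from the rest of the denco package, which lies outside this hunk.

package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/go-openapi/runtime/middleware/denco"
)

func main() {
	mux := denco.NewMux()
	handler, err := mux.Build([]denco.Handler{
		mux.GET("/users/:name", func(w http.ResponseWriter, r *http.Request, params denco.Params) {
			// params carries the values captured from the path pattern.
			fmt.Fprintf(w, "hello, %s\n", params.Get("name"))
		}),
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8080", handler))
}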
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/util.go b/vendor/github.com/go-openapi/runtime/middleware/denco/util.go
deleted file mode 100644
index edc1f6ab80..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/denco/util.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package denco
-
-// NextSeparator returns the index of the next separator in path.
-func NextSeparator(path string, start int) int {
- for start < len(path) {
- if c := path[start]; c == '/' || c == TerminationCharacter {
- break
- }
- start++
- }
- return start
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/doc.go b/vendor/github.com/go-openapi/runtime/middleware/doc.go
deleted file mode 100644
index 836a98850d..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/doc.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/*
-Package middleware provides the library with helper functions for serving swagger APIs.
-
-Pseudo middleware handler
-
- import (
- "net/http"
-
- "github.com/go-openapi/errors"
- )
-
- func newCompleteMiddleware(ctx *Context) http.Handler {
- return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- // use context to lookup routes
- if matched, ok := ctx.RouteInfo(r); ok {
-
- if matched.NeedsAuth() {
- if _, err := ctx.Authorize(r, matched); err != nil {
- ctx.Respond(rw, r, matched.Produces, matched, err)
- return
- }
- }
-
- bound, validation := ctx.BindAndValidate(r, matched)
- if validation != nil {
- ctx.Respond(rw, r, matched.Produces, matched, validation)
- return
- }
-
- result, err := matched.Handler.Handle(bound)
- if err != nil {
- ctx.Respond(rw, r, matched.Produces, matched, err)
- return
- }
-
- ctx.Respond(rw, r, matched.Produces, matched, result)
- return
- }
-
- // Not found, check if it exists in the other methods first
- if others := ctx.AllowedMethods(r); len(others) > 0 {
- ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others))
- return
- }
- ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.Path))
- })
- }
-*/
-package middleware
diff --git a/vendor/github.com/go-openapi/runtime/middleware/header/header.go b/vendor/github.com/go-openapi/runtime/middleware/header/header.go
deleted file mode 100644
index df073c87d9..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/header/header.go
+++ /dev/null
@@ -1,332 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-//
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file or at
-// https://developers.google.com/open-source/licenses/bsd.
-
-// this file was taken from the github.com/golang/gddo repository
-
-// Package header provides functions for parsing HTTP headers.
-package header
-
-import (
- "net/http"
- "strings"
- "time"
-)
-
-// Octet types from RFC 2616.
-var octetTypes [256]octetType
-
-type octetType byte
-
-const (
- isToken octetType = 1 << iota
- isSpace
-)
-
-func init() {
-	// OCTET      = <any 8-bit sequence of data>
-	// CHAR       = <any US-ASCII character (octets 0 - 127)>
-	// CTL        = <any US-ASCII control character (octets 0 - 31) and DEL (127)>
-	// CR         = <US-ASCII CR, carriage return (13)>
-	// LF         = <US-ASCII LF, linefeed (10)>
-	// SP         = <US-ASCII SP, space (32)>
-	// HT         = <US-ASCII HT, horizontal-tab (9)>
-	// <">        = <US-ASCII double-quote mark (34)>
-	// CRLF       = CR LF
-	// LWS        = [CRLF] 1*( SP | HT )
-	// TEXT       = <any OCTET except CTLs, but including LWS>
-	// separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <">
-	//              | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT
-	// token      = 1*<any CHAR except CTLs or separators>
-	// qdtext     = <any TEXT except <">>
-
- for c := 0; c < 256; c++ {
- var t octetType
- isCtl := c <= 31 || c == 127
- isChar := 0 <= c && c <= 127
- isSeparator := strings.ContainsRune(" \t\"(),/:;<=>?@[]\\{}", rune(c))
- if strings.ContainsRune(" \t\r\n", rune(c)) {
- t |= isSpace
- }
- if isChar && !isCtl && !isSeparator {
- t |= isToken
- }
- octetTypes[c] = t
- }
-}
-
-// Copy returns a shallow copy of the header.
-func Copy(header http.Header) http.Header {
- h := make(http.Header)
- for k, vs := range header {
- h[k] = vs
- }
- return h
-}
-
-var timeLayouts = []string{"Mon, 02 Jan 2006 15:04:05 GMT", time.RFC850, time.ANSIC}
-
-// ParseTime parses the header as time. The zero value is returned if the
-// header is not present or there is an error parsing the
-// header.
-func ParseTime(header http.Header, key string) time.Time {
- if s := header.Get(key); s != "" {
- for _, layout := range timeLayouts {
- if t, err := time.Parse(layout, s); err == nil {
- return t.UTC()
- }
- }
- }
- return time.Time{}
-}
-
-// ParseList parses a comma separated list of values. Commas are ignored in
-// quoted strings. Quoted values are not unescaped or unquoted. Whitespace is
-// trimmed.
-func ParseList(header http.Header, key string) []string {
- var result []string
- for _, s := range header[http.CanonicalHeaderKey(key)] {
- begin := 0
- end := 0
- escape := false
- quote := false
- for i := 0; i < len(s); i++ {
- b := s[i]
- switch {
- case escape:
- escape = false
- end = i + 1
- case quote:
- switch b {
- case '\\':
- escape = true
- case '"':
- quote = false
- }
- end = i + 1
- case b == '"':
- quote = true
- end = i + 1
- case octetTypes[b]&isSpace != 0:
- if begin == end {
- begin = i + 1
- end = begin
- }
- case b == ',':
- if begin < end {
- result = append(result, s[begin:end])
- }
- begin = i + 1
- end = begin
- default:
- end = i + 1
- }
- }
- if begin < end {
- result = append(result, s[begin:end])
- }
- }
- return result
-}
-
-// ParseValueAndParams parses a comma separated list of values with optional
-// semicolon separated name-value pairs. Content-Type and Content-Disposition
-// headers are in this format.
-func ParseValueAndParams(header http.Header, key string) (string, map[string]string) {
- return parseValueAndParams(header.Get(key))
-}
-
-func parseValueAndParams(s string) (value string, params map[string]string) {
- params = make(map[string]string)
- value, s = expectTokenSlash(s)
- if value == "" {
- return
- }
- value = strings.ToLower(value)
- s = skipSpace(s)
- for strings.HasPrefix(s, ";") {
- var pkey string
- pkey, s = expectToken(skipSpace(s[1:]))
- if pkey == "" {
- return
- }
- if !strings.HasPrefix(s, "=") {
- return
- }
- var pvalue string
- pvalue, s = expectTokenOrQuoted(s[1:])
- if pvalue == "" {
- return
- }
- pkey = strings.ToLower(pkey)
- params[pkey] = pvalue
- s = skipSpace(s)
- }
- return
-}
-
-// AcceptSpec ...
-type AcceptSpec struct {
- Value string
- Q float64
-}
-
-// ParseAccept2 ...
-func ParseAccept2(header http.Header, key string) (specs []AcceptSpec) {
- for _, en := range ParseList(header, key) {
- v, p := parseValueAndParams(en)
- var spec AcceptSpec
- spec.Value = v
- spec.Q = 1.0
- if p != nil {
- if q, ok := p["q"]; ok {
- spec.Q, _ = expectQuality(q)
- }
- }
- if spec.Q < 0.0 {
- continue
- }
- specs = append(specs, spec)
- }
-
- return
-}
-
-// ParseAccept parses Accept* headers.
-func ParseAccept(header http.Header, key string) []AcceptSpec {
- var specs []AcceptSpec
-loop:
- for _, s := range header[key] {
- for {
- var spec AcceptSpec
- spec.Value, s = expectTokenSlash(s)
- if spec.Value == "" {
- continue loop
- }
- spec.Q = 1.0
- s = skipSpace(s)
- if strings.HasPrefix(s, ";") {
- s = skipSpace(s[1:])
- for !strings.HasPrefix(s, "q=") && s != "" && !strings.HasPrefix(s, ",") {
- s = skipSpace(s[1:])
- }
- if strings.HasPrefix(s, "q=") {
- spec.Q, s = expectQuality(s[2:])
- if spec.Q < 0.0 {
- continue loop
- }
- }
- }
-
- specs = append(specs, spec)
- s = skipSpace(s)
- if !strings.HasPrefix(s, ",") {
- continue loop
- }
- s = skipSpace(s[1:])
- }
- }
-
- return specs
-}
-
-func skipSpace(s string) (rest string) {
- i := 0
- for ; i < len(s); i++ {
- if octetTypes[s[i]]&isSpace == 0 {
- break
- }
- }
- return s[i:]
-}
-
-func expectToken(s string) (token, rest string) {
- i := 0
- for ; i < len(s); i++ {
- if octetTypes[s[i]]&isToken == 0 {
- break
- }
- }
- return s[:i], s[i:]
-}
-
-func expectTokenSlash(s string) (token, rest string) {
- i := 0
- for ; i < len(s); i++ {
- b := s[i]
- if (octetTypes[b]&isToken == 0) && b != '/' {
- break
- }
- }
- return s[:i], s[i:]
-}
-
-func expectQuality(s string) (q float64, rest string) {
- switch {
- case len(s) == 0:
- return -1, ""
- case s[0] == '0':
- // q is already 0
- s = s[1:]
- case s[0] == '1':
- s = s[1:]
- q = 1
- case s[0] == '.':
- // q is already 0
- default:
- return -1, ""
- }
- if !strings.HasPrefix(s, ".") {
- return q, s
- }
- s = s[1:]
- i := 0
- n := 0
- d := 1
- for ; i < len(s); i++ {
- b := s[i]
- if b < '0' || b > '9' {
- break
- }
- n = n*10 + int(b) - '0'
- d *= 10
- }
- return q + float64(n)/float64(d), s[i:]
-}
-
-func expectTokenOrQuoted(s string) (value string, rest string) {
- if !strings.HasPrefix(s, "\"") {
- return expectToken(s)
- }
- s = s[1:]
- for i := 0; i < len(s); i++ {
- switch s[i] {
- case '"':
- return s[:i], s[i+1:]
- case '\\':
- p := make([]byte, len(s)-1)
- j := copy(p, s[:i])
- escape := true
- for i++; i < len(s); i++ {
- b := s[i]
- switch {
- case escape:
- escape = false
- p[j] = b
- j++
- case b == '\\':
- escape = true
- case b == '"':
- return string(p[:j]), s[i+1:]
- default:
- p[j] = b
- j++
- }
- }
- return "", ""
- }
- }
- return "", ""
-}
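To make the header parsing helpers removed above concrete, a short sketch of how they are typically called; the header values are illustrative, and the import path is the vendored package being deleted here.

package main

import (
	"fmt"
	"net/http"

	"github.com/go-openapi/runtime/middleware/header"
)

func main() {
	req, _ := http.NewRequest(http.MethodGet, "/", nil)
	req.Header.Set("Accept", "application/json;q=0.8, text/plain")
	req.Header.Set("Content-Type", "multipart/form-data; boundary=xyz")

	// Each Accept entry becomes an AcceptSpec; a missing q parameter defaults to 1.0.
	for _, spec := range header.ParseAccept(req.Header, "Accept") {
		fmt.Printf("%s (q=%.1f)\n", spec.Value, spec.Q)
	}

	// ParseValueAndParams splits the media type from its parameters.
	mt, params := header.ParseValueAndParams(req.Header, "Content-Type")
	fmt.Println(mt, params["boundary"]) // multipart/form-data xyz
}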
diff --git a/vendor/github.com/go-openapi/runtime/middleware/negotiate.go b/vendor/github.com/go-openapi/runtime/middleware/negotiate.go
deleted file mode 100644
index a9b6f27d3d..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/negotiate.go
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-//
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file or at
-// https://developers.google.com/open-source/licenses/bsd.
-
-// this file was taken from the github.com/golang/gddo repository
-
-package middleware
-
-import (
- "net/http"
- "strings"
-
- "github.com/go-openapi/runtime/middleware/header"
-)
-
-// NegotiateContentEncoding returns the best offered content encoding for the
-// request's Accept-Encoding header. If two offers match with equal weight,
-// then the offer earlier in the list is preferred. If no offers are
-// acceptable, then "" is returned.
-func NegotiateContentEncoding(r *http.Request, offers []string) string {
- bestOffer := "identity"
- bestQ := -1.0
- specs := header.ParseAccept(r.Header, "Accept-Encoding")
- for _, offer := range offers {
- for _, spec := range specs {
- if spec.Q > bestQ &&
- (spec.Value == "*" || spec.Value == offer) {
- bestQ = spec.Q
- bestOffer = offer
- }
- }
- }
- if bestQ == 0 {
- bestOffer = ""
- }
- return bestOffer
-}
-
-// NegotiateContentType returns the best offered content type for the request's
-// Accept header. If two offers match with equal weight, then the more specific
-// offer is preferred. For example, text/* trumps */*. If two offers match
-// with equal weight and specificity, then the offer earlier in the list is
-// preferred. If no offers match, then defaultOffer is returned.
-func NegotiateContentType(r *http.Request, offers []string, defaultOffer string) string {
- bestOffer := defaultOffer
- bestQ := -1.0
- bestWild := 3
- specs := header.ParseAccept(r.Header, "Accept")
- for _, rawOffer := range offers {
- offer := normalizeOffer(rawOffer)
- // No Accept header: just return the first offer.
- if len(specs) == 0 {
- return rawOffer
- }
- for _, spec := range specs {
- switch {
- case spec.Q == 0.0:
- // ignore
- case spec.Q < bestQ:
-				// skip: a better match has already been found
- case spec.Value == "*/*":
- if spec.Q > bestQ || bestWild > 2 {
- bestQ = spec.Q
- bestWild = 2
- bestOffer = rawOffer
- }
- case strings.HasSuffix(spec.Value, "/*"):
- if strings.HasPrefix(offer, spec.Value[:len(spec.Value)-1]) &&
- (spec.Q > bestQ || bestWild > 1) {
- bestQ = spec.Q
- bestWild = 1
- bestOffer = rawOffer
- }
- default:
- if spec.Value == offer &&
- (spec.Q > bestQ || bestWild > 0) {
- bestQ = spec.Q
- bestWild = 0
- bestOffer = rawOffer
- }
- }
- }
- }
- return bestOffer
-}
-
-func normalizeOffers(orig []string) (norm []string) {
- for _, o := range orig {
- norm = append(norm, normalizeOffer(o))
- }
- return
-}
-
-func normalizeOffer(orig string) string {
- return strings.SplitN(orig, ";", 2)[0]
-}
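A sketch of how the negotiation helper deleted above is used inside a handler; the offers and the fallback content type are illustrative.

package main

import (
	"fmt"
	"net/http"

	"github.com/go-openapi/runtime/middleware"
)

func main() {
	http.HandleFunc("/things", func(w http.ResponseWriter, r *http.Request) {
		// Pick the representation the client prefers; fall back to JSON when
		// the Accept header matches none of the offers.
		ct := middleware.NegotiateContentType(r, []string{"application/json", "text/plain"}, "application/json")
		w.Header().Set("Content-Type", ct)
		fmt.Fprintln(w, "negotiated:", ct)
	})
	_ = http.ListenAndServe(":8080", nil)
}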
diff --git a/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go b/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go
deleted file mode 100644
index bc6942a0f1..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- "net/http"
-
- "github.com/go-openapi/runtime"
-)
-
-type errorResp struct {
- code int
- response interface{}
- headers http.Header
-}
-
-func (e *errorResp) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
- for k, v := range e.headers {
- for _, val := range v {
- rw.Header().Add(k, val)
- }
- }
- if e.code > 0 {
- rw.WriteHeader(e.code)
- } else {
- rw.WriteHeader(http.StatusInternalServerError)
- }
- if err := producer.Produce(rw, e.response); err != nil {
- Logger.Printf("failed to write error response: %v", err)
- }
-}
-
-// NotImplemented creates the error responder returned when an operation is not yet implemented.
-func NotImplemented(message string) Responder {
- return Error(http.StatusNotImplemented, message)
-}
-
-// Error creates a generic responder for returning errors; the data will be serialized
-// with the matching producer for the request.
-func Error(code int, data interface{}, headers ...http.Header) Responder {
- var hdr http.Header
- for _, h := range headers {
- for k, v := range h {
- if hdr == nil {
- hdr = make(http.Header)
- }
- hdr[k] = v
- }
- }
- return &errorResp{
- code: code,
- response: data,
- headers: hdr,
- }
-}
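The responder helpers removed above are commonly returned from generated handler stubs. A brief sketch, assuming the Responder interface and runtime.JSONProducer from the surrounding go-openapi/runtime package, which is not shown in this hunk.

package main

import (
	"net/http"
	"net/http/httptest"

	"github.com/go-openapi/runtime"
	"github.com/go-openapi/runtime/middleware"
)

func main() {
	// A stubbed operation typically starts life as a NotImplemented responder.
	responder := middleware.NotImplemented("operation listThings has not yet been implemented")

	// Error builds the same kind of responder with an explicit status code and payload.
	conflict := middleware.Error(http.StatusConflict, map[string]string{"message": "already exists"})

	responder.WriteResponse(httptest.NewRecorder(), runtime.JSONProducer())
	conflict.WriteResponse(httptest.NewRecorder(), runtime.JSONProducer())
}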
diff --git a/vendor/github.com/go-openapi/runtime/middleware/operation.go b/vendor/github.com/go-openapi/runtime/middleware/operation.go
deleted file mode 100644
index 1175a63cf2..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/operation.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import "net/http"
-
-// NewOperationExecutor creates a context aware middleware that handles the operations after routing
-func NewOperationExecutor(ctx *Context) http.Handler {
- return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- // use context to lookup routes
- route, rCtx, _ := ctx.RouteInfo(r)
- if rCtx != nil {
- r = rCtx
- }
-
- route.Handler.ServeHTTP(rw, r)
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/parameter.go b/vendor/github.com/go-openapi/runtime/middleware/parameter.go
deleted file mode 100644
index 9c3353a95c..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/parameter.go
+++ /dev/null
@@ -1,491 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- "encoding"
- "encoding/base64"
- "fmt"
- "io"
- "net/http"
- "reflect"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-
- "github.com/go-openapi/runtime"
-)
-
-const defaultMaxMemory = 32 << 20
-
-const (
- typeString = "string"
- typeArray = "array"
-)
-
-var textUnmarshalType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
-
-func newUntypedParamBinder(param spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *untypedParamBinder {
- binder := new(untypedParamBinder)
- binder.Name = param.Name
- binder.parameter = ¶m
- binder.formats = formats
- if param.In != "body" {
- binder.validator = validate.NewParamValidator(¶m, formats)
- } else {
- binder.validator = validate.NewSchemaValidator(param.Schema, spec, param.Name, formats)
- }
-
- return binder
-}
-
-type untypedParamBinder struct {
- parameter *spec.Parameter
- formats strfmt.Registry
- Name string
- validator validate.EntityValidator
-}
-
-func (p *untypedParamBinder) Type() reflect.Type {
- return p.typeForSchema(p.parameter.Type, p.parameter.Format, p.parameter.Items)
-}
-
-func (p *untypedParamBinder) typeForSchema(tpe, format string, items *spec.Items) reflect.Type {
- switch tpe {
- case "boolean":
- return reflect.TypeOf(true)
-
- case typeString:
- if tt, ok := p.formats.GetType(format); ok {
- return tt
- }
- return reflect.TypeOf("")
-
- case "integer":
- switch format {
- case "int8":
- return reflect.TypeOf(int8(0))
- case "int16":
- return reflect.TypeOf(int16(0))
- case "int32":
- return reflect.TypeOf(int32(0))
- case "int64":
- return reflect.TypeOf(int64(0))
- default:
- return reflect.TypeOf(int64(0))
- }
-
- case "number":
- switch format {
- case "float":
- return reflect.TypeOf(float32(0))
- case "double":
- return reflect.TypeOf(float64(0))
- }
-
- case typeArray:
- if items == nil {
- return nil
- }
- itemsType := p.typeForSchema(items.Type, items.Format, items.Items)
- if itemsType == nil {
- return nil
- }
- return reflect.MakeSlice(reflect.SliceOf(itemsType), 0, 0).Type()
-
- case "file":
- return reflect.TypeOf(&runtime.File{}).Elem()
-
- case "object":
- return reflect.TypeOf(map[string]interface{}{})
- }
- return nil
-}
-
-func (p *untypedParamBinder) allowsMulti() bool {
- return p.parameter.In == "query" || p.parameter.In == "formData"
-}
-
-func (p *untypedParamBinder) readValue(values runtime.Gettable, target reflect.Value) ([]string, bool, bool, error) {
- name, in, cf, tpe := p.parameter.Name, p.parameter.In, p.parameter.CollectionFormat, p.parameter.Type
- if tpe == typeArray {
- if cf == "multi" {
- if !p.allowsMulti() {
- return nil, false, false, errors.InvalidCollectionFormat(name, in, cf)
- }
- vv, hasKey, _ := values.GetOK(name)
- return vv, false, hasKey, nil
- }
-
- v, hk, hv := values.GetOK(name)
- if !hv {
- return nil, false, hk, nil
- }
- d, c, e := p.readFormattedSliceFieldValue(v[len(v)-1], target)
- return d, c, hk, e
- }
-
- vv, hk, _ := values.GetOK(name)
- return vv, false, hk, nil
-}
-
-func (p *untypedParamBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, target reflect.Value) error {
- // fmt.Println("binding", p.name, "as", p.Type())
- switch p.parameter.In {
- case "query":
- data, custom, hasKey, err := p.readValue(runtime.Values(request.URL.Query()), target)
- if err != nil {
- return err
- }
- if custom {
- return nil
- }
-
- return p.bindValue(data, hasKey, target)
-
- case "header":
- data, custom, hasKey, err := p.readValue(runtime.Values(request.Header), target)
- if err != nil {
- return err
- }
- if custom {
- return nil
- }
- return p.bindValue(data, hasKey, target)
-
- case "path":
- data, custom, hasKey, err := p.readValue(routeParams, target)
- if err != nil {
- return err
- }
- if custom {
- return nil
- }
- return p.bindValue(data, hasKey, target)
-
- case "formData":
- var err error
- var mt string
-
- mt, _, e := runtime.ContentType(request.Header)
- if e != nil {
- // because of the interface conversion go thinks the error is not nil
- // so we first check for nil and then set the err var if it's not nil
- err = e
- }
-
- if err != nil {
- return errors.InvalidContentType("", []string{"multipart/form-data", "application/x-www-form-urlencoded"})
- }
-
- if mt != "multipart/form-data" && mt != "application/x-www-form-urlencoded" {
- return errors.InvalidContentType(mt, []string{"multipart/form-data", "application/x-www-form-urlencoded"})
- }
-
- if mt == "multipart/form-data" {
- if err = request.ParseMultipartForm(defaultMaxMemory); err != nil {
- return errors.NewParseError(p.Name, p.parameter.In, "", err)
- }
- }
-
- if err = request.ParseForm(); err != nil {
- return errors.NewParseError(p.Name, p.parameter.In, "", err)
- }
-
- if p.parameter.Type == "file" {
- file, header, ffErr := request.FormFile(p.parameter.Name)
- if ffErr != nil {
- if p.parameter.Required {
- return errors.NewParseError(p.Name, p.parameter.In, "", ffErr)
- }
-
- return nil
- }
-
- target.Set(reflect.ValueOf(runtime.File{Data: file, Header: header}))
- return nil
- }
-
- if request.MultipartForm != nil {
- data, custom, hasKey, rvErr := p.readValue(runtime.Values(request.MultipartForm.Value), target)
- if rvErr != nil {
- return rvErr
- }
- if custom {
- return nil
- }
- return p.bindValue(data, hasKey, target)
- }
- data, custom, hasKey, err := p.readValue(runtime.Values(request.PostForm), target)
- if err != nil {
- return err
- }
- if custom {
- return nil
- }
- return p.bindValue(data, hasKey, target)
-
- case "body":
- newValue := reflect.New(target.Type())
- if !runtime.HasBody(request) {
- if p.parameter.Default != nil {
- target.Set(reflect.ValueOf(p.parameter.Default))
- }
-
- return nil
- }
- if err := consumer.Consume(request.Body, newValue.Interface()); err != nil {
- if err == io.EOF && p.parameter.Default != nil {
- target.Set(reflect.ValueOf(p.parameter.Default))
- return nil
- }
- tpe := p.parameter.Type
- if p.parameter.Format != "" {
- tpe = p.parameter.Format
- }
- return errors.InvalidType(p.Name, p.parameter.In, tpe, nil)
- }
- target.Set(reflect.Indirect(newValue))
- return nil
- default:
- return errors.New(500, fmt.Sprintf("invalid parameter location %q", p.parameter.In))
- }
-}
-
-func (p *untypedParamBinder) bindValue(data []string, hasKey bool, target reflect.Value) error {
- if p.parameter.Type == typeArray {
- return p.setSliceFieldValue(target, p.parameter.Default, data, hasKey)
- }
- var d string
- if len(data) > 0 {
- d = data[len(data)-1]
- }
- return p.setFieldValue(target, p.parameter.Default, d, hasKey)
-}
-
-func (p *untypedParamBinder) setFieldValue(target reflect.Value, defaultValue interface{}, data string, hasKey bool) error { //nolint:gocyclo
- tpe := p.parameter.Type
- if p.parameter.Format != "" {
- tpe = p.parameter.Format
- }
-
- if (!hasKey || (!p.parameter.AllowEmptyValue && data == "")) && p.parameter.Required && p.parameter.Default == nil {
- return errors.Required(p.Name, p.parameter.In, data)
- }
-
- ok, err := p.tryUnmarshaler(target, defaultValue, data)
- if err != nil {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if ok {
- return nil
- }
-
- defVal := reflect.Zero(target.Type())
- if defaultValue != nil {
- defVal = reflect.ValueOf(defaultValue)
- }
-
- if tpe == "byte" {
- if data == "" {
- if target.CanSet() {
- target.SetBytes(defVal.Bytes())
- }
- return nil
- }
-
- b, err := base64.StdEncoding.DecodeString(data)
- if err != nil {
- b, err = base64.URLEncoding.DecodeString(data)
- if err != nil {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- }
- if target.CanSet() {
- target.SetBytes(b)
- }
- return nil
- }
-
- switch target.Kind() { //nolint:exhaustive // we want to check only types that map from a swagger parameter
- case reflect.Bool:
- if data == "" {
- if target.CanSet() {
- target.SetBool(defVal.Bool())
- }
- return nil
- }
- b, err := swag.ConvertBool(data)
- if err != nil {
- return err
- }
- if target.CanSet() {
- target.SetBool(b)
- }
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- if data == "" {
- if target.CanSet() {
- rd := defVal.Convert(reflect.TypeOf(int64(0)))
- target.SetInt(rd.Int())
- }
- return nil
- }
- i, err := strconv.ParseInt(data, 10, 64)
- if err != nil {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if target.OverflowInt(i) {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if target.CanSet() {
- target.SetInt(i)
- }
-
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- if data == "" {
- if target.CanSet() {
- rd := defVal.Convert(reflect.TypeOf(uint64(0)))
- target.SetUint(rd.Uint())
- }
- return nil
- }
- u, err := strconv.ParseUint(data, 10, 64)
- if err != nil {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if target.OverflowUint(u) {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if target.CanSet() {
- target.SetUint(u)
- }
-
- case reflect.Float32, reflect.Float64:
- if data == "" {
- if target.CanSet() {
- rd := defVal.Convert(reflect.TypeOf(float64(0)))
- target.SetFloat(rd.Float())
- }
- return nil
- }
- f, err := strconv.ParseFloat(data, 64)
- if err != nil {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if target.OverflowFloat(f) {
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- if target.CanSet() {
- target.SetFloat(f)
- }
-
- case reflect.String:
- value := data
- if value == "" {
- value = defVal.String()
- }
- // validate string
- if target.CanSet() {
- target.SetString(value)
- }
-
- case reflect.Ptr:
- if data == "" && defVal.Kind() == reflect.Ptr {
- if target.CanSet() {
- target.Set(defVal)
- }
- return nil
- }
- newVal := reflect.New(target.Type().Elem())
- if err := p.setFieldValue(reflect.Indirect(newVal), defVal, data, hasKey); err != nil {
- return err
- }
- if target.CanSet() {
- target.Set(newVal)
- }
-
- default:
- return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
- }
- return nil
-}
-
-func (p *untypedParamBinder) tryUnmarshaler(target reflect.Value, defaultValue interface{}, data string) (bool, error) {
- if !target.CanSet() {
- return false, nil
- }
- // When a type implements encoding.TextUnmarshaler we'll use that instead of reflecting some more
- if reflect.PtrTo(target.Type()).Implements(textUnmarshalType) {
- if defaultValue != nil && len(data) == 0 {
- target.Set(reflect.ValueOf(defaultValue))
- return true, nil
- }
- value := reflect.New(target.Type())
- if err := value.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(data)); err != nil {
- return true, err
- }
- target.Set(reflect.Indirect(value))
- return true, nil
- }
- return false, nil
-}
-
-func (p *untypedParamBinder) readFormattedSliceFieldValue(data string, target reflect.Value) ([]string, bool, error) {
- ok, err := p.tryUnmarshaler(target, p.parameter.Default, data)
- if err != nil {
- return nil, true, err
- }
- if ok {
- return nil, true, nil
- }
-
- return swag.SplitByFormat(data, p.parameter.CollectionFormat), false, nil
-}
-
-func (p *untypedParamBinder) setSliceFieldValue(target reflect.Value, defaultValue interface{}, data []string, hasKey bool) error {
- sz := len(data)
- if (!hasKey || (!p.parameter.AllowEmptyValue && (sz == 0 || (sz == 1 && data[0] == "")))) && p.parameter.Required && defaultValue == nil {
- return errors.Required(p.Name, p.parameter.In, data)
- }
-
- defVal := reflect.Zero(target.Type())
- if defaultValue != nil {
- defVal = reflect.ValueOf(defaultValue)
- }
-
- if !target.CanSet() {
- return nil
- }
- if sz == 0 {
- target.Set(defVal)
- return nil
- }
-
- value := reflect.MakeSlice(reflect.SliceOf(target.Type().Elem()), sz, sz)
-
- for i := 0; i < sz; i++ {
- if err := p.setFieldValue(value.Index(i), nil, data[i], hasKey); err != nil {
- return err
- }
- }
-
- target.Set(value)
-
- return nil
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go
deleted file mode 100644
index ef75e7441f..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package middleware
-
-import (
- "bytes"
- "fmt"
- "html/template"
- "net/http"
- "path"
-)
-
-// RapiDocOpts configures the RapiDoc middlewares
-type RapiDocOpts struct {
- // BasePath for the UI, defaults to: /
- BasePath string
-
- // Path combines with BasePath to construct the path to the UI, defaults to: "docs".
- Path string
-
- // SpecURL is the URL of the spec document.
- //
- // Defaults to: /swagger.json
- SpecURL string
-
-	// Title for the documentation site, defaults to: API documentation
- Title string
-
- // Template specifies a custom template to serve the UI
- Template string
-
- // RapiDocURL points to the js asset that generates the rapidoc site.
- //
- // Defaults to https://unpkg.com/rapidoc/dist/rapidoc-min.js
- RapiDocURL string
-}
-
-func (r *RapiDocOpts) EnsureDefaults() {
- common := toCommonUIOptions(r)
- common.EnsureDefaults()
- fromCommonToAnyOptions(common, r)
-
- // rapidoc-specifics
- if r.RapiDocURL == "" {
- r.RapiDocURL = rapidocLatest
- }
- if r.Template == "" {
- r.Template = rapidocTemplate
- }
-}
-
-// RapiDoc creates a middleware to serve a documentation site for a swagger spec.
-//
-// This allows for altering the spec before starting the http listener.
-func RapiDoc(opts RapiDocOpts, next http.Handler) http.Handler {
- opts.EnsureDefaults()
-
- pth := path.Join(opts.BasePath, opts.Path)
- tmpl := template.Must(template.New("rapidoc").Parse(opts.Template))
- assets := bytes.NewBuffer(nil)
- if err := tmpl.Execute(assets, opts); err != nil {
- panic(fmt.Errorf("cannot execute template: %w", err))
- }
-
- return serveUI(pth, assets.Bytes(), next)
-}
-
-const (
- rapidocLatest = "https://unpkg.com/rapidoc/dist/rapidoc-min.js"
-	rapidocTemplate = `<!doctype html>
-<html>
-<head>
-  <title>{{ .Title }}</title>
-  <meta charset="utf-8">
-  <script type="module" src="{{ .RapiDocURL }}"></script>
-</head>
-<body>
-  <rapi-doc spec-url="{{ .SpecURL }}"></rapi-doc>
-</body>
-</html>
-`
-)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/redoc.go b/vendor/github.com/go-openapi/runtime/middleware/redoc.go
deleted file mode 100644
index b96b01e7f3..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/redoc.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package middleware
-
-import (
- "bytes"
- "fmt"
- "html/template"
- "net/http"
- "path"
-)
-
-// RedocOpts configures the Redoc middlewares
-type RedocOpts struct {
- // BasePath for the UI, defaults to: /
- BasePath string
-
- // Path combines with BasePath to construct the path to the UI, defaults to: "docs".
- Path string
-
- // SpecURL is the URL of the spec document.
- //
- // Defaults to: /swagger.json
- SpecURL string
-
-	// Title for the documentation site, defaults to: API documentation
- Title string
-
- // Template specifies a custom template to serve the UI
- Template string
-
- // RedocURL points to the js that generates the redoc site.
- //
- // Defaults to: https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js
- RedocURL string
-}
-
-// EnsureDefaults in case some options are missing
-func (r *RedocOpts) EnsureDefaults() {
- common := toCommonUIOptions(r)
- common.EnsureDefaults()
- fromCommonToAnyOptions(common, r)
-
- // redoc-specifics
- if r.RedocURL == "" {
- r.RedocURL = redocLatest
- }
- if r.Template == "" {
- r.Template = redocTemplate
- }
-}
-
-// Redoc creates a middleware to serve a documentation site for a swagger spec.
-//
-// This allows for altering the spec before starting the http listener.
-func Redoc(opts RedocOpts, next http.Handler) http.Handler {
- opts.EnsureDefaults()
-
- pth := path.Join(opts.BasePath, opts.Path)
- tmpl := template.Must(template.New("redoc").Parse(opts.Template))
- assets := bytes.NewBuffer(nil)
- if err := tmpl.Execute(assets, opts); err != nil {
- panic(fmt.Errorf("cannot execute template: %w", err))
- }
-
- return serveUI(pth, assets.Bytes(), next)
-}
-
-const (
- redocLatest = "https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js"
-	redocTemplate = `<!DOCTYPE html>
-<html>
-  <head>
-    <title>{{ .Title }}</title>
-    <meta charset="utf-8"/>
-    <meta name="viewport" content="width=device-width, initial-scale=1">
-    <style>
-      body {
-        margin: 0;
-        padding: 0;
-      }
-    </style>
-  </head>
-  <body>
-    <redoc spec-url='{{ .SpecURL }}'></redoc>
-    <script src="{{ .RedocURL }}"> </script>
-  </body>
-</html>
-`
-)
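Both documentation-UI middlewares above follow the same wiring pattern; a minimal sketch for Redoc, assuming the swagger spec is served separately at /swagger.json.

package main

import (
	"net/http"

	"github.com/go-openapi/runtime/middleware"
)

func main() {
	// Serve the ReDoc page at /docs; any other request falls through to next.
	next := http.NotFoundHandler()
	docs := middleware.Redoc(middleware.RedocOpts{SpecURL: "/swagger.json", Path: "docs"}, next)
	_ = http.ListenAndServe(":8080", docs)
}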
diff --git a/vendor/github.com/go-openapi/runtime/middleware/request.go b/vendor/github.com/go-openapi/runtime/middleware/request.go
deleted file mode 100644
index 82e1436652..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/request.go
+++ /dev/null
@@ -1,117 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- "net/http"
- "reflect"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/runtime/logger"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-// UntypedRequestBinder binds and validates the data from an HTTP request.
-type UntypedRequestBinder struct {
- Spec *spec.Swagger
- Parameters map[string]spec.Parameter
- Formats strfmt.Registry
- paramBinders map[string]*untypedParamBinder
- debugLogf func(string, ...any) // a logging function to debug context and all components using it
-}
-
-// NewUntypedRequestBinder creates a new binder for reading a request.
-func NewUntypedRequestBinder(parameters map[string]spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *UntypedRequestBinder {
- binders := make(map[string]*untypedParamBinder)
- for fieldName, param := range parameters {
- binders[fieldName] = newUntypedParamBinder(param, spec, formats)
- }
- return &UntypedRequestBinder{
- Parameters: parameters,
- paramBinders: binders,
- Spec: spec,
- Formats: formats,
- debugLogf: debugLogfFunc(nil),
- }
-}
-
-// Bind performs the data binding and validation.
-func (o *UntypedRequestBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, data interface{}) error {
- val := reflect.Indirect(reflect.ValueOf(data))
- isMap := val.Kind() == reflect.Map
- var result []error
- o.debugLogf("binding %d parameters for %s %s", len(o.Parameters), request.Method, request.URL.EscapedPath())
- for fieldName, param := range o.Parameters {
- binder := o.paramBinders[fieldName]
- o.debugLogf("binding parameter %s for %s %s", fieldName, request.Method, request.URL.EscapedPath())
- var target reflect.Value
- if !isMap {
- binder.Name = fieldName
- target = val.FieldByName(fieldName)
- }
-
- if isMap {
- tpe := binder.Type()
- if tpe == nil {
- if param.Schema.Type.Contains(typeArray) {
- tpe = reflect.TypeOf([]interface{}{})
- } else {
- tpe = reflect.TypeOf(map[string]interface{}{})
- }
- }
- target = reflect.Indirect(reflect.New(tpe))
- }
-
- if !target.IsValid() {
- result = append(result, errors.New(500, "parameter name %q is an unknown field", binder.Name))
- continue
- }
-
- if err := binder.Bind(request, routeParams, consumer, target); err != nil {
- result = append(result, err)
- continue
- }
-
- if binder.validator != nil {
- rr := binder.validator.Validate(target.Interface())
- if rr != nil && rr.HasErrors() {
- result = append(result, rr.AsError())
- }
- }
-
- if isMap {
- val.SetMapIndex(reflect.ValueOf(param.Name), target)
- }
- }
-
- if len(result) > 0 {
- return errors.CompositeValidationError(result...)
- }
-
- return nil
-}
-
-// SetLogger allows for injecting a logger to catch debug entries.
-//
-// The logger is enabled in DEBUG mode only.
-func (o *UntypedRequestBinder) SetLogger(lg logger.Logger) {
- o.debugLogf = debugLogfFunc(lg)
-}
-
-func (o *UntypedRequestBinder) setDebugLogf(fn func(string, ...any)) {
- o.debugLogf = fn
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/router.go b/vendor/github.com/go-openapi/runtime/middleware/router.go
deleted file mode 100644
index 3a6aee90e5..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/router.go
+++ /dev/null
@@ -1,531 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- "fmt"
- "net/http"
- "net/url"
- fpath "path"
- "regexp"
- "strings"
-
- "github.com/go-openapi/runtime/logger"
- "github.com/go-openapi/runtime/security"
- "github.com/go-openapi/swag"
-
- "github.com/go-openapi/analysis"
- "github.com/go-openapi/errors"
- "github.com/go-openapi/loads"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/runtime/middleware/denco"
-)
-
-// RouteParam is an object to capture route params in a framework-agnostic way.
-// Implementations of the muxer should use these route params to communicate with the
-// swagger framework.
-type RouteParam struct {
- Name string
- Value string
-}
-
-// RouteParams the collection of route params
-type RouteParams []RouteParam
-
-// Get gets the value for the route param for the specified key
-func (r RouteParams) Get(name string) string {
- vv, _, _ := r.GetOK(name)
- if len(vv) > 0 {
- return vv[len(vv)-1]
- }
- return ""
-}
-
-// GetOK gets the value but also returns booleans to indicate if a key or value
-// is present. This aids in validation and satisfies an interface in use there
-//
-// The returned values are: data, has key, has value
-func (r RouteParams) GetOK(name string) ([]string, bool, bool) {
- for _, p := range r {
- if p.Name == name {
- return []string{p.Value}, true, p.Value != ""
- }
- }
- return nil, false, false
-}
-
-// NewRouter creates a new context-aware router middleware
-func NewRouter(ctx *Context, next http.Handler) http.Handler {
- if ctx.router == nil {
- ctx.router = DefaultRouter(ctx.spec, ctx.api, WithDefaultRouterLoggerFunc(ctx.debugLogf))
- }
-
- return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- if _, rCtx, ok := ctx.RouteInfo(r); ok {
- next.ServeHTTP(rw, rCtx)
- return
- }
-
- // Not found, check if it exists in the other methods first
- if others := ctx.AllowedMethods(r); len(others) > 0 {
- ctx.Respond(rw, r, ctx.analyzer.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others))
- return
- }
-
- ctx.Respond(rw, r, ctx.analyzer.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.EscapedPath()))
- })
-}
-
-// RoutableAPI represents an interface for things that can serve
-// as a provider of implementations for the swagger router
-type RoutableAPI interface {
- HandlerFor(string, string) (http.Handler, bool)
- ServeErrorFor(string) func(http.ResponseWriter, *http.Request, error)
- ConsumersFor([]string) map[string]runtime.Consumer
- ProducersFor([]string) map[string]runtime.Producer
- AuthenticatorsFor(map[string]spec.SecurityScheme) map[string]runtime.Authenticator
- Authorizer() runtime.Authorizer
- Formats() strfmt.Registry
- DefaultProduces() string
- DefaultConsumes() string
-}
-
-// Router represents a swagger-aware router
-type Router interface {
- Lookup(method, path string) (*MatchedRoute, bool)
- OtherMethods(method, path string) []string
-}
-
-type defaultRouteBuilder struct {
- spec *loads.Document
- analyzer *analysis.Spec
- api RoutableAPI
- records map[string][]denco.Record
- debugLogf func(string, ...any) // a logging function to debug context and all components using it
-}
-
-type defaultRouter struct {
- spec *loads.Document
- routers map[string]*denco.Router
- debugLogf func(string, ...any) // a logging function to debug context and all components using it
-}
-
-func newDefaultRouteBuilder(spec *loads.Document, api RoutableAPI, opts ...DefaultRouterOpt) *defaultRouteBuilder {
- var o defaultRouterOpts
- for _, apply := range opts {
- apply(&o)
- }
- if o.debugLogf == nil {
- o.debugLogf = debugLogfFunc(nil) // defaults to standard logger
- }
-
- return &defaultRouteBuilder{
- spec: spec,
- analyzer: analysis.New(spec.Spec()),
- api: api,
- records: make(map[string][]denco.Record),
- debugLogf: o.debugLogf,
- }
-}
-
-// DefaultRouterOpt allows injecting optional behavior into the default router.
-type DefaultRouterOpt func(*defaultRouterOpts)
-
-type defaultRouterOpts struct {
- debugLogf func(string, ...any)
-}
-
-// WithDefaultRouterLogger sets the debug logger for the default router.
-//
-// This is enabled only in DEBUG mode.
-func WithDefaultRouterLogger(lg logger.Logger) DefaultRouterOpt {
- return func(o *defaultRouterOpts) {
- o.debugLogf = debugLogfFunc(lg)
- }
-}
-
-// WithDefaultRouterLoggerFunc sets a logging debug method for the default router.
-func WithDefaultRouterLoggerFunc(fn func(string, ...any)) DefaultRouterOpt {
- return func(o *defaultRouterOpts) {
- o.debugLogf = fn
- }
-}
-
-// DefaultRouter creates a default implementation of the router
-func DefaultRouter(spec *loads.Document, api RoutableAPI, opts ...DefaultRouterOpt) Router {
- builder := newDefaultRouteBuilder(spec, api, opts...)
- if spec != nil {
- for method, paths := range builder.analyzer.Operations() {
- for path, operation := range paths {
- fp := fpath.Join(spec.BasePath(), path)
- builder.debugLogf("adding route %s %s %q", method, fp, operation.ID)
- builder.AddRoute(method, fp, operation)
- }
- }
- }
- return builder.Build()
-}
-
-// RouteAuthenticator is an authenticator that can compose several authenticators together.
-// It also knows when it contains an authenticator that allows for anonymous pass through.
-// Contains a group of 1 or more authenticators that have a logical AND relationship
-type RouteAuthenticator struct {
- Authenticator map[string]runtime.Authenticator
- Schemes []string
- Scopes map[string][]string
- allScopes []string
- commonScopes []string
- allowAnonymous bool
-}
-
-func (ra *RouteAuthenticator) AllowsAnonymous() bool {
- return ra.allowAnonymous
-}
-
-// AllScopes returns a list of unique scopes that is the combination
-// of all the scopes in the requirements
-func (ra *RouteAuthenticator) AllScopes() []string {
- return ra.allScopes
-}
-
-// CommonScopes returns a list of unique scopes that are common in all the
-// scopes in the requirements
-func (ra *RouteAuthenticator) CommonScopes() []string {
- return ra.commonScopes
-}
-
-// Authenticate Authenticator interface implementation
-func (ra *RouteAuthenticator) Authenticate(req *http.Request, route *MatchedRoute) (bool, interface{}, error) {
- if ra.allowAnonymous {
- route.Authenticator = ra
- return true, nil, nil
- }
- // iterate in proper order
- var lastResult interface{}
- for _, scheme := range ra.Schemes {
- if authenticator, ok := ra.Authenticator[scheme]; ok {
- applies, princ, err := authenticator.Authenticate(&security.ScopedAuthRequest{
- Request: req,
- RequiredScopes: ra.Scopes[scheme],
- })
- if !applies {
- return false, nil, nil
- }
- if err != nil {
- route.Authenticator = ra
- return true, nil, err
- }
- lastResult = princ
- }
- }
- route.Authenticator = ra
- return true, lastResult, nil
-}
-
-func stringSliceUnion(slices ...[]string) []string {
- unique := make(map[string]struct{})
- var result []string
- for _, slice := range slices {
- for _, entry := range slice {
- if _, ok := unique[entry]; ok {
- continue
- }
- unique[entry] = struct{}{}
- result = append(result, entry)
- }
- }
- return result
-}
-
-func stringSliceIntersection(slices ...[]string) []string {
- unique := make(map[string]int)
- var intersection []string
-
- total := len(slices)
- var emptyCnt int
- for _, slice := range slices {
- if len(slice) == 0 {
- emptyCnt++
- continue
- }
-
- for _, entry := range slice {
- unique[entry]++
- if unique[entry] == total-emptyCnt { // this entry appeared in all the non-empty slices
- intersection = append(intersection, entry)
- }
- }
- }
-
- return intersection
-}
-
-// RouteAuthenticators represents a group of authenticators that represent a logical OR
-type RouteAuthenticators []RouteAuthenticator
-
-// AllowsAnonymous returns true when there is an authenticator that means optional auth
-func (ras RouteAuthenticators) AllowsAnonymous() bool {
- for _, ra := range ras {
- if ra.AllowsAnonymous() {
- return true
- }
- }
- return false
-}
-
-// Authenticate method implementation so this collection can be used as an authenticator.
-func (ras RouteAuthenticators) Authenticate(req *http.Request, route *MatchedRoute) (bool, interface{}, error) {
- var lastError error
- var allowsAnon bool
- var anonAuth RouteAuthenticator
-
- for _, ra := range ras {
- if ra.AllowsAnonymous() {
- anonAuth = ra
- allowsAnon = true
- continue
- }
- applies, usr, err := ra.Authenticate(req, route)
- if !applies || err != nil || usr == nil {
- if err != nil {
- lastError = err
- }
- continue
- }
- return applies, usr, nil
- }
-
- if allowsAnon && lastError == nil {
- route.Authenticator = &anonAuth
- return true, nil, lastError
- }
- return lastError != nil, nil, lastError
-}
-
-type routeEntry struct {
- PathPattern string
- BasePath string
- Operation *spec.Operation
- Consumes []string
- Consumers map[string]runtime.Consumer
- Produces []string
- Producers map[string]runtime.Producer
- Parameters map[string]spec.Parameter
- Handler http.Handler
- Formats strfmt.Registry
- Binder *UntypedRequestBinder
- Authenticators RouteAuthenticators
- Authorizer runtime.Authorizer
-}
-
-// MatchedRoute represents the route that was matched in this request
-type MatchedRoute struct {
- routeEntry
- Params RouteParams
- Consumer runtime.Consumer
- Producer runtime.Producer
- Authenticator *RouteAuthenticator
-}
-
-// HasAuth returns true when the route has a security requirement defined
-func (m *MatchedRoute) HasAuth() bool {
- return len(m.Authenticators) > 0
-}
-
-// NeedsAuth returns true when the request still
-// needs to perform authentication
-func (m *MatchedRoute) NeedsAuth() bool {
- return m.HasAuth() && m.Authenticator == nil
-}
-
-func (d *defaultRouter) Lookup(method, path string) (*MatchedRoute, bool) {
- mth := strings.ToUpper(method)
- d.debugLogf("looking up route for %s %s", method, path)
- if Debug {
- if len(d.routers) == 0 {
- d.debugLogf("there are no known routers")
- }
- for meth := range d.routers {
- d.debugLogf("got a router for %s", meth)
- }
- }
- if router, ok := d.routers[mth]; ok {
- if m, rp, ok := router.Lookup(fpath.Clean(path)); ok && m != nil {
- if entry, ok := m.(*routeEntry); ok {
- d.debugLogf("found a route for %s %s with %d parameters", method, path, len(entry.Parameters))
- var params RouteParams
- for _, p := range rp {
- v, err := url.PathUnescape(p.Value)
- if err != nil {
-						d.debugLogf("failed to unescape %q: %v", p.Value, err)
- v = p.Value
- }
- // a workaround to handle fragment/composing parameters until they are supported in denco router
- // check if this parameter is a fragment within a path segment
- if xpos := strings.Index(entry.PathPattern, fmt.Sprintf("{%s}", p.Name)) + len(p.Name) + 2; xpos < len(entry.PathPattern) && entry.PathPattern[xpos] != '/' {
- // extract fragment parameters
- ep := strings.Split(entry.PathPattern[xpos:], "/")[0]
- pnames, pvalues := decodeCompositParams(p.Name, v, ep, nil, nil)
- for i, pname := range pnames {
- params = append(params, RouteParam{Name: pname, Value: pvalues[i]})
- }
- } else {
- // use the parameter directly
- params = append(params, RouteParam{Name: p.Name, Value: v})
- }
- }
- return &MatchedRoute{routeEntry: *entry, Params: params}, true
- }
- } else {
- d.debugLogf("couldn't find a route by path for %s %s", method, path)
- }
- } else {
- d.debugLogf("couldn't find a route by method for %s %s", method, path)
- }
- return nil, false
-}
-
-func (d *defaultRouter) OtherMethods(method, path string) []string {
- mn := strings.ToUpper(method)
- var methods []string
- for k, v := range d.routers {
- if k != mn {
- if _, _, ok := v.Lookup(fpath.Clean(path)); ok {
- methods = append(methods, k)
- continue
- }
- }
- }
- return methods
-}
-
-func (d *defaultRouter) SetLogger(lg logger.Logger) {
- d.debugLogf = debugLogfFunc(lg)
-}
-
-// convert swagger parameters per path segment into a denco parameter as multiple parameters per segment are not supported in denco
-var pathConverter = regexp.MustCompile(`{(.+?)}([^/]*)`)
-
-func decodeCompositParams(name string, value string, pattern string, names []string, values []string) ([]string, []string) {
- pleft := strings.Index(pattern, "{")
- names = append(names, name)
- if pleft < 0 {
- if strings.HasSuffix(value, pattern) {
- values = append(values, value[:len(value)-len(pattern)])
- } else {
- values = append(values, "")
- }
- } else {
- toskip := pattern[:pleft]
- pright := strings.Index(pattern, "}")
- vright := strings.Index(value, toskip)
- if vright >= 0 {
- values = append(values, value[:vright])
- } else {
- values = append(values, "")
- value = ""
- }
- return decodeCompositParams(pattern[pleft+1:pright], value[vright+len(toskip):], pattern[pright+1:], names, values)
- }
- return names, values
-}
-
-func (d *defaultRouteBuilder) AddRoute(method, path string, operation *spec.Operation) {
- mn := strings.ToUpper(method)
-
- bp := fpath.Clean(d.spec.BasePath())
- if len(bp) > 0 && bp[len(bp)-1] == '/' {
- bp = bp[:len(bp)-1]
- }
-
- d.debugLogf("operation: %#v", *operation)
- if handler, ok := d.api.HandlerFor(method, strings.TrimPrefix(path, bp)); ok {
- consumes := d.analyzer.ConsumesFor(operation)
- produces := d.analyzer.ProducesFor(operation)
- parameters := d.analyzer.ParamsFor(method, strings.TrimPrefix(path, bp))
-
- // add API defaults if not part of the spec
- if defConsumes := d.api.DefaultConsumes(); defConsumes != "" && !swag.ContainsStringsCI(consumes, defConsumes) {
- consumes = append(consumes, defConsumes)
- }
-
- if defProduces := d.api.DefaultProduces(); defProduces != "" && !swag.ContainsStringsCI(produces, defProduces) {
- produces = append(produces, defProduces)
- }
-
- requestBinder := NewUntypedRequestBinder(parameters, d.spec.Spec(), d.api.Formats())
- requestBinder.setDebugLogf(d.debugLogf)
- record := denco.NewRecord(pathConverter.ReplaceAllString(path, ":$1"), &routeEntry{
- BasePath: bp,
- PathPattern: path,
- Operation: operation,
- Handler: handler,
- Consumes: consumes,
- Produces: produces,
- Consumers: d.api.ConsumersFor(normalizeOffers(consumes)),
- Producers: d.api.ProducersFor(normalizeOffers(produces)),
- Parameters: parameters,
- Formats: d.api.Formats(),
- Binder: requestBinder,
- Authenticators: d.buildAuthenticators(operation),
- Authorizer: d.api.Authorizer(),
- })
- d.records[mn] = append(d.records[mn], record)
- }
-}
-
-func (d *defaultRouteBuilder) buildAuthenticators(operation *spec.Operation) RouteAuthenticators {
- requirements := d.analyzer.SecurityRequirementsFor(operation)
- auths := make([]RouteAuthenticator, 0, len(requirements))
- for _, reqs := range requirements {
- schemes := make([]string, 0, len(reqs))
- scopes := make(map[string][]string, len(reqs))
- scopeSlices := make([][]string, 0, len(reqs))
- for _, req := range reqs {
- schemes = append(schemes, req.Name)
- scopes[req.Name] = req.Scopes
- scopeSlices = append(scopeSlices, req.Scopes)
- }
-
- definitions := d.analyzer.SecurityDefinitionsForRequirements(reqs)
- authenticators := d.api.AuthenticatorsFor(definitions)
- auths = append(auths, RouteAuthenticator{
- Authenticator: authenticators,
- Schemes: schemes,
- Scopes: scopes,
- allScopes: stringSliceUnion(scopeSlices...),
- commonScopes: stringSliceIntersection(scopeSlices...),
- allowAnonymous: len(reqs) == 1 && reqs[0].Name == "",
- })
- }
- return auths
-}
-
-func (d *defaultRouteBuilder) Build() *defaultRouter {
- routers := make(map[string]*denco.Router)
- for method, records := range d.records {
- router := denco.New()
- _ = router.Build(records)
- routers[method] = router
- }
- return &defaultRouter{
- spec: d.spec,
- routers: routers,
- debugLogf: d.debugLogf,
- }
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/security.go b/vendor/github.com/go-openapi/runtime/middleware/security.go
deleted file mode 100644
index 2b061caefc..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/security.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import "net/http"
-
-func newSecureAPI(ctx *Context, next http.Handler) http.Handler {
- return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- route, rCtx, _ := ctx.RouteInfo(r)
- if rCtx != nil {
- r = rCtx
- }
- if route != nil && !route.NeedsAuth() {
- next.ServeHTTP(rw, r)
- return
- }
-
- _, rCtx, err := ctx.Authorize(r, route)
- if err != nil {
- ctx.Respond(rw, r, route.Produces, route, err)
- return
- }
- r = rCtx
-
- next.ServeHTTP(rw, r)
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/spec.go b/vendor/github.com/go-openapi/runtime/middleware/spec.go
deleted file mode 100644
index 87e17e3424..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/spec.go
+++ /dev/null
@@ -1,102 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- "net/http"
- "path"
-)
-
-const (
- contentTypeHeader = "Content-Type"
- applicationJSON = "application/json"
-)
-
-// SpecOption can be applied to the Spec serving middleware
-type SpecOption func(*specOptions)
-
-var defaultSpecOptions = specOptions{
- Path: "",
- Document: "swagger.json",
-}
-
-type specOptions struct {
- Path string
- Document string
-}
-
-func specOptionsWithDefaults(opts []SpecOption) specOptions {
- o := defaultSpecOptions
- for _, apply := range opts {
- apply(&o)
- }
-
- return o
-}
-
-// Spec creates a middleware to serve a swagger spec as a JSON document.
-//
-// This allows for altering the spec before starting the http listener.
-//
-// The basePath argument indicates the path of the spec document (defaults to "/").
-// Additional SpecOption can be used to change the name of the document (defaults to "swagger.json").
-func Spec(basePath string, b []byte, next http.Handler, opts ...SpecOption) http.Handler {
- if basePath == "" {
- basePath = "/"
- }
- o := specOptionsWithDefaults(opts)
- pth := path.Join(basePath, o.Path, o.Document)
-
- return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- if path.Clean(r.URL.Path) == pth {
- rw.Header().Set(contentTypeHeader, applicationJSON)
- rw.WriteHeader(http.StatusOK)
- _, _ = rw.Write(b)
-
- return
- }
-
- if next != nil {
- next.ServeHTTP(rw, r)
-
- return
- }
-
- rw.Header().Set(contentTypeHeader, applicationJSON)
- rw.WriteHeader(http.StatusNotFound)
- })
-}
-
-// WithSpecPath sets the path to be joined to the base path of the Spec middleware.
-//
-// This is empty by default.
-func WithSpecPath(pth string) SpecOption {
- return func(o *specOptions) {
- o.Path = pth
- }
-}
-
-// WithSpecDocument sets the name of the JSON document served as a spec.
-//
-// By default, this is "swagger.json"
-func WithSpecDocument(doc string) SpecOption {
- return func(o *specOptions) {
- if doc == "" {
- return
- }
-
- o.Document = doc
- }
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go
deleted file mode 100644
index ec3c10cbaf..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go
+++ /dev/null
@@ -1,175 +0,0 @@
-package middleware
-
-import (
- "bytes"
- "fmt"
- "html/template"
- "net/http"
- "path"
-)
-
-// SwaggerUIOpts configures the SwaggerUI middleware
-type SwaggerUIOpts struct {
- // BasePath for the API, defaults to: /
- BasePath string
-
- // Path combines with BasePath to construct the path to the UI, defaults to: "docs".
- Path string
-
- // SpecURL is the URL of the spec document.
- //
- // Defaults to: /swagger.json
- SpecURL string
-
- // Title for the documentation site, default to: API documentation
- Title string
-
- // Template specifies a custom template to serve the UI
- Template string
-
- // OAuthCallbackURL the url called after OAuth2 login
- OAuthCallbackURL string
-
- // The three components needed to embed swagger-ui
-
- // SwaggerURL points to the js that generates the SwaggerUI site.
- //
- // Defaults to: https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js
- SwaggerURL string
-
- SwaggerPresetURL string
- SwaggerStylesURL string
-
- Favicon32 string
- Favicon16 string
-}
-
-// EnsureDefaults in case some options are missing
-func (r *SwaggerUIOpts) EnsureDefaults() {
- r.ensureDefaults()
-
- if r.Template == "" {
- r.Template = swaggeruiTemplate
- }
-}
-
-func (r *SwaggerUIOpts) EnsureDefaultsOauth2() {
- r.ensureDefaults()
-
- if r.Template == "" {
- r.Template = swaggerOAuthTemplate
- }
-}
-
-func (r *SwaggerUIOpts) ensureDefaults() {
- common := toCommonUIOptions(r)
- common.EnsureDefaults()
- fromCommonToAnyOptions(common, r)
-
- // swaggerui-specifics
- if r.OAuthCallbackURL == "" {
- r.OAuthCallbackURL = path.Join(r.BasePath, r.Path, "oauth2-callback")
- }
- if r.SwaggerURL == "" {
- r.SwaggerURL = swaggerLatest
- }
- if r.SwaggerPresetURL == "" {
- r.SwaggerPresetURL = swaggerPresetLatest
- }
- if r.SwaggerStylesURL == "" {
- r.SwaggerStylesURL = swaggerStylesLatest
- }
- if r.Favicon16 == "" {
- r.Favicon16 = swaggerFavicon16Latest
- }
- if r.Favicon32 == "" {
- r.Favicon32 = swaggerFavicon32Latest
- }
-}
-
-// SwaggerUI creates a middleware to serve a documentation site for a swagger spec.
-//
-// This allows for altering the spec before starting the http listener.
-func SwaggerUI(opts SwaggerUIOpts, next http.Handler) http.Handler {
- opts.EnsureDefaults()
-
- pth := path.Join(opts.BasePath, opts.Path)
- tmpl := template.Must(template.New("swaggerui").Parse(opts.Template))
- assets := bytes.NewBuffer(nil)
- if err := tmpl.Execute(assets, opts); err != nil {
- panic(fmt.Errorf("cannot execute template: %w", err))
- }
-
- return serveUI(pth, assets.Bytes(), next)
-}
-
-const (
- swaggerLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"
- swaggerPresetLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui-standalone-preset.js"
- swaggerStylesLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui.css"
- swaggerFavicon32Latest = "https://unpkg.com/swagger-ui-dist/favicon-32x32.png"
- swaggerFavicon16Latest = "https://unpkg.com/swagger-ui-dist/favicon-16x16.png"
- swaggeruiTemplate = `
-[swagger-ui HTML page template, markup garbled in extraction: page titled {{ .Title }}, linking {{ .SwaggerStylesURL }} and the {{ .Favicon32 }}/{{ .Favicon16 }} icons,
- loading {{ .SwaggerURL }} and {{ .SwaggerPresetURL }}, and initializing the UI against {{ .SpecURL }} with {{ .OAuthCallbackURL }} as the OAuth2 redirect]
-`
-)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go
deleted file mode 100644
index e81212f71c..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go
+++ /dev/null
@@ -1,105 +0,0 @@
-package middleware
-
-import (
- "bytes"
- "fmt"
- "net/http"
- "text/template"
-)
-
-func SwaggerUIOAuth2Callback(opts SwaggerUIOpts, next http.Handler) http.Handler {
- opts.EnsureDefaultsOauth2()
-
- pth := opts.OAuthCallbackURL
- tmpl := template.Must(template.New("swaggeroauth").Parse(opts.Template))
- assets := bytes.NewBuffer(nil)
- if err := tmpl.Execute(assets, opts); err != nil {
- panic(fmt.Errorf("cannot execute template: %w", err))
- }
-
- return serveUI(pth, assets.Bytes(), next)
-}
-
-const (
- swaggerOAuthTemplate = `
-[OAuth2 callback HTML page template, markup garbled in extraction: page titled {{ .Title }}, served at the OAuthCallbackURL to complete the OAuth2 login flow started from the swagger-ui page]
-`
-)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/ui_options.go b/vendor/github.com/go-openapi/runtime/middleware/ui_options.go
deleted file mode 100644
index b86efa0089..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/ui_options.go
+++ /dev/null
@@ -1,173 +0,0 @@
-package middleware
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
- "net/http"
- "path"
- "strings"
-)
-
-const (
- // constants that are common to all UI-serving middlewares
- defaultDocsPath = "docs"
- defaultDocsURL = "/swagger.json"
- defaultDocsTitle = "API Documentation"
-)
-
-// uiOptions defines common options for UI serving middlewares.
-type uiOptions struct {
- // BasePath for the UI, defaults to: /
- BasePath string
-
- // Path combines with BasePath to construct the path to the UI, defaults to: "docs".
- Path string
-
- // SpecURL is the URL of the spec document.
- //
- // Defaults to: /swagger.json
- SpecURL string
-
- // Title for the documentation site, default to: API documentation
- Title string
-
- // Template specifies a custom template to serve the UI
- Template string
-}
-
-// toCommonUIOptions converts any UI option type to retain the common options.
-//
-// This uses gob encoding/decoding to convert common fields from one struct to another.
-func toCommonUIOptions(opts interface{}) uiOptions {
- var buf bytes.Buffer
- enc := gob.NewEncoder(&buf)
- dec := gob.NewDecoder(&buf)
- var o uiOptions
- err := enc.Encode(opts)
- if err != nil {
- panic(err)
- }
-
- err = dec.Decode(&o)
- if err != nil {
- panic(err)
- }
-
- return o
-}
-
-func fromCommonToAnyOptions[T any](source uiOptions, target *T) {
- var buf bytes.Buffer
- enc := gob.NewEncoder(&buf)
- dec := gob.NewDecoder(&buf)
- err := enc.Encode(source)
- if err != nil {
- panic(err)
- }
-
- err = dec.Decode(target)
- if err != nil {
- panic(err)
- }
-}
-
-// UIOption can be applied to UI serving middleware, such as Context.APIHandler or
-// Context.APIHandlerSwaggerUI to alter the defaut behavior.
-type UIOption func(*uiOptions)
-
-func uiOptionsWithDefaults(opts []UIOption) uiOptions {
- var o uiOptions
- for _, apply := range opts {
- apply(&o)
- }
-
- return o
-}
-
-// WithUIBasePath sets the base path from where to serve the UI assets.
-//
-// By default, Context middleware sets this value to the API base path.
-func WithUIBasePath(base string) UIOption {
- return func(o *uiOptions) {
- if !strings.HasPrefix(base, "/") {
- base = "/" + base
- }
- o.BasePath = base
- }
-}
-
-// WithUIPath sets the path from where to serve the UI assets (i.e. /{basepath}/{path}.
-func WithUIPath(pth string) UIOption {
- return func(o *uiOptions) {
- o.Path = pth
- }
-}
-
-// WithUISpecURL sets the path from where to serve swagger spec document.
-//
-// This may be specified as a full URL or a path.
-//
-// By default, this is "/swagger.json"
-func WithUISpecURL(specURL string) UIOption {
- return func(o *uiOptions) {
- o.SpecURL = specURL
- }
-}
-
-// WithUITitle sets the title of the UI.
-//
-// By default, Context middleware sets this value to the title found in the API spec.
-func WithUITitle(title string) UIOption {
- return func(o *uiOptions) {
- o.Title = title
- }
-}
-
-// WithTemplate allows to set a custom template for the UI.
-//
-// UI middleware will panic if the template does not parse or execute properly.
-func WithTemplate(tpl string) UIOption {
- return func(o *uiOptions) {
- o.Template = tpl
- }
-}
-
-// EnsureDefaults in case some options are missing
-func (r *uiOptions) EnsureDefaults() {
- if r.BasePath == "" {
- r.BasePath = "/"
- }
- if r.Path == "" {
- r.Path = defaultDocsPath
- }
- if r.SpecURL == "" {
- r.SpecURL = defaultDocsURL
- }
- if r.Title == "" {
- r.Title = defaultDocsTitle
- }
-}
-
-// serveUI creates a middleware that serves a templated asset as text/html.
-func serveUI(pth string, assets []byte, next http.Handler) http.Handler {
- return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- if path.Clean(r.URL.Path) == pth {
- rw.Header().Set(contentTypeHeader, "text/html; charset=utf-8")
- rw.WriteHeader(http.StatusOK)
- _, _ = rw.Write(assets)
-
- return
- }
-
- if next != nil {
- next.ServeHTTP(rw, r)
-
- return
- }
-
- rw.Header().Set(contentTypeHeader, "text/plain")
- rw.WriteHeader(http.StatusNotFound)
- _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth)))
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go
deleted file mode 100644
index 7b7269bd19..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go
+++ /dev/null
@@ -1,287 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package untyped
-
-import (
- "fmt"
- "net/http"
- "sort"
- "strings"
-
- "github.com/go-openapi/analysis"
- "github.com/go-openapi/errors"
- "github.com/go-openapi/loads"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-
- "github.com/go-openapi/runtime"
-)
-
-// NewAPI creates the default untyped API
-func NewAPI(spec *loads.Document) *API {
- var an *analysis.Spec
- if spec != nil && spec.Spec() != nil {
- an = analysis.New(spec.Spec())
- }
- api := &API{
- spec: spec,
- analyzer: an,
- consumers: make(map[string]runtime.Consumer, 10),
- producers: make(map[string]runtime.Producer, 10),
- authenticators: make(map[string]runtime.Authenticator),
- operations: make(map[string]map[string]runtime.OperationHandler),
- ServeError: errors.ServeError,
- Models: make(map[string]func() interface{}),
- formats: strfmt.NewFormats(),
- }
- return api.WithJSONDefaults()
-}
-
-// API represents an untyped mux for a swagger spec
-type API struct {
- spec *loads.Document
- analyzer *analysis.Spec
- DefaultProduces string
- DefaultConsumes string
- consumers map[string]runtime.Consumer
- producers map[string]runtime.Producer
- authenticators map[string]runtime.Authenticator
- authorizer runtime.Authorizer
- operations map[string]map[string]runtime.OperationHandler
- ServeError func(http.ResponseWriter, *http.Request, error)
- Models map[string]func() interface{}
- formats strfmt.Registry
-}
-
-// WithJSONDefaults loads the json defaults for this api
-func (d *API) WithJSONDefaults() *API {
- d.DefaultConsumes = runtime.JSONMime
- d.DefaultProduces = runtime.JSONMime
- d.consumers[runtime.JSONMime] = runtime.JSONConsumer()
- d.producers[runtime.JSONMime] = runtime.JSONProducer()
- return d
-}
-
-// WithoutJSONDefaults clears the json defaults for this api
-func (d *API) WithoutJSONDefaults() *API {
- d.DefaultConsumes = ""
- d.DefaultProduces = ""
- delete(d.consumers, runtime.JSONMime)
- delete(d.producers, runtime.JSONMime)
- return d
-}
-
-// Formats returns the registered string formats
-func (d *API) Formats() strfmt.Registry {
- if d.formats == nil {
- d.formats = strfmt.NewFormats()
- }
- return d.formats
-}
-
-// RegisterFormat registers a custom format validator
-func (d *API) RegisterFormat(name string, format strfmt.Format, validator strfmt.Validator) {
- if d.formats == nil {
- d.formats = strfmt.NewFormats()
- }
- d.formats.Add(name, format, validator)
-}
-
-// RegisterAuth registers an auth handler in this api
-func (d *API) RegisterAuth(scheme string, handler runtime.Authenticator) {
- if d.authenticators == nil {
- d.authenticators = make(map[string]runtime.Authenticator)
- }
- d.authenticators[scheme] = handler
-}
-
-// RegisterAuthorizer registers an authorizer handler in this api
-func (d *API) RegisterAuthorizer(handler runtime.Authorizer) {
- d.authorizer = handler
-}
-
-// RegisterConsumer registers a consumer for a media type.
-func (d *API) RegisterConsumer(mediaType string, handler runtime.Consumer) {
- if d.consumers == nil {
- d.consumers = make(map[string]runtime.Consumer, 10)
- }
- d.consumers[strings.ToLower(mediaType)] = handler
-}
-
-// RegisterProducer registers a producer for a media type
-func (d *API) RegisterProducer(mediaType string, handler runtime.Producer) {
- if d.producers == nil {
- d.producers = make(map[string]runtime.Producer, 10)
- }
- d.producers[strings.ToLower(mediaType)] = handler
-}
-
-// RegisterOperation registers an operation handler for an operation name
-func (d *API) RegisterOperation(method, path string, handler runtime.OperationHandler) {
- if d.operations == nil {
- d.operations = make(map[string]map[string]runtime.OperationHandler, 30)
- }
- um := strings.ToUpper(method)
- if b, ok := d.operations[um]; !ok || b == nil {
- d.operations[um] = make(map[string]runtime.OperationHandler)
- }
- d.operations[um][path] = handler
-}
-
-// OperationHandlerFor returns the operation handler for the specified id if it can be found
-func (d *API) OperationHandlerFor(method, path string) (runtime.OperationHandler, bool) {
- if d.operations == nil {
- return nil, false
- }
- if pi, ok := d.operations[strings.ToUpper(method)]; ok {
- h, ok := pi[path]
- return h, ok
- }
- return nil, false
-}
-
-// ConsumersFor gets the consumers for the specified media types
-func (d *API) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
- result := make(map[string]runtime.Consumer)
- for _, mt := range mediaTypes {
- if consumer, ok := d.consumers[mt]; ok {
- result[mt] = consumer
- }
- }
- return result
-}
-
-// ProducersFor gets the producers for the specified media types
-func (d *API) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
- result := make(map[string]runtime.Producer)
- for _, mt := range mediaTypes {
- if producer, ok := d.producers[mt]; ok {
- result[mt] = producer
- }
- }
- return result
-}
-
-// AuthenticatorsFor gets the authenticators for the specified security schemes
-func (d *API) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
- result := make(map[string]runtime.Authenticator)
- for k := range schemes {
- if a, ok := d.authenticators[k]; ok {
- result[k] = a
- }
- }
- return result
-}
-
-// Authorizer returns the registered authorizer
-func (d *API) Authorizer() runtime.Authorizer {
- return d.authorizer
-}
-
-// Validate validates this API for any missing items
-func (d *API) Validate() error {
- return d.validate()
-}
-
-// validateWith validates the registrations in this API against the provided spec analyzer
-func (d *API) validate() error {
- consumes := make([]string, 0, len(d.consumers))
- for k := range d.consumers {
- consumes = append(consumes, k)
- }
-
- produces := make([]string, 0, len(d.producers))
- for k := range d.producers {
- produces = append(produces, k)
- }
-
- authenticators := make([]string, 0, len(d.authenticators))
- for k := range d.authenticators {
- authenticators = append(authenticators, k)
- }
-
- operations := make([]string, 0, len(d.operations))
- for m, v := range d.operations {
- for p := range v {
- operations = append(operations, fmt.Sprintf("%s %s", strings.ToUpper(m), p))
- }
- }
-
- secDefinitions := d.spec.Spec().SecurityDefinitions
- definedAuths := make([]string, 0, len(secDefinitions))
- for k := range secDefinitions {
- definedAuths = append(definedAuths, k)
- }
-
- if err := d.verify("consumes", consumes, d.analyzer.RequiredConsumes()); err != nil {
- return err
- }
- if err := d.verify("produces", produces, d.analyzer.RequiredProduces()); err != nil {
- return err
- }
- if err := d.verify("operation", operations, d.analyzer.OperationMethodPaths()); err != nil {
- return err
- }
-
- requiredAuths := d.analyzer.RequiredSecuritySchemes()
- if err := d.verify("auth scheme", authenticators, requiredAuths); err != nil {
- return err
- }
- if err := d.verify("security definitions", definedAuths, requiredAuths); err != nil {
- return err
- }
- return nil
-}
-
-func (d *API) verify(name string, registrations []string, expectations []string) error {
- sort.Strings(registrations)
- sort.Strings(expectations)
-
- expected := map[string]struct{}{}
- seen := map[string]struct{}{}
-
- for _, v := range expectations {
- expected[v] = struct{}{}
- }
-
- var unspecified []string
- for _, v := range registrations {
- seen[v] = struct{}{}
- if _, ok := expected[v]; !ok {
- unspecified = append(unspecified, v)
- }
- }
-
- for k := range seen {
- delete(expected, k)
- }
-
- unregistered := make([]string, 0, len(expected))
- for k := range expected {
- unregistered = append(unregistered, k)
- }
- sort.Strings(unspecified)
- sort.Strings(unregistered)
-
- if len(unregistered) > 0 || len(unspecified) > 0 {
- return &errors.APIVerificationFailed{
- Section: name,
- MissingSpecification: unspecified,
- MissingRegistration: unregistered,
- }
- }
-
- return nil
-}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/validation.go b/vendor/github.com/go-openapi/runtime/middleware/validation.go
deleted file mode 100644
index 0a5356c607..0000000000
--- a/vendor/github.com/go-openapi/runtime/middleware/validation.go
+++ /dev/null
@@ -1,130 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package middleware
-
-import (
- "mime"
- "net/http"
- "strings"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/swag"
-
- "github.com/go-openapi/runtime"
-)
-
-type validation struct {
- context *Context
- result []error
- request *http.Request
- route *MatchedRoute
- bound map[string]interface{}
-}
-
-// ContentType validates the content type of a request
-func validateContentType(allowed []string, actual string) error {
- if len(allowed) == 0 {
- return nil
- }
- mt, _, err := mime.ParseMediaType(actual)
- if err != nil {
- return errors.InvalidContentType(actual, allowed)
- }
- if swag.ContainsStringsCI(allowed, mt) {
- return nil
- }
- if swag.ContainsStringsCI(allowed, "*/*") {
- return nil
- }
- parts := strings.Split(actual, "/")
- if len(parts) == 2 && swag.ContainsStringsCI(allowed, parts[0]+"/*") {
- return nil
- }
- return errors.InvalidContentType(actual, allowed)
-}
-
-func validateRequest(ctx *Context, request *http.Request, route *MatchedRoute) *validation {
- validate := &validation{
- context: ctx,
- request: request,
- route: route,
- bound: make(map[string]interface{}),
- }
- validate.debugLogf("validating request %s %s", request.Method, request.URL.EscapedPath())
-
- validate.contentType()
- if len(validate.result) == 0 {
- validate.responseFormat()
- }
- if len(validate.result) == 0 {
- validate.parameters()
- }
-
- return validate
-}
-
-func (v *validation) debugLogf(format string, args ...any) {
- v.context.debugLogf(format, args...)
-}
-
-func (v *validation) parameters() {
- v.debugLogf("validating request parameters for %s %s", v.request.Method, v.request.URL.EscapedPath())
- if result := v.route.Binder.Bind(v.request, v.route.Params, v.route.Consumer, v.bound); result != nil {
- if result.Error() == "validation failure list" {
- for _, e := range result.(*errors.Validation).Value.([]interface{}) {
- v.result = append(v.result, e.(error))
- }
- return
- }
- v.result = append(v.result, result)
- }
-}
-
-func (v *validation) contentType() {
- if len(v.result) == 0 && runtime.HasBody(v.request) {
- v.debugLogf("validating body content type for %s %s", v.request.Method, v.request.URL.EscapedPath())
- ct, _, req, err := v.context.ContentType(v.request)
- if err != nil {
- v.result = append(v.result, err)
- } else {
- v.request = req
- }
-
- if len(v.result) == 0 {
- v.debugLogf("validating content type for %q against [%s]", ct, strings.Join(v.route.Consumes, ", "))
- if err := validateContentType(v.route.Consumes, ct); err != nil {
- v.result = append(v.result, err)
- }
- }
- if ct != "" && v.route.Consumer == nil {
- cons, ok := v.route.Consumers[ct]
- if !ok {
- v.result = append(v.result, errors.New(500, "no consumer registered for %s", ct))
- } else {
- v.route.Consumer = cons
- }
- }
- }
-}
-
-func (v *validation) responseFormat() {
- // if the route provides values for Produces and no format could be identify then return an error.
- // if the route does not specify values for Produces then treat request as valid since the API designer
- // choose not to specify the format for responses.
- if str, rCtx := v.context.ResponseFormat(v.request, v.route.Produces); str == "" && len(v.route.Produces) > 0 {
- v.request = rCtx
- v.result = append(v.result, errors.InvalidResponseFormat(v.request.Header.Get(runtime.HeaderAccept), v.route.Produces))
- }
-}
diff --git a/vendor/github.com/go-openapi/runtime/request.go b/vendor/github.com/go-openapi/runtime/request.go
deleted file mode 100644
index 9e3e1ecb14..0000000000
--- a/vendor/github.com/go-openapi/runtime/request.go
+++ /dev/null
@@ -1,149 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "bufio"
- "context"
- "errors"
- "io"
- "net/http"
- "strings"
-
- "github.com/go-openapi/swag"
-)
-
-// CanHaveBody returns true if this method can have a body
-func CanHaveBody(method string) bool {
- mn := strings.ToUpper(method)
- return mn == "POST" || mn == "PUT" || mn == "PATCH" || mn == "DELETE"
-}
-
-// IsSafe returns true if this is a request with a safe method
-func IsSafe(r *http.Request) bool {
- mn := strings.ToUpper(r.Method)
- return mn == "GET" || mn == "HEAD"
-}
-
-// AllowsBody returns true if the request allows for a body
-func AllowsBody(r *http.Request) bool {
- mn := strings.ToUpper(r.Method)
- return mn != "HEAD"
-}
-
-// HasBody returns true if this method needs a content-type
-func HasBody(r *http.Request) bool {
- // happy case: we have a content length set
- if r.ContentLength > 0 {
- return true
- }
-
- if r.Header.Get("content-length") != "" {
- // in this case, no Transfer-Encoding should be present
- // we have a header set but it was explicitly set to 0, so we assume no body
- return false
- }
-
- rdr := newPeekingReader(r.Body)
- r.Body = rdr
- return rdr.HasContent()
-}
-
-func newPeekingReader(r io.ReadCloser) *peekingReader {
- if r == nil {
- return nil
- }
- return &peekingReader{
- underlying: bufio.NewReader(r),
- orig: r,
- }
-}
-
-type peekingReader struct {
- underlying interface {
- Buffered() int
- Peek(int) ([]byte, error)
- Read([]byte) (int, error)
- }
- orig io.ReadCloser
-}
-
-func (p *peekingReader) HasContent() bool {
- if p == nil {
- return false
- }
- if p.underlying.Buffered() > 0 {
- return true
- }
- b, err := p.underlying.Peek(1)
- if err != nil {
- return false
- }
- return len(b) > 0
-}
-
-func (p *peekingReader) Read(d []byte) (int, error) {
- if p == nil {
- return 0, io.EOF
- }
- if p.underlying == nil {
- return 0, io.ErrUnexpectedEOF
- }
- return p.underlying.Read(d)
-}
-
-func (p *peekingReader) Close() error {
- if p.underlying == nil {
- return errors.New("reader already closed")
- }
- p.underlying = nil
- if p.orig != nil {
- return p.orig.Close()
- }
- return nil
-}
-
-// JSONRequest creates a new http request with json headers set.
-//
-// It uses context.Background.
-func JSONRequest(method, urlStr string, body io.Reader) (*http.Request, error) {
- req, err := http.NewRequestWithContext(context.Background(), method, urlStr, body)
- if err != nil {
- return nil, err
- }
- req.Header.Add(HeaderContentType, JSONMime)
- req.Header.Add(HeaderAccept, JSONMime)
- return req, nil
-}
-
-// Gettable for things with a method GetOK(string) (data string, hasKey bool, hasValue bool)
-type Gettable interface {
- GetOK(string) ([]string, bool, bool)
-}
-
-// ReadSingleValue reads a single value from the source
-func ReadSingleValue(values Gettable, name string) string {
- vv, _, hv := values.GetOK(name)
- if hv {
- return vv[len(vv)-1]
- }
- return ""
-}
-
-// ReadCollectionValue reads a collection value from a string data source
-func ReadCollectionValue(values Gettable, name, collectionFormat string) []string {
- v := ReadSingleValue(values, name)
- return swag.SplitByFormat(v, collectionFormat)
-}
diff --git a/vendor/github.com/go-openapi/runtime/security/authenticator.go b/vendor/github.com/go-openapi/runtime/security/authenticator.go
deleted file mode 100644
index bb30472bbe..0000000000
--- a/vendor/github.com/go-openapi/runtime/security/authenticator.go
+++ /dev/null
@@ -1,277 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package security
-
-import (
- "context"
- "net/http"
- "strings"
-
- "github.com/go-openapi/errors"
-
- "github.com/go-openapi/runtime"
-)
-
-const (
- query = "query"
- header = "header"
- accessTokenParam = "access_token"
-)
-
-// HttpAuthenticator is a function that authenticates a HTTP request
-func HttpAuthenticator(handler func(*http.Request) (bool, interface{}, error)) runtime.Authenticator { //nolint:revive,stylecheck
- return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) {
- if request, ok := params.(*http.Request); ok {
- return handler(request)
- }
- if scoped, ok := params.(*ScopedAuthRequest); ok {
- return handler(scoped.Request)
- }
- return false, nil, nil
- })
-}
-
-// ScopedAuthenticator is a function that authenticates a HTTP request against a list of valid scopes
-func ScopedAuthenticator(handler func(*ScopedAuthRequest) (bool, interface{}, error)) runtime.Authenticator {
- return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) {
- if request, ok := params.(*ScopedAuthRequest); ok {
- return handler(request)
- }
- return false, nil, nil
- })
-}
-
-// UserPassAuthentication authentication function
-type UserPassAuthentication func(string, string) (interface{}, error)
-
-// UserPassAuthenticationCtx authentication function with context.Context
-type UserPassAuthenticationCtx func(context.Context, string, string) (context.Context, interface{}, error)
-
-// TokenAuthentication authentication function
-type TokenAuthentication func(string) (interface{}, error)
-
-// TokenAuthenticationCtx authentication function with context.Context
-type TokenAuthenticationCtx func(context.Context, string) (context.Context, interface{}, error)
-
-// ScopedTokenAuthentication authentication function
-type ScopedTokenAuthentication func(string, []string) (interface{}, error)
-
-// ScopedTokenAuthenticationCtx authentication function with context.Context
-type ScopedTokenAuthenticationCtx func(context.Context, string, []string) (context.Context, interface{}, error)
-
-var DefaultRealmName = "API"
-
-type secCtxKey uint8
-
-const (
- failedBasicAuth secCtxKey = iota
- oauth2SchemeName
-)
-
-func FailedBasicAuth(r *http.Request) string {
- return FailedBasicAuthCtx(r.Context())
-}
-
-func FailedBasicAuthCtx(ctx context.Context) string {
- v, ok := ctx.Value(failedBasicAuth).(string)
- if !ok {
- return ""
- }
- return v
-}
-
-func OAuth2SchemeName(r *http.Request) string {
- return OAuth2SchemeNameCtx(r.Context())
-}
-
-func OAuth2SchemeNameCtx(ctx context.Context) string {
- v, ok := ctx.Value(oauth2SchemeName).(string)
- if !ok {
- return ""
- }
- return v
-}
-
-// BasicAuth creates a basic auth authenticator with the provided authentication function
-func BasicAuth(authenticate UserPassAuthentication) runtime.Authenticator {
- return BasicAuthRealm(DefaultRealmName, authenticate)
-}
-
-// BasicAuthRealm creates a basic auth authenticator with the provided authentication function and realm name
-func BasicAuthRealm(realm string, authenticate UserPassAuthentication) runtime.Authenticator {
- if realm == "" {
- realm = DefaultRealmName
- }
-
- return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
- if usr, pass, ok := r.BasicAuth(); ok {
- p, err := authenticate(usr, pass)
- if err != nil {
- *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm))
- }
- return true, p, err
- }
- *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm))
- return false, nil, nil
- })
-}
-
-// BasicAuthCtx creates a basic auth authenticator with the provided authentication function with support for context.Context
-func BasicAuthCtx(authenticate UserPassAuthenticationCtx) runtime.Authenticator {
- return BasicAuthRealmCtx(DefaultRealmName, authenticate)
-}
-
-// BasicAuthRealmCtx creates a basic auth authenticator with the provided authentication function and realm name with support for context.Context
-func BasicAuthRealmCtx(realm string, authenticate UserPassAuthenticationCtx) runtime.Authenticator {
- if realm == "" {
- realm = DefaultRealmName
- }
-
- return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
- if usr, pass, ok := r.BasicAuth(); ok {
- ctx, p, err := authenticate(r.Context(), usr, pass)
- if err != nil {
- ctx = context.WithValue(ctx, failedBasicAuth, realm)
- }
- *r = *r.WithContext(ctx)
- return true, p, err
- }
- *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm))
- return false, nil, nil
- })
-}
-
-// APIKeyAuth creates an authenticator that uses a token for authorization.
-// This token can be obtained from either a header or a query string
-func APIKeyAuth(name, in string, authenticate TokenAuthentication) runtime.Authenticator {
- inl := strings.ToLower(in)
- if inl != query && inl != header {
- // panic because this is most likely a typo
- panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\""))
- }
-
- var getToken func(*http.Request) string
- switch inl {
- case header:
- getToken = func(r *http.Request) string { return r.Header.Get(name) }
- case query:
- getToken = func(r *http.Request) string { return r.URL.Query().Get(name) }
- }
-
- return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
- token := getToken(r)
- if token == "" {
- return false, nil, nil
- }
-
- p, err := authenticate(token)
- return true, p, err
- })
-}
-
-// APIKeyAuthCtx creates an authenticator that uses a token for authorization with support for context.Context.
-// This token can be obtained from either a header or a query string
-func APIKeyAuthCtx(name, in string, authenticate TokenAuthenticationCtx) runtime.Authenticator {
- inl := strings.ToLower(in)
- if inl != query && inl != header {
- // panic because this is most likely a typo
- panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\""))
- }
-
- var getToken func(*http.Request) string
- switch inl {
- case header:
- getToken = func(r *http.Request) string { return r.Header.Get(name) }
- case query:
- getToken = func(r *http.Request) string { return r.URL.Query().Get(name) }
- }
-
- return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
- token := getToken(r)
- if token == "" {
- return false, nil, nil
- }
-
- ctx, p, err := authenticate(r.Context(), token)
- *r = *r.WithContext(ctx)
- return true, p, err
- })
-}
-
-// ScopedAuthRequest contains both a http request and the required scopes for a particular operation
-type ScopedAuthRequest struct {
- Request *http.Request
- RequiredScopes []string
-}
-
-// BearerAuth for use with oauth2 flows
-func BearerAuth(name string, authenticate ScopedTokenAuthentication) runtime.Authenticator {
- const prefix = "Bearer "
- return ScopedAuthenticator(func(r *ScopedAuthRequest) (bool, interface{}, error) {
- var token string
- hdr := r.Request.Header.Get(runtime.HeaderAuthorization)
- if strings.HasPrefix(hdr, prefix) {
- token = strings.TrimPrefix(hdr, prefix)
- }
- if token == "" {
- qs := r.Request.URL.Query()
- token = qs.Get(accessTokenParam)
- }
- //#nosec
- ct, _, _ := runtime.ContentType(r.Request.Header)
- if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") {
- token = r.Request.FormValue(accessTokenParam)
- }
-
- if token == "" {
- return false, nil, nil
- }
-
- rctx := context.WithValue(r.Request.Context(), oauth2SchemeName, name)
- *r.Request = *r.Request.WithContext(rctx)
- p, err := authenticate(token, r.RequiredScopes)
- return true, p, err
- })
-}
-
-// BearerAuthCtx for use with oauth2 flows with support for context.Context.
-func BearerAuthCtx(name string, authenticate ScopedTokenAuthenticationCtx) runtime.Authenticator {
- const prefix = "Bearer "
- return ScopedAuthenticator(func(r *ScopedAuthRequest) (bool, interface{}, error) {
- var token string
- hdr := r.Request.Header.Get(runtime.HeaderAuthorization)
- if strings.HasPrefix(hdr, prefix) {
- token = strings.TrimPrefix(hdr, prefix)
- }
- if token == "" {
- qs := r.Request.URL.Query()
- token = qs.Get(accessTokenParam)
- }
- //#nosec
- ct, _, _ := runtime.ContentType(r.Request.Header)
- if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") {
- token = r.Request.FormValue(accessTokenParam)
- }
-
- if token == "" {
- return false, nil, nil
- }
-
- rctx := context.WithValue(r.Request.Context(), oauth2SchemeName, name)
- ctx, p, err := authenticate(rctx, token, r.RequiredScopes)
- *r.Request = *r.Request.WithContext(ctx)
- return true, p, err
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/security/authorizer.go b/vendor/github.com/go-openapi/runtime/security/authorizer.go
deleted file mode 100644
index 00c1a4d6a4..0000000000
--- a/vendor/github.com/go-openapi/runtime/security/authorizer.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package security
-
-import (
- "net/http"
-
- "github.com/go-openapi/runtime"
-)
-
-// Authorized provides a default implementation of the Authorizer interface where all
-// requests are authorized (successful)
-func Authorized() runtime.Authorizer {
- return runtime.AuthorizerFunc(func(_ *http.Request, _ interface{}) error { return nil })
-}
diff --git a/vendor/github.com/go-openapi/runtime/statuses.go b/vendor/github.com/go-openapi/runtime/statuses.go
deleted file mode 100644
index 3b011a0bff..0000000000
--- a/vendor/github.com/go-openapi/runtime/statuses.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-// Statuses lists the most common HTTP status codes to default message
-// taken from https://httpstatuses.com/
-var Statuses = map[int]string{
- 100: "Continue",
- 101: "Switching Protocols",
- 102: "Processing",
- 103: "Checkpoint",
- 122: "URI too long",
- 200: "OK",
- 201: "Created",
- 202: "Accepted",
- 203: "Request Processed",
- 204: "No Content",
- 205: "Reset Content",
- 206: "Partial Content",
- 207: "Multi-Status",
- 208: "Already Reported",
- 226: "IM Used",
- 300: "Multiple Choices",
- 301: "Moved Permanently",
- 302: "Found",
- 303: "See Other",
- 304: "Not Modified",
- 305: "Use Proxy",
- 306: "Switch Proxy",
- 307: "Temporary Redirect",
- 308: "Permanent Redirect",
- 400: "Bad Request",
- 401: "Unauthorized",
- 402: "Payment Required",
- 403: "Forbidden",
- 404: "Not Found",
- 405: "Method Not Allowed",
- 406: "Not Acceptable",
- 407: "Proxy Authentication Required",
- 408: "Request Timeout",
- 409: "Conflict",
- 410: "Gone",
- 411: "Length Required",
- 412: "Precondition Failed",
- 413: "Request Entity Too Large",
- 414: "Request-URI Too Long",
- 415: "Unsupported Media Type",
- 416: "Request Range Not Satisfiable",
- 417: "Expectation Failed",
- 418: "I'm a teapot",
- 420: "Enhance Your Calm",
- 422: "Unprocessable Entity",
- 423: "Locked",
- 424: "Failed Dependency",
- 426: "Upgrade Required",
- 428: "Precondition Required",
- 429: "Too Many Requests",
- 431: "Request Header Fields Too Large",
- 444: "No Response",
- 449: "Retry With",
- 450: "Blocked by Windows Parental Controls",
- 451: "Wrong Exchange Server",
- 499: "Client Closed Request",
- 500: "Internal Server Error",
- 501: "Not Implemented",
- 502: "Bad Gateway",
- 503: "Service Unavailable",
- 504: "Gateway Timeout",
- 505: "HTTP Version Not Supported",
- 506: "Variant Also Negotiates",
- 507: "Insufficient Storage",
- 508: "Loop Detected",
- 509: "Bandwidth Limit Exceeded",
- 510: "Not Extended",
- 511: "Network Authentication Required",
- 598: "Network read timeout error",
- 599: "Network connect timeout error",
-}
diff --git a/vendor/github.com/go-openapi/runtime/text.go b/vendor/github.com/go-openapi/runtime/text.go
deleted file mode 100644
index f33320b7dd..0000000000
--- a/vendor/github.com/go-openapi/runtime/text.go
+++ /dev/null
@@ -1,116 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "bytes"
- "encoding"
- "errors"
- "fmt"
- "io"
- "reflect"
-
- "github.com/go-openapi/swag"
-)
-
-// TextConsumer creates a new text consumer
-func TextConsumer() Consumer {
- return ConsumerFunc(func(reader io.Reader, data interface{}) error {
- if reader == nil {
- return errors.New("TextConsumer requires a reader") // early exit
- }
-
- buf := new(bytes.Buffer)
- _, err := buf.ReadFrom(reader)
- if err != nil {
- return err
- }
- b := buf.Bytes()
-
- // If the buffer is empty, no need to unmarshal it, which causes a panic.
- if len(b) == 0 {
- return nil
- }
-
- if tu, ok := data.(encoding.TextUnmarshaler); ok {
- err := tu.UnmarshalText(b)
- if err != nil {
- return fmt.Errorf("text consumer: %v", err)
- }
-
- return nil
- }
-
- t := reflect.TypeOf(data)
- if data != nil && t.Kind() == reflect.Ptr {
- v := reflect.Indirect(reflect.ValueOf(data))
- if t.Elem().Kind() == reflect.String {
- v.SetString(string(b))
- return nil
- }
- }
-
- return fmt.Errorf("%v (%T) is not supported by the TextConsumer, %s",
- data, data, "can be resolved by supporting TextUnmarshaler interface")
- })
-}
-
-// TextProducer creates a new text producer
-func TextProducer() Producer {
- return ProducerFunc(func(writer io.Writer, data interface{}) error {
- if writer == nil {
- return errors.New("TextProducer requires a writer") // early exit
- }
-
- if data == nil {
- return errors.New("no data given to produce text from")
- }
-
- if tm, ok := data.(encoding.TextMarshaler); ok {
- txt, err := tm.MarshalText()
- if err != nil {
- return fmt.Errorf("text producer: %v", err)
- }
- _, err = writer.Write(txt)
- return err
- }
-
- if str, ok := data.(error); ok {
- _, err := writer.Write([]byte(str.Error()))
- return err
- }
-
- if str, ok := data.(fmt.Stringer); ok {
- _, err := writer.Write([]byte(str.String()))
- return err
- }
-
- v := reflect.Indirect(reflect.ValueOf(data))
- if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice {
- b, err := swag.WriteJSON(data)
- if err != nil {
- return err
- }
- _, err = writer.Write(b)
- return err
- }
- if v.Kind() != reflect.String {
- return fmt.Errorf("%T is not a supported type by the TextProducer", data)
- }
-
- _, err := writer.Write([]byte(v.String()))
- return err
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/values.go b/vendor/github.com/go-openapi/runtime/values.go
deleted file mode 100644
index 11f5732af4..0000000000
--- a/vendor/github.com/go-openapi/runtime/values.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package runtime
-
-// Values typically represent parameters on a http request.
-type Values map[string][]string
-
-// GetOK returns the values collection for the given key.
-// When the key is present in the map it will return true for hasKey.
-// When the value is not empty it will return true for hasValue.
-func (v Values) GetOK(key string) (value []string, hasKey bool, hasValue bool) {
- value, hasKey = v[key]
- if !hasKey {
- return
- }
- if len(value) == 0 {
- return
- }
- hasValue = true
- return
-}
diff --git a/vendor/github.com/go-openapi/runtime/xml.go b/vendor/github.com/go-openapi/runtime/xml.go
deleted file mode 100644
index 821c7393df..0000000000
--- a/vendor/github.com/go-openapi/runtime/xml.go
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package runtime
-
-import (
- "encoding/xml"
- "io"
-)
-
-// XMLConsumer creates a new XML consumer
-func XMLConsumer() Consumer {
- return ConsumerFunc(func(reader io.Reader, data interface{}) error {
- dec := xml.NewDecoder(reader)
- return dec.Decode(data)
- })
-}
-
-// XMLProducer creates a new XML producer
-func XMLProducer() Producer {
- return ProducerFunc(func(writer io.Writer, data interface{}) error {
- enc := xml.NewEncoder(writer)
- return enc.Encode(data)
- })
-}
diff --git a/vendor/github.com/go-openapi/runtime/yamlpc/yaml.go b/vendor/github.com/go-openapi/runtime/yamlpc/yaml.go
deleted file mode 100644
index a1a0a589df..0000000000
--- a/vendor/github.com/go-openapi/runtime/yamlpc/yaml.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package yamlpc
-
-import (
- "io"
-
- "github.com/go-openapi/runtime"
- "gopkg.in/yaml.v3"
-)
-
-// YAMLConsumer creates a consumer for yaml data
-func YAMLConsumer() runtime.Consumer {
- return runtime.ConsumerFunc(func(r io.Reader, v interface{}) error {
- dec := yaml.NewDecoder(r)
- return dec.Decode(v)
- })
-}
-
-// YAMLProducer creates a producer for yaml data
-func YAMLProducer() runtime.Producer {
- return runtime.ProducerFunc(func(w io.Writer, v interface{}) error {
- enc := yaml.NewEncoder(w)
- defer enc.Close()
- return enc.Encode(v)
- })
-}
diff --git a/vendor/github.com/go-openapi/spec/.editorconfig b/vendor/github.com/go-openapi/spec/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/spec/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/spec/.gitignore b/vendor/github.com/go-openapi/spec/.gitignore
deleted file mode 100644
index f47cb2045f..0000000000
--- a/vendor/github.com/go-openapi/spec/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-*.out
diff --git a/vendor/github.com/go-openapi/spec/.golangci.yml b/vendor/github.com/go-openapi/spec/.golangci.yml
deleted file mode 100644
index 22f8d21cca..0000000000
--- a/vendor/github.com/go-openapi/spec/.golangci.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-linters-settings:
- govet:
- check-shadowing: true
- golint:
- min-confidence: 0
- gocyclo:
- min-complexity: 45
- maligned:
- suggest-new: true
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - maligned
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - gomnd
- - exhaustivestruct
- - goerr113
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - ifshort
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- - deadcode
- - interfacer
- - scopelint
- - varcheck
- - structcheck
- - golint
- - nosnakecase
diff --git a/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/spec/LICENSE b/vendor/github.com/go-openapi/spec/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/spec/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/spec/README.md b/vendor/github.com/go-openapi/spec/README.md
deleted file mode 100644
index 7fd2810c69..0000000000
--- a/vendor/github.com/go-openapi/spec/README.md
+++ /dev/null
@@ -1,54 +0,0 @@
-# OpenAPI v2 object model [](https://github.com/go-openapi/spec/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/spec)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/spec/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/spec)
-[](https://goreportcard.com/report/github.com/go-openapi/spec)
-
-The object model for OpenAPI specification documents.
-
-### FAQ
-
-* What does this do?
-
-> 1. This package knows how to marshal and unmarshal Swagger API specifications into a golang object model
-> 2. It knows how to resolve $ref and expand them to make a single root document
-
-* How does it play with the rest of the go-openapi packages ?
-
-> 1. This package is at the core of the go-openapi suite of packages and [code generator](https://github.com/go-swagger/go-swagger)
-> 2. There is a [spec loading package](https://github.com/go-openapi/loads) to fetch specs as JSON or YAML from local or remote locations
-> 3. There is a [spec validation package](https://github.com/go-openapi/validate) built on top of it
-> 4. There is a [spec analysis package](https://github.com/go-openapi/analysis) built on top of it, to analyze, flatten, fix and merge spec documents
-
-* Does this library support OpenAPI 3?
-
-> No.
-> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
-> There is no plan to make it evolve toward supporting OpenAPI 3.x.
-> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
->
-> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3
-
-* Does the unmarshaling support YAML?
-
-> Not directly. The exposed types know only how to unmarshal from JSON.
->
-> In order to load a YAML document as a Swagger spec, you need to use the loaders provided by
-> github.com/go-openapi/loads
->
-> Take a look at the example there: https://pkg.go.dev/github.com/go-openapi/loads#example-Spec
->
-> See also https://github.com/go-openapi/spec/issues/164
-
-* How can I validate a spec?
-
-> Validation is provided by [the validate package](http://github.com/go-openapi/validate)
-
-* Why do we have an `ID` field for `Schema` which is not part of the swagger spec?
-
-> We found jsonschema compatibility more important: since `id` in jsonschema influences
-> how `$ref` are resolved.
-> This `id` does not conflict with any property named `id`.
->
-> See also https://github.com/go-openapi/spec/issues/23
diff --git a/vendor/github.com/go-openapi/spec/cache.go b/vendor/github.com/go-openapi/spec/cache.go
deleted file mode 100644
index 122993b44b..0000000000
--- a/vendor/github.com/go-openapi/spec/cache.go
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "sync"
-)
-
-// ResolutionCache a cache for resolving urls
-type ResolutionCache interface {
- Get(string) (interface{}, bool)
- Set(string, interface{})
-}
-
-type simpleCache struct {
- lock sync.RWMutex
- store map[string]interface{}
-}
-
-func (s *simpleCache) ShallowClone() ResolutionCache {
- store := make(map[string]interface{}, len(s.store))
- s.lock.RLock()
- for k, v := range s.store {
- store[k] = v
- }
- s.lock.RUnlock()
-
- return &simpleCache{
- store: store,
- }
-}
-
-// Get retrieves a cached URI
-func (s *simpleCache) Get(uri string) (interface{}, bool) {
- s.lock.RLock()
- v, ok := s.store[uri]
-
- s.lock.RUnlock()
- return v, ok
-}
-
-// Set caches a URI
-func (s *simpleCache) Set(uri string, data interface{}) {
- s.lock.Lock()
- s.store[uri] = data
- s.lock.Unlock()
-}
-
-var (
- // resCache is a package level cache for $ref resolution and expansion.
- // It is initialized lazily by methods that have the need for it: no
- // memory is allocated unless some expander methods are called.
- //
- // It is initialized with JSON schema and swagger schema,
- // which do not mutate during normal operations.
- //
- // All subsequent utilizations of this cache are produced from a shallow
- // clone of this initial version.
- resCache *simpleCache
- onceCache sync.Once
-
- _ ResolutionCache = &simpleCache{}
-)
-
-// initResolutionCache initializes the URI resolution cache. To be wrapped in a sync.Once.Do call.
-func initResolutionCache() {
- resCache = defaultResolutionCache()
-}
-
-func defaultResolutionCache() *simpleCache {
- return &simpleCache{store: map[string]interface{}{
- "http://swagger.io/v2/schema.json": MustLoadSwagger20Schema(),
- "http://json-schema.org/draft-04/schema": MustLoadJSONSchemaDraft04(),
- }}
-}
-
-func cacheOrDefault(cache ResolutionCache) ResolutionCache {
- onceCache.Do(initResolutionCache)
-
- if cache != nil {
- return cache
- }
-
- // get a shallow clone of the base cache with swagger and json schema
- return resCache.ShallowClone()
-}
diff --git a/vendor/github.com/go-openapi/spec/contact_info.go b/vendor/github.com/go-openapi/spec/contact_info.go
deleted file mode 100644
index 2f7bb219b5..0000000000
--- a/vendor/github.com/go-openapi/spec/contact_info.go
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-
- "github.com/go-openapi/swag"
-)
-
-// ContactInfo contact information for the exposed API.
-//
-// For more information: http://goo.gl/8us55a#contactObject
-type ContactInfo struct {
- ContactInfoProps
- VendorExtensible
-}
-
-// ContactInfoProps hold the properties of a ContactInfo object
-type ContactInfoProps struct {
- Name string `json:"name,omitempty"`
- URL string `json:"url,omitempty"`
- Email string `json:"email,omitempty"`
-}
-
-// UnmarshalJSON hydrates ContactInfo from json
-func (c *ContactInfo) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &c.ContactInfoProps); err != nil {
- return err
- }
- return json.Unmarshal(data, &c.VendorExtensible)
-}
-
-// MarshalJSON produces ContactInfo as json
-func (c ContactInfo) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(c.ContactInfoProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(c.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2), nil
-}
diff --git a/vendor/github.com/go-openapi/spec/debug.go b/vendor/github.com/go-openapi/spec/debug.go
deleted file mode 100644
index fc889f6d0b..0000000000
--- a/vendor/github.com/go-openapi/spec/debug.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "fmt"
- "log"
- "os"
- "path"
- "runtime"
-)
-
-// Debug is true when the SWAGGER_DEBUG env var is not empty.
-//
-// It enables a more verbose logging of this package.
-var Debug = os.Getenv("SWAGGER_DEBUG") != ""
-
-var (
- // specLogger is a debug logger for this package
- specLogger *log.Logger
-)
-
-func init() {
- debugOptions()
-}
-
-func debugOptions() {
- specLogger = log.New(os.Stdout, "spec:", log.LstdFlags)
-}
-
-func debugLog(msg string, args ...interface{}) {
- // A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog()
- if Debug {
- _, file1, pos1, _ := runtime.Caller(1)
- specLogger.Printf("%s:%d: %s", path.Base(file1), pos1, fmt.Sprintf(msg, args...))
- }
-}
diff --git a/vendor/github.com/go-openapi/spec/embed.go b/vendor/github.com/go-openapi/spec/embed.go
deleted file mode 100644
index 1f4284750a..0000000000
--- a/vendor/github.com/go-openapi/spec/embed.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package spec
-
-import (
- "embed"
- "path"
-)
-
-//go:embed schemas/*.json schemas/*/*.json
-var assets embed.FS
-
-func jsonschemaDraft04JSONBytes() ([]byte, error) {
- return assets.ReadFile(path.Join("schemas", "jsonschema-draft-04.json"))
-}
-
-func v2SchemaJSONBytes() ([]byte, error) {
- return assets.ReadFile(path.Join("schemas", "v2", "schema.json"))
-}
diff --git a/vendor/github.com/go-openapi/spec/errors.go b/vendor/github.com/go-openapi/spec/errors.go
deleted file mode 100644
index 6992c7ba73..0000000000
--- a/vendor/github.com/go-openapi/spec/errors.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package spec
-
-import "errors"
-
-// Error codes
-var (
- // ErrUnknownTypeForReference indicates that a resolved reference was found in an unsupported container type
- ErrUnknownTypeForReference = errors.New("unknown type for the resolved reference")
-
- // ErrResolveRefNeedsAPointer indicates that a $ref target must be a valid JSON pointer
- ErrResolveRefNeedsAPointer = errors.New("resolve ref: target needs to be a pointer")
-
- // ErrDerefUnsupportedType indicates that a resolved reference was found in an unsupported container type.
- // At the moment, $ref are supported only inside: schemas, parameters, responses, path items
- ErrDerefUnsupportedType = errors.New("deref: unsupported type")
-
- // ErrExpandUnsupportedType indicates that $ref expansion is attempted on some invalid type
- ErrExpandUnsupportedType = errors.New("expand: unsupported type. Input should be of type *Parameter or *Response")
-)
diff --git a/vendor/github.com/go-openapi/spec/expander.go b/vendor/github.com/go-openapi/spec/expander.go
deleted file mode 100644
index b81a5699a0..0000000000
--- a/vendor/github.com/go-openapi/spec/expander.go
+++ /dev/null
@@ -1,607 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "fmt"
-)
-
-// ExpandOptions provides options for the spec expander.
-//
-// RelativeBase is the path to the root document. This can be a remote URL or a path to a local file.
-//
-// If left empty, the root document is assumed to be located in the current working directory:
-// all relative $ref's will be resolved from there.
-//
-// PathLoader injects a document loading method. By default, this resolves to the function provided by the SpecLoader package variable.
-type ExpandOptions struct {
- RelativeBase string // the path to the root document to expand. This is a file, not a directory
- SkipSchemas bool // do not expand schemas, just paths, parameters and responses
- ContinueOnError bool // continue expanding even after and error is found
- PathLoader func(string) (json.RawMessage, error) `json:"-"` // the document loading method that takes a path as input and yields a json document
- AbsoluteCircularRef bool // circular $ref remaining after expansion remain absolute URLs
-}
-
-func optionsOrDefault(opts *ExpandOptions) *ExpandOptions {
- if opts != nil {
- clone := *opts // shallow clone to avoid internal changes to be propagated to the caller
- if clone.RelativeBase != "" {
- clone.RelativeBase = normalizeBase(clone.RelativeBase)
- }
- // if the relative base is empty, let the schema loader choose a pseudo root document
- return &clone
- }
- return &ExpandOptions{}
-}
-
-// ExpandSpec expands the references in a swagger spec
-func ExpandSpec(spec *Swagger, options *ExpandOptions) error {
- options = optionsOrDefault(options)
- resolver := defaultSchemaLoader(spec, options, nil, nil)
-
- specBasePath := options.RelativeBase
-
- if !options.SkipSchemas {
- for key, definition := range spec.Definitions {
- parentRefs := make([]string, 0, 10)
- parentRefs = append(parentRefs, "#/definitions/"+key)
-
- def, err := expandSchema(definition, parentRefs, resolver, specBasePath)
- if resolver.shouldStopOnError(err) {
- return err
- }
- if def != nil {
- spec.Definitions[key] = *def
- }
- }
- }
-
- for key := range spec.Parameters {
- parameter := spec.Parameters[key]
- if err := expandParameterOrResponse(¶meter, resolver, specBasePath); resolver.shouldStopOnError(err) {
- return err
- }
- spec.Parameters[key] = parameter
- }
-
- for key := range spec.Responses {
- response := spec.Responses[key]
- if err := expandParameterOrResponse(&response, resolver, specBasePath); resolver.shouldStopOnError(err) {
- return err
- }
- spec.Responses[key] = response
- }
-
- if spec.Paths != nil {
- for key := range spec.Paths.Paths {
- pth := spec.Paths.Paths[key]
- if err := expandPathItem(&pth, resolver, specBasePath); resolver.shouldStopOnError(err) {
- return err
- }
- spec.Paths.Paths[key] = pth
- }
- }
-
- return nil
-}
-
-const rootBase = ".root"
-
-// baseForRoot loads in the cache the root document and produces a fake ".root" base path entry
-// for further $ref resolution
-func baseForRoot(root interface{}, cache ResolutionCache) string {
- // cache the root document to resolve $ref's
- normalizedBase := normalizeBase(rootBase)
-
- if root == nil {
- // ensure that we never leave a nil root: always cache the root base pseudo-document
- cachedRoot, found := cache.Get(normalizedBase)
- if found && cachedRoot != nil {
- // the cache is already preloaded with a root
- return normalizedBase
- }
-
- root = map[string]interface{}{}
- }
-
- cache.Set(normalizedBase, root)
-
- return normalizedBase
-}
-
-// ExpandSchema expands the refs in the schema object with reference to the root object.
-//
-// go-openapi/validate uses this function.
-//
-// Notice that it is impossible to reference a json schema in a different document other than root
-// (use ExpandSchemaWithBasePath to resolve external references).
-//
-// Setting the cache is optional and this parameter may safely be left to nil.
-func ExpandSchema(schema *Schema, root interface{}, cache ResolutionCache) error {
- cache = cacheOrDefault(cache)
- if root == nil {
- root = schema
- }
-
- opts := &ExpandOptions{
- // when a root is specified, cache the root as an in-memory document for $ref retrieval
- RelativeBase: baseForRoot(root, cache),
- SkipSchemas: false,
- ContinueOnError: false,
- }
-
- return ExpandSchemaWithBasePath(schema, cache, opts)
-}
-
-// ExpandSchemaWithBasePath expands the refs in the schema object, base path configured through expand options.
-//
-// Setting the cache is optional and this parameter may safely be left to nil.
-func ExpandSchemaWithBasePath(schema *Schema, cache ResolutionCache, opts *ExpandOptions) error {
- if schema == nil {
- return nil
- }
-
- cache = cacheOrDefault(cache)
-
- opts = optionsOrDefault(opts)
-
- resolver := defaultSchemaLoader(nil, opts, cache, nil)
-
- parentRefs := make([]string, 0, 10)
- s, err := expandSchema(*schema, parentRefs, resolver, opts.RelativeBase)
- if err != nil {
- return err
- }
- if s != nil {
- // guard for when continuing on error
- *schema = *s
- }
-
- return nil
-}
-
-func expandItems(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
- if target.Items == nil {
- return &target, nil
- }
-
- // array
- if target.Items.Schema != nil {
- t, err := expandSchema(*target.Items.Schema, parentRefs, resolver, basePath)
- if err != nil {
- return nil, err
- }
- *target.Items.Schema = *t
- }
-
- // tuple
- for i := range target.Items.Schemas {
- t, err := expandSchema(target.Items.Schemas[i], parentRefs, resolver, basePath)
- if err != nil {
- return nil, err
- }
- target.Items.Schemas[i] = *t
- }
-
- return &target, nil
-}
-
-func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
- if target.Ref.String() == "" && target.Ref.IsRoot() {
- newRef := normalizeRef(&target.Ref, basePath)
- target.Ref = *newRef
- return &target, nil
- }
-
- // change the base path of resolution when an ID is encountered
- // otherwise the basePath should inherit the parent's
- if target.ID != "" {
- basePath, _ = resolver.setSchemaID(target, target.ID, basePath)
- }
-
- if target.Ref.String() != "" {
- if !resolver.options.SkipSchemas {
- return expandSchemaRef(target, parentRefs, resolver, basePath)
- }
-
- // when "expand" with SkipSchema, we just rebase the existing $ref without replacing
- // the full schema.
- rebasedRef, err := NewRef(normalizeURI(target.Ref.String(), basePath))
- if err != nil {
- return nil, err
- }
- target.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID)
-
- return &target, nil
- }
-
- for k := range target.Definitions {
- tt, err := expandSchema(target.Definitions[k], parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if tt != nil {
- target.Definitions[k] = *tt
- }
- }
-
- t, err := expandItems(target, parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- target = *t
- }
-
- for i := range target.AllOf {
- t, err := expandSchema(target.AllOf[i], parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- target.AllOf[i] = *t
- }
- }
-
- for i := range target.AnyOf {
- t, err := expandSchema(target.AnyOf[i], parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- target.AnyOf[i] = *t
- }
- }
-
- for i := range target.OneOf {
- t, err := expandSchema(target.OneOf[i], parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- target.OneOf[i] = *t
- }
- }
-
- if target.Not != nil {
- t, err := expandSchema(*target.Not, parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- *target.Not = *t
- }
- }
-
- for k := range target.Properties {
- t, err := expandSchema(target.Properties[k], parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- target.Properties[k] = *t
- }
- }
-
- if target.AdditionalProperties != nil && target.AdditionalProperties.Schema != nil {
- t, err := expandSchema(*target.AdditionalProperties.Schema, parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- *target.AdditionalProperties.Schema = *t
- }
- }
-
- for k := range target.PatternProperties {
- t, err := expandSchema(target.PatternProperties[k], parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- target.PatternProperties[k] = *t
- }
- }
-
- for k := range target.Dependencies {
- if target.Dependencies[k].Schema != nil {
- t, err := expandSchema(*target.Dependencies[k].Schema, parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- *target.Dependencies[k].Schema = *t
- }
- }
- }
-
- if target.AdditionalItems != nil && target.AdditionalItems.Schema != nil {
- t, err := expandSchema(*target.AdditionalItems.Schema, parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return &target, err
- }
- if t != nil {
- *target.AdditionalItems.Schema = *t
- }
- }
- return &target, nil
-}
-
-func expandSchemaRef(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
- // if a Ref is found, all sibling fields are skipped
- // Ref also changes the resolution scope of children expandSchema
-
- // here the resolution scope is changed because a $ref was encountered
- normalizedRef := normalizeRef(&target.Ref, basePath)
- normalizedBasePath := normalizedRef.RemoteURI()
-
- if resolver.isCircular(normalizedRef, basePath, parentRefs...) {
- // this means there is a cycle in the recursion tree: return the Ref
- // - circular refs cannot be expanded. We leave them as ref.
- // - denormalization means that a new local file ref is set relative to the original basePath
- debugLog("short circuit circular ref: basePath: %s, normalizedPath: %s, normalized ref: %s",
- basePath, normalizedBasePath, normalizedRef.String())
- if !resolver.options.AbsoluteCircularRef {
- target.Ref = denormalizeRef(normalizedRef, resolver.context.basePath, resolver.context.rootID)
- } else {
- target.Ref = *normalizedRef
- }
- return &target, nil
- }
-
- var t *Schema
- err := resolver.Resolve(&target.Ref, &t, basePath)
- if resolver.shouldStopOnError(err) {
- return nil, err
- }
-
- if t == nil {
- // guard for when continuing on error
- return &target, nil
- }
-
- parentRefs = append(parentRefs, normalizedRef.String())
- transitiveResolver := resolver.transitiveResolver(basePath, target.Ref)
-
- basePath = resolver.updateBasePath(transitiveResolver, normalizedBasePath)
-
- return expandSchema(*t, parentRefs, transitiveResolver, basePath)
-}
-
-func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string) error {
- if pathItem == nil {
- return nil
- }
-
- parentRefs := make([]string, 0, 10)
- if err := resolver.deref(pathItem, parentRefs, basePath); resolver.shouldStopOnError(err) {
- return err
- }
-
- if pathItem.Ref.String() != "" {
- transitiveResolver := resolver.transitiveResolver(basePath, pathItem.Ref)
- basePath = transitiveResolver.updateBasePath(resolver, basePath)
- resolver = transitiveResolver
- }
-
- pathItem.Ref = Ref{}
- for i := range pathItem.Parameters {
- if err := expandParameterOrResponse(&(pathItem.Parameters[i]), resolver, basePath); resolver.shouldStopOnError(err) {
- return err
- }
- }
-
- ops := []*Operation{
- pathItem.Get,
- pathItem.Head,
- pathItem.Options,
- pathItem.Put,
- pathItem.Post,
- pathItem.Patch,
- pathItem.Delete,
- }
- for _, op := range ops {
- if err := expandOperation(op, resolver, basePath); resolver.shouldStopOnError(err) {
- return err
- }
- }
-
- return nil
-}
-
-func expandOperation(op *Operation, resolver *schemaLoader, basePath string) error {
- if op == nil {
- return nil
- }
-
- for i := range op.Parameters {
- param := op.Parameters[i]
- if err := expandParameterOrResponse(¶m, resolver, basePath); resolver.shouldStopOnError(err) {
- return err
- }
- op.Parameters[i] = param
- }
-
- if op.Responses == nil {
- return nil
- }
-
- responses := op.Responses
- if err := expandParameterOrResponse(responses.Default, resolver, basePath); resolver.shouldStopOnError(err) {
- return err
- }
-
- for code := range responses.StatusCodeResponses {
- response := responses.StatusCodeResponses[code]
- if err := expandParameterOrResponse(&response, resolver, basePath); resolver.shouldStopOnError(err) {
- return err
- }
- responses.StatusCodeResponses[code] = response
- }
-
- return nil
-}
-
-// ExpandResponseWithRoot expands a response based on a root document, not a fetchable document
-//
-// Notice that it is impossible to reference a json schema in a different document other than root
-// (use ExpandResponse to resolve external references).
-//
-// Setting the cache is optional and this parameter may safely be left to nil.
-func ExpandResponseWithRoot(response *Response, root interface{}, cache ResolutionCache) error {
- cache = cacheOrDefault(cache)
- opts := &ExpandOptions{
- RelativeBase: baseForRoot(root, cache),
- }
- resolver := defaultSchemaLoader(root, opts, cache, nil)
-
- return expandParameterOrResponse(response, resolver, opts.RelativeBase)
-}
-
-// ExpandResponse expands a response based on a basepath
-//
-// All refs inside response will be resolved relative to basePath
-func ExpandResponse(response *Response, basePath string) error {
- opts := optionsOrDefault(&ExpandOptions{
- RelativeBase: basePath,
- })
- resolver := defaultSchemaLoader(nil, opts, nil, nil)
-
- return expandParameterOrResponse(response, resolver, opts.RelativeBase)
-}
-
-// ExpandParameterWithRoot expands a parameter based on a root document, not a fetchable document.
-//
-// Notice that it is impossible to reference a json schema in a different document other than root
-// (use ExpandParameter to resolve external references).
-func ExpandParameterWithRoot(parameter *Parameter, root interface{}, cache ResolutionCache) error {
- cache = cacheOrDefault(cache)
-
- opts := &ExpandOptions{
- RelativeBase: baseForRoot(root, cache),
- }
- resolver := defaultSchemaLoader(root, opts, cache, nil)
-
- return expandParameterOrResponse(parameter, resolver, opts.RelativeBase)
-}
-
-// ExpandParameter expands a parameter based on a basepath.
-// This is the exported version of expandParameter
-// all refs inside parameter will be resolved relative to basePath
-func ExpandParameter(parameter *Parameter, basePath string) error {
- opts := optionsOrDefault(&ExpandOptions{
- RelativeBase: basePath,
- })
- resolver := defaultSchemaLoader(nil, opts, nil, nil)
-
- return expandParameterOrResponse(parameter, resolver, opts.RelativeBase)
-}
-
-func getRefAndSchema(input interface{}) (*Ref, *Schema, error) {
- var (
- ref *Ref
- sch *Schema
- )
-
- switch refable := input.(type) {
- case *Parameter:
- if refable == nil {
- return nil, nil, nil
- }
- ref = &refable.Ref
- sch = refable.Schema
- case *Response:
- if refable == nil {
- return nil, nil, nil
- }
- ref = &refable.Ref
- sch = refable.Schema
- default:
- return nil, nil, fmt.Errorf("unsupported type: %T: %w", input, ErrExpandUnsupportedType)
- }
-
- return ref, sch, nil
-}
-
-func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePath string) error {
- ref, sch, err := getRefAndSchema(input)
- if err != nil {
- return err
- }
-
- if ref == nil && sch == nil { // nothing to do
- return nil
- }
-
- parentRefs := make([]string, 0, 10)
- if ref != nil {
- // dereference this $ref
- if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) {
- return err
- }
-
- ref, sch, _ = getRefAndSchema(input)
- }
-
- if ref.String() != "" {
- transitiveResolver := resolver.transitiveResolver(basePath, *ref)
- basePath = resolver.updateBasePath(transitiveResolver, basePath)
- resolver = transitiveResolver
- }
-
- if sch == nil {
- // nothing to be expanded
- if ref != nil {
- *ref = Ref{}
- }
-
- return nil
- }
-
- if sch.Ref.String() != "" {
- rebasedRef, ern := NewRef(normalizeURI(sch.Ref.String(), basePath))
- if ern != nil {
- return ern
- }
-
- if resolver.isCircular(&rebasedRef, basePath, parentRefs...) {
- // this is a circular $ref: stop expansion
- if !resolver.options.AbsoluteCircularRef {
- sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID)
- } else {
- sch.Ref = rebasedRef
- }
- }
- }
-
- // $ref expansion or rebasing is performed by expandSchema below
- if ref != nil {
- *ref = Ref{}
- }
-
- // expand schema
- // yes, we do it even if options.SkipSchema is true: we have to go down that rabbit hole and rebase nested $ref)
- s, err := expandSchema(*sch, parentRefs, resolver, basePath)
- if resolver.shouldStopOnError(err) {
- return err
- }
-
- if s != nil { // guard for when continuing on error
- *sch = *s
- }
-
- return nil
-}
diff --git a/vendor/github.com/go-openapi/spec/external_docs.go b/vendor/github.com/go-openapi/spec/external_docs.go
deleted file mode 100644
index 88add91b2b..0000000000
--- a/vendor/github.com/go-openapi/spec/external_docs.go
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-// ExternalDocumentation allows referencing an external resource for
-// extended documentation.
-//
-// For more information: http://goo.gl/8us55a#externalDocumentationObject
-type ExternalDocumentation struct {
- Description string `json:"description,omitempty"`
- URL string `json:"url,omitempty"`
-}
diff --git a/vendor/github.com/go-openapi/spec/header.go b/vendor/github.com/go-openapi/spec/header.go
deleted file mode 100644
index 9dfd17b185..0000000000
--- a/vendor/github.com/go-openapi/spec/header.go
+++ /dev/null
@@ -1,203 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-const (
- jsonArray = "array"
-)
-
-// HeaderProps describes a response header
-type HeaderProps struct {
- Description string `json:"description,omitempty"`
-}
-
-// Header describes a header for a response of the API
-//
-// For more information: http://goo.gl/8us55a#headerObject
-type Header struct {
- CommonValidations
- SimpleSchema
- VendorExtensible
- HeaderProps
-}
-
-// ResponseHeader creates a new header instance for use in a response
-func ResponseHeader() *Header {
- return new(Header)
-}
-
-// WithDescription sets the description on this response, allows for chaining
-func (h *Header) WithDescription(description string) *Header {
- h.Description = description
- return h
-}
-
-// Typed a fluent builder method for the type of parameter
-func (h *Header) Typed(tpe, format string) *Header {
- h.Type = tpe
- h.Format = format
- return h
-}
-
-// CollectionOf a fluent builder method for an array item
-func (h *Header) CollectionOf(items *Items, format string) *Header {
- h.Type = jsonArray
- h.Items = items
- h.CollectionFormat = format
- return h
-}
-
-// WithDefault sets the default value on this item
-func (h *Header) WithDefault(defaultValue interface{}) *Header {
- h.Default = defaultValue
- return h
-}
-
-// WithMaxLength sets a max length value
-func (h *Header) WithMaxLength(max int64) *Header {
- h.MaxLength = &max
- return h
-}
-
-// WithMinLength sets a min length value
-func (h *Header) WithMinLength(min int64) *Header {
- h.MinLength = &min
- return h
-}
-
-// WithPattern sets a pattern value
-func (h *Header) WithPattern(pattern string) *Header {
- h.Pattern = pattern
- return h
-}
-
-// WithMultipleOf sets a multiple of value
-func (h *Header) WithMultipleOf(number float64) *Header {
- h.MultipleOf = &number
- return h
-}
-
-// WithMaximum sets a maximum number value
-func (h *Header) WithMaximum(max float64, exclusive bool) *Header {
- h.Maximum = &max
- h.ExclusiveMaximum = exclusive
- return h
-}
-
-// WithMinimum sets a minimum number value
-func (h *Header) WithMinimum(min float64, exclusive bool) *Header {
- h.Minimum = &min
- h.ExclusiveMinimum = exclusive
- return h
-}
-
-// WithEnum sets a the enum values (replace)
-func (h *Header) WithEnum(values ...interface{}) *Header {
- h.Enum = append([]interface{}{}, values...)
- return h
-}
-
-// WithMaxItems sets the max items
-func (h *Header) WithMaxItems(size int64) *Header {
- h.MaxItems = &size
- return h
-}
-
-// WithMinItems sets the min items
-func (h *Header) WithMinItems(size int64) *Header {
- h.MinItems = &size
- return h
-}
-
-// UniqueValues dictates that this array can only have unique items
-func (h *Header) UniqueValues() *Header {
- h.UniqueItems = true
- return h
-}
-
-// AllowDuplicates this array can have duplicates
-func (h *Header) AllowDuplicates() *Header {
- h.UniqueItems = false
- return h
-}
-
-// WithValidations is a fluent method to set header validations
-func (h *Header) WithValidations(val CommonValidations) *Header {
- h.SetValidations(SchemaValidations{CommonValidations: val})
- return h
-}
-
-// MarshalJSON marshal this to JSON
-func (h Header) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(h.CommonValidations)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(h.SimpleSchema)
- if err != nil {
- return nil, err
- }
- b3, err := json.Marshal(h.HeaderProps)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// UnmarshalJSON unmarshals this header from JSON
-func (h *Header) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &h.CommonValidations); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &h.SimpleSchema); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &h.VendorExtensible); err != nil {
- return err
- }
- return json.Unmarshal(data, &h.HeaderProps)
-}
-
-// JSONLookup look up a value by the json property name
-func (h Header) JSONLookup(token string) (interface{}, error) {
- if ex, ok := h.Extensions[token]; ok {
- return &ex, nil
- }
-
- r, _, err := jsonpointer.GetForToken(h.CommonValidations, token)
- if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
- return nil, err
- }
- if r != nil {
- return r, nil
- }
- r, _, err = jsonpointer.GetForToken(h.SimpleSchema, token)
- if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
- return nil, err
- }
- if r != nil {
- return r, nil
- }
- r, _, err = jsonpointer.GetForToken(h.HeaderProps, token)
- return r, err
-}
diff --git a/vendor/github.com/go-openapi/spec/info.go b/vendor/github.com/go-openapi/spec/info.go
deleted file mode 100644
index 582f0fd4c4..0000000000
--- a/vendor/github.com/go-openapi/spec/info.go
+++ /dev/null
@@ -1,184 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "strconv"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// Extensions vendor specific extensions
-type Extensions map[string]interface{}
-
-// Add adds a value to these extensions
-func (e Extensions) Add(key string, value interface{}) {
- realKey := strings.ToLower(key)
- e[realKey] = value
-}
-
-// GetString gets a string value from the extensions
-func (e Extensions) GetString(key string) (string, bool) {
- if v, ok := e[strings.ToLower(key)]; ok {
- str, ok := v.(string)
- return str, ok
- }
- return "", false
-}
-
-// GetInt gets a int value from the extensions
-func (e Extensions) GetInt(key string) (int, bool) {
- realKey := strings.ToLower(key)
-
- if v, ok := e.GetString(realKey); ok {
- if r, err := strconv.Atoi(v); err == nil {
- return r, true
- }
- }
-
- if v, ok := e[realKey]; ok {
- if r, rOk := v.(float64); rOk {
- return int(r), true
- }
- }
- return -1, false
-}
-
-// GetBool gets a string value from the extensions
-func (e Extensions) GetBool(key string) (bool, bool) {
- if v, ok := e[strings.ToLower(key)]; ok {
- str, ok := v.(bool)
- return str, ok
- }
- return false, false
-}
-
-// GetStringSlice gets a string value from the extensions
-func (e Extensions) GetStringSlice(key string) ([]string, bool) {
- if v, ok := e[strings.ToLower(key)]; ok {
- arr, isSlice := v.([]interface{})
- if !isSlice {
- return nil, false
- }
- var strs []string
- for _, iface := range arr {
- str, isString := iface.(string)
- if !isString {
- return nil, false
- }
- strs = append(strs, str)
- }
- return strs, ok
- }
- return nil, false
-}
-
-// VendorExtensible composition block.
-type VendorExtensible struct {
- Extensions Extensions
-}
-
-// AddExtension adds an extension to this extensible object
-func (v *VendorExtensible) AddExtension(key string, value interface{}) {
- if value == nil {
- return
- }
- if v.Extensions == nil {
- v.Extensions = make(map[string]interface{})
- }
- v.Extensions.Add(key, value)
-}
-
-// MarshalJSON marshals the extensions to json
-func (v VendorExtensible) MarshalJSON() ([]byte, error) {
- toser := make(map[string]interface{})
- for k, v := range v.Extensions {
- lk := strings.ToLower(k)
- if strings.HasPrefix(lk, "x-") {
- toser[k] = v
- }
- }
- return json.Marshal(toser)
-}
-
-// UnmarshalJSON for this extensible object
-func (v *VendorExtensible) UnmarshalJSON(data []byte) error {
- var d map[string]interface{}
- if err := json.Unmarshal(data, &d); err != nil {
- return err
- }
- for k, vv := range d {
- lk := strings.ToLower(k)
- if strings.HasPrefix(lk, "x-") {
- if v.Extensions == nil {
- v.Extensions = map[string]interface{}{}
- }
- v.Extensions[k] = vv
- }
- }
- return nil
-}
-
-// InfoProps the properties for an info definition
-type InfoProps struct {
- Description string `json:"description,omitempty"`
- Title string `json:"title,omitempty"`
- TermsOfService string `json:"termsOfService,omitempty"`
- Contact *ContactInfo `json:"contact,omitempty"`
- License *License `json:"license,omitempty"`
- Version string `json:"version,omitempty"`
-}
-
-// Info object provides metadata about the API.
-// The metadata can be used by the clients if needed, and can be presented in the Swagger-UI for convenience.
-//
-// For more information: http://goo.gl/8us55a#infoObject
-type Info struct {
- VendorExtensible
- InfoProps
-}
-
-// JSONLookup look up a value by the json property name
-func (i Info) JSONLookup(token string) (interface{}, error) {
- if ex, ok := i.Extensions[token]; ok {
- return &ex, nil
- }
- r, _, err := jsonpointer.GetForToken(i.InfoProps, token)
- return r, err
-}
-
-// MarshalJSON marshal this to JSON
-func (i Info) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(i.InfoProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(i.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2), nil
-}
-
-// UnmarshalJSON marshal this from JSON
-func (i *Info) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &i.InfoProps); err != nil {
- return err
- }
- return json.Unmarshal(data, &i.VendorExtensible)
-}
diff --git a/vendor/github.com/go-openapi/spec/items.go b/vendor/github.com/go-openapi/spec/items.go
deleted file mode 100644
index e2afb2133b..0000000000
--- a/vendor/github.com/go-openapi/spec/items.go
+++ /dev/null
@@ -1,234 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-const (
- jsonRef = "$ref"
-)
-
-// SimpleSchema describe swagger simple schemas for parameters and headers
-type SimpleSchema struct {
- Type string `json:"type,omitempty"`
- Nullable bool `json:"nullable,omitempty"`
- Format string `json:"format,omitempty"`
- Items *Items `json:"items,omitempty"`
- CollectionFormat string `json:"collectionFormat,omitempty"`
- Default interface{} `json:"default,omitempty"`
- Example interface{} `json:"example,omitempty"`
-}
-
-// TypeName return the type (or format) of a simple schema
-func (s *SimpleSchema) TypeName() string {
- if s.Format != "" {
- return s.Format
- }
- return s.Type
-}
-
-// ItemsTypeName yields the type of items in a simple schema array
-func (s *SimpleSchema) ItemsTypeName() string {
- if s.Items == nil {
- return ""
- }
- return s.Items.TypeName()
-}
-
-// Items a limited subset of JSON-Schema's items object.
-// It is used by parameter definitions that are not located in "body".
-//
-// For more information: http://goo.gl/8us55a#items-object
-type Items struct {
- Refable
- CommonValidations
- SimpleSchema
- VendorExtensible
-}
-
-// NewItems creates a new instance of items
-func NewItems() *Items {
- return &Items{}
-}
-
-// Typed a fluent builder method for the type of item
-func (i *Items) Typed(tpe, format string) *Items {
- i.Type = tpe
- i.Format = format
- return i
-}
-
-// AsNullable flags this schema as nullable.
-func (i *Items) AsNullable() *Items {
- i.Nullable = true
- return i
-}
-
-// CollectionOf a fluent builder method for an array item
-func (i *Items) CollectionOf(items *Items, format string) *Items {
- i.Type = jsonArray
- i.Items = items
- i.CollectionFormat = format
- return i
-}
-
-// WithDefault sets the default value on this item
-func (i *Items) WithDefault(defaultValue interface{}) *Items {
- i.Default = defaultValue
- return i
-}
-
-// WithMaxLength sets a max length value
-func (i *Items) WithMaxLength(max int64) *Items {
- i.MaxLength = &max
- return i
-}
-
-// WithMinLength sets a min length value
-func (i *Items) WithMinLength(min int64) *Items {
- i.MinLength = &min
- return i
-}
-
-// WithPattern sets a pattern value
-func (i *Items) WithPattern(pattern string) *Items {
- i.Pattern = pattern
- return i
-}
-
-// WithMultipleOf sets a multiple of value
-func (i *Items) WithMultipleOf(number float64) *Items {
- i.MultipleOf = &number
- return i
-}
-
-// WithMaximum sets a maximum number value
-func (i *Items) WithMaximum(max float64, exclusive bool) *Items {
- i.Maximum = &max
- i.ExclusiveMaximum = exclusive
- return i
-}
-
-// WithMinimum sets a minimum number value
-func (i *Items) WithMinimum(min float64, exclusive bool) *Items {
- i.Minimum = &min
- i.ExclusiveMinimum = exclusive
- return i
-}
-
-// WithEnum sets a the enum values (replace)
-func (i *Items) WithEnum(values ...interface{}) *Items {
- i.Enum = append([]interface{}{}, values...)
- return i
-}
-
-// WithMaxItems sets the max items
-func (i *Items) WithMaxItems(size int64) *Items {
- i.MaxItems = &size
- return i
-}
-
-// WithMinItems sets the min items
-func (i *Items) WithMinItems(size int64) *Items {
- i.MinItems = &size
- return i
-}
-
-// UniqueValues dictates that this array can only have unique items
-func (i *Items) UniqueValues() *Items {
- i.UniqueItems = true
- return i
-}
-
-// AllowDuplicates this array can have duplicates
-func (i *Items) AllowDuplicates() *Items {
- i.UniqueItems = false
- return i
-}
-
-// WithValidations is a fluent method to set Items validations
-func (i *Items) WithValidations(val CommonValidations) *Items {
- i.SetValidations(SchemaValidations{CommonValidations: val})
- return i
-}
-
-// UnmarshalJSON hydrates this items instance with the data from JSON
-func (i *Items) UnmarshalJSON(data []byte) error {
- var validations CommonValidations
- if err := json.Unmarshal(data, &validations); err != nil {
- return err
- }
- var ref Refable
- if err := json.Unmarshal(data, &ref); err != nil {
- return err
- }
- var simpleSchema SimpleSchema
- if err := json.Unmarshal(data, &simpleSchema); err != nil {
- return err
- }
- var vendorExtensible VendorExtensible
- if err := json.Unmarshal(data, &vendorExtensible); err != nil {
- return err
- }
- i.Refable = ref
- i.CommonValidations = validations
- i.SimpleSchema = simpleSchema
- i.VendorExtensible = vendorExtensible
- return nil
-}
-
-// MarshalJSON converts this items object to JSON
-func (i Items) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(i.CommonValidations)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(i.SimpleSchema)
- if err != nil {
- return nil, err
- }
- b3, err := json.Marshal(i.Refable)
- if err != nil {
- return nil, err
- }
- b4, err := json.Marshal(i.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b4, b3, b1, b2), nil
-}
-
-// JSONLookup look up a value by the json property name
-func (i Items) JSONLookup(token string) (interface{}, error) {
- if token == jsonRef {
- return &i.Ref, nil
- }
-
- r, _, err := jsonpointer.GetForToken(i.CommonValidations, token)
- if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
- return nil, err
- }
- if r != nil {
- return r, nil
- }
- r, _, err = jsonpointer.GetForToken(i.SimpleSchema, token)
- return r, err
-}
diff --git a/vendor/github.com/go-openapi/spec/license.go b/vendor/github.com/go-openapi/spec/license.go
deleted file mode 100644
index b42f80368e..0000000000
--- a/vendor/github.com/go-openapi/spec/license.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-
- "github.com/go-openapi/swag"
-)
-
-// License information for the exposed API.
-//
-// For more information: http://goo.gl/8us55a#licenseObject
-type License struct {
- LicenseProps
- VendorExtensible
-}
-
-// LicenseProps holds the properties of a License object
-type LicenseProps struct {
- Name string `json:"name,omitempty"`
- URL string `json:"url,omitempty"`
-}
-
-// UnmarshalJSON hydrates License from json
-func (l *License) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &l.LicenseProps); err != nil {
- return err
- }
- return json.Unmarshal(data, &l.VendorExtensible)
-}
-
-// MarshalJSON produces License as json
-func (l License) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(l.LicenseProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(l.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2), nil
-}
diff --git a/vendor/github.com/go-openapi/spec/normalizer.go b/vendor/github.com/go-openapi/spec/normalizer.go
deleted file mode 100644
index e8b6009945..0000000000
--- a/vendor/github.com/go-openapi/spec/normalizer.go
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "net/url"
- "path"
- "strings"
-)
-
-const fileScheme = "file"
-
-// normalizeURI ensures that all $ref paths used internally by the expander are canonicalized.
-//
-// NOTE(windows): there is a tolerance over the strict URI format on windows.
-//
-// The normalizer accepts relative file URLs like 'Path\File.JSON' as well as absolute file URLs like
-// 'C:\Path\file.Yaml'.
-//
-// Both are canonicalized with a "file://" scheme, slashes and a lower-cased path:
-// 'file:///c:/path/file.yaml'
-//
-// URLs can be specified with a file scheme, like in 'file:///folder/file.json' or
-// 'file:///c:\folder\File.json'.
-//
-// URLs like file://C:\folder are considered invalid (i.e. there is no host 'c:\folder') and a "repair"
-// is attempted.
-//
-// The base path argument is assumed to be canonicalized (e.g. using normalizeBase()).
-func normalizeURI(refPath, base string) string {
- refURL, err := parseURL(refPath)
- if err != nil {
- specLogger.Printf("warning: invalid URI in $ref %q: %v", refPath, err)
- refURL, refPath = repairURI(refPath)
- }
-
- fixWindowsURI(refURL, refPath) // noop on non-windows OS
-
- refURL.Path = path.Clean(refURL.Path)
- if refURL.Path == "." {
- refURL.Path = ""
- }
-
- r := MustCreateRef(refURL.String())
- if r.IsCanonical() {
- return refURL.String()
- }
-
- baseURL, _ := parseURL(base)
- if path.IsAbs(refURL.Path) {
- baseURL.Path = refURL.Path
- } else if refURL.Path != "" {
- baseURL.Path = path.Join(path.Dir(baseURL.Path), refURL.Path)
- }
- // copying fragment from ref to base
- baseURL.Fragment = refURL.Fragment
-
- return baseURL.String()
-}
-
-// denormalizeRef returns the simplest notation for a normalized $ref, given the path of the original root document.
-//
-// When calling this, we assume that:
-// * $ref is a canonical URI
-// * originalRelativeBase is a canonical URI
-//
-// denormalizeRef is currently used when we rewrite a $ref after a circular $ref has been detected.
-// In this case, expansion stops and normally renders the internal canonical $ref.
-//
-// This internal $ref is eventually rebased to the original RelativeBase used for the expansion.
-//
-// There is a special case for schemas that are anchored with an "id":
-// in that case, the rebasing is performed // against the id only if this is an anchor for the initial root document.
-// All other intermediate "id"'s found along the way are ignored for the purpose of rebasing.
-func denormalizeRef(ref *Ref, originalRelativeBase, id string) Ref {
- debugLog("denormalizeRef called:\n$ref: %q\noriginal: %s\nroot ID:%s", ref.String(), originalRelativeBase, id)
-
- if ref.String() == "" || ref.IsRoot() || ref.HasFragmentOnly {
- // short circuit: $ref to current doc
- return *ref
- }
-
- if id != "" {
- idBaseURL, err := parseURL(id)
- if err == nil { // if the schema id is not usable as a URI, ignore it
- if ref, ok := rebase(ref, idBaseURL, true); ok { // rebase, but keep references to root unchaged (do not want $ref: "")
- // $ref relative to the ID of the schema in the root document
- return ref
- }
- }
- }
-
- originalRelativeBaseURL, _ := parseURL(originalRelativeBase)
-
- r, _ := rebase(ref, originalRelativeBaseURL, false)
-
- return r
-}
-
-func rebase(ref *Ref, v *url.URL, notEqual bool) (Ref, bool) {
- var newBase url.URL
-
- u := ref.GetURL()
-
- if u.Scheme != v.Scheme || u.Host != v.Host {
- return *ref, false
- }
-
- docPath := v.Path
- v.Path = path.Dir(v.Path)
-
- if v.Path == "." {
- v.Path = ""
- } else if !strings.HasSuffix(v.Path, "/") {
- v.Path += "/"
- }
-
- newBase.Fragment = u.Fragment
-
- if strings.HasPrefix(u.Path, docPath) {
- newBase.Path = strings.TrimPrefix(u.Path, docPath)
- } else {
- newBase.Path = strings.TrimPrefix(u.Path, v.Path)
- }
-
- if notEqual && newBase.Path == "" && newBase.Fragment == "" {
- // do not want rebasing to end up in an empty $ref
- return *ref, false
- }
-
- if path.IsAbs(newBase.Path) {
- // whenever we end up with an absolute path, specify the scheme and host
- newBase.Scheme = v.Scheme
- newBase.Host = v.Host
- }
-
- return MustCreateRef(newBase.String()), true
-}
-
-// normalizeRef canonicalize a Ref, using a canonical relativeBase as its absolute anchor
-func normalizeRef(ref *Ref, relativeBase string) *Ref {
- r := MustCreateRef(normalizeURI(ref.String(), relativeBase))
- return &r
-}
-
-// normalizeBase performs a normalization of the input base path.
-//
-// This always yields a canonical URI (absolute), usable for the document cache.
-//
-// It ensures that all further internal work on basePath may safely assume
-// a non-empty, cross-platform, canonical URI (i.e. absolute).
-//
-// This normalization tolerates windows paths (e.g. C:\x\y\File.dat) and transform this
-// in a file:// URL with lower cased drive letter and path.
-//
-// See also: https://en.wikipedia.org/wiki/File_URI_scheme
-func normalizeBase(in string) string {
- u, err := parseURL(in)
- if err != nil {
- specLogger.Printf("warning: invalid URI in RelativeBase %q: %v", in, err)
- u, in = repairURI(in)
- }
-
- u.Fragment = "" // any fragment in the base is irrelevant
-
- fixWindowsURI(u, in) // noop on non-windows OS
-
- u.Path = path.Clean(u.Path)
- if u.Path == "." { // empty after Clean()
- u.Path = ""
- }
-
- if u.Scheme != "" {
- if path.IsAbs(u.Path) || u.Scheme != fileScheme {
- // this is absolute or explicitly not a local file: we're good
- return u.String()
- }
- }
-
- // no scheme or file scheme with relative path: assume file and make it absolute
- // enforce scheme file://... with absolute path.
- //
- // If the input path is relative, we anchor the path to the current working directory.
- // NOTE: we may end up with a host component. Leave it unchanged: e.g. file://host/folder/file.json
-
- u.Scheme = fileScheme
- u.Path = absPath(u.Path) // platform-dependent
- u.RawQuery = "" // any query component is irrelevant for a base
- return u.String()
-}
diff --git a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go
deleted file mode 100644
index f19f1a8fb6..0000000000
--- a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go
+++ /dev/null
@@ -1,44 +0,0 @@
-//go:build !windows
-// +build !windows
-
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "net/url"
- "path/filepath"
-)
-
-// absPath makes a file path absolute and compatible with a URI path component.
-//
-// The parameter must be a path, not an URI.
-func absPath(in string) string {
- anchored, err := filepath.Abs(in)
- if err != nil {
- specLogger.Printf("warning: could not resolve current working directory: %v", err)
- return in
- }
- return anchored
-}
-
-func repairURI(in string) (*url.URL, string) {
- u, _ := parseURL("")
- debugLog("repaired URI: original: %q, repaired: %q", in, "")
- return u, ""
-}
-
-func fixWindowsURI(_ *url.URL, _ string) {
-}
diff --git a/vendor/github.com/go-openapi/spec/normalizer_windows.go b/vendor/github.com/go-openapi/spec/normalizer_windows.go
deleted file mode 100644
index a66c532dbc..0000000000
--- a/vendor/github.com/go-openapi/spec/normalizer_windows.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// -build windows
-
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "net/url"
- "os"
- "path"
- "path/filepath"
- "strings"
-)
-
-// absPath makes a file path absolute and compatible with a URI path component
-//
-// The parameter must be a path, not an URI.
-func absPath(in string) string {
- // NOTE(windows): filepath.Abs exhibits a special behavior on windows for empty paths.
- // See https://github.com/golang/go/issues/24441
- if in == "" {
- in = "."
- }
-
- anchored, err := filepath.Abs(in)
- if err != nil {
- specLogger.Printf("warning: could not resolve current working directory: %v", err)
- return in
- }
-
- pth := strings.ReplaceAll(strings.ToLower(anchored), `\`, `/`)
- if !strings.HasPrefix(pth, "/") {
- pth = "/" + pth
- }
-
- return path.Clean(pth)
-}
-
-// repairURI tolerates invalid file URIs with common typos
-// such as 'file://E:\folder\file', that break the regular URL parser.
-//
-// Adopting the same defaults as for unixes (e.g. return an empty path) would
-// result into a counter-intuitive result for that case (e.g. E:\folder\file is
-// eventually resolved as the current directory). The repair will detect the missing "/".
-//
-// Note that this only works for the file scheme.
-func repairURI(in string) (*url.URL, string) {
- const prefix = fileScheme + "://"
- if !strings.HasPrefix(in, prefix) {
- // giving up: resolve to empty path
- u, _ := parseURL("")
-
- return u, ""
- }
-
- // attempt the repair, stripping the scheme should be sufficient
- u, _ := parseURL(strings.TrimPrefix(in, prefix))
- debugLog("repaired URI: original: %q, repaired: %q", in, u.String())
-
- return u, u.String()
-}
-
-// fixWindowsURI tolerates an absolute file path on windows such as C:\Base\File.yaml or \\host\share\Base\File.yaml
-// and makes it a canonical URI: file:///c:/base/file.yaml
-//
-// Catch 22 notes for Windows:
-//
-// * There may be a drive letter on windows (it is lower-cased)
-// * There may be a share UNC, e.g. \\server\folder\data.xml
-// * Paths are case insensitive
-// * Paths may already contain slashes
-// * Paths must be slashed
-//
-// NOTE: there is no escaping. "/" may be valid separators just like "\".
-// We don't use ToSlash() (which escapes everything) because windows now also
-// tolerates the use of "/". Hence, both C:\File.yaml and C:/File.yaml will work.
-func fixWindowsURI(u *url.URL, in string) {
- drive := filepath.VolumeName(in)
-
- if len(drive) > 0 {
- if len(u.Scheme) == 1 && strings.EqualFold(u.Scheme, drive[:1]) { // a path with a drive letter
- u.Scheme = fileScheme
- u.Host = ""
- u.Path = strings.Join([]string{drive, u.Opaque, u.Path}, `/`) // reconstruct the full path component (no fragment, no query)
- } else if u.Host == "" && strings.HasPrefix(u.Path, drive) { // a path with a \\host volume
- // NOTE: the special host@port syntax for UNC is not supported (yet)
- u.Scheme = fileScheme
-
- // this is a modified version of filepath.Dir() to apply on the VolumeName itself
- i := len(drive) - 1
- for i >= 0 && !os.IsPathSeparator(drive[i]) {
- i--
- }
- host := drive[:i] // \\host\share => host
-
- u.Path = strings.TrimPrefix(u.Path, host)
- u.Host = strings.TrimPrefix(host, `\\`)
- }
-
- u.Opaque = ""
- u.Path = strings.ReplaceAll(strings.ToLower(u.Path), `\`, `/`)
-
- // ensure we form an absolute path
- if !strings.HasPrefix(u.Path, "/") {
- u.Path = "/" + u.Path
- }
-
- u.Path = path.Clean(u.Path)
-
- return
- }
-
- if u.Scheme == fileScheme {
- // Handle dodgy cases for file://{...} URIs on windows.
- // A canonical URI should always be followed by an absolute path.
- //
- // Examples:
- // * file:///folder/file => valid, unchanged
- // * file:///c:\folder\file => slashed
- // * file:///./folder/file => valid, cleaned to remove the dot
- // * file:///.\folder\file => remapped to cwd
- // * file:///. => dodgy, remapped to / (consistent with the behavior on unix)
- // * file:///.. => dodgy, remapped to / (consistent with the behavior on unix)
- if (!path.IsAbs(u.Path) && !filepath.IsAbs(u.Path)) || (strings.HasPrefix(u.Path, `/.`) && strings.Contains(u.Path, `\`)) {
- // ensure we form an absolute path
- u.Path, _ = filepath.Abs(strings.TrimLeft(u.Path, `/`))
- if !strings.HasPrefix(u.Path, "/") {
- u.Path = "/" + u.Path
- }
- }
- u.Path = strings.ToLower(u.Path)
- }
-
- // NOTE: lower case normalization does not propagate to inner resources,
- // generated when rebasing: when joining a relative URI with a file to an absolute base,
- // only the base is currently lower-cased.
- //
- // For now, we assume this is good enough for most use cases
- // and try not to generate too many differences
- // between the output produced on different platforms.
- u.Path = path.Clean(strings.ReplaceAll(u.Path, `\`, `/`))
-}
diff --git a/vendor/github.com/go-openapi/spec/operation.go b/vendor/github.com/go-openapi/spec/operation.go
deleted file mode 100644
index a69cca8814..0000000000
--- a/vendor/github.com/go-openapi/spec/operation.go
+++ /dev/null
@@ -1,400 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/json"
- "sort"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-func init() {
- gob.Register(map[string]interface{}{})
- gob.Register([]interface{}{})
-}
-
-// OperationProps describes an operation
-//
-// NOTES:
-// - schemes, when present must be from [http, https, ws, wss]: see validate
-// - Security is handled as a special case: see MarshalJSON function
-type OperationProps struct {
- Description string `json:"description,omitempty"`
- Consumes []string `json:"consumes,omitempty"`
- Produces []string `json:"produces,omitempty"`
- Schemes []string `json:"schemes,omitempty"`
- Tags []string `json:"tags,omitempty"`
- Summary string `json:"summary,omitempty"`
- ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
- ID string `json:"operationId,omitempty"`
- Deprecated bool `json:"deprecated,omitempty"`
- Security []map[string][]string `json:"security,omitempty"`
- Parameters []Parameter `json:"parameters,omitempty"`
- Responses *Responses `json:"responses,omitempty"`
-}
-
-// MarshalJSON takes care of serializing operation properties to JSON
-//
-// We use a custom marhaller here to handle a special cases related to
-// the Security field. We need to preserve zero length slice
-// while omitting the field when the value is nil/unset.
-func (op OperationProps) MarshalJSON() ([]byte, error) {
- type Alias OperationProps
- if op.Security == nil {
- return json.Marshal(&struct {
- Security []map[string][]string `json:"security,omitempty"`
- *Alias
- }{
- Security: op.Security,
- Alias: (*Alias)(&op),
- })
- }
- return json.Marshal(&struct {
- Security []map[string][]string `json:"security"`
- *Alias
- }{
- Security: op.Security,
- Alias: (*Alias)(&op),
- })
-}
-
-// Operation describes a single API operation on a path.
-//
-// For more information: http://goo.gl/8us55a#operationObject
-type Operation struct {
- VendorExtensible
- OperationProps
-}
-
-// SuccessResponse gets a success response model
-func (o *Operation) SuccessResponse() (*Response, int, bool) {
- if o.Responses == nil {
- return nil, 0, false
- }
-
- responseCodes := make([]int, 0, len(o.Responses.StatusCodeResponses))
- for k := range o.Responses.StatusCodeResponses {
- if k >= 200 && k < 300 {
- responseCodes = append(responseCodes, k)
- }
- }
- if len(responseCodes) > 0 {
- sort.Ints(responseCodes)
- v := o.Responses.StatusCodeResponses[responseCodes[0]]
- return &v, responseCodes[0], true
- }
-
- return o.Responses.Default, 0, false
-}
-
-// JSONLookup look up a value by the json property name
-func (o Operation) JSONLookup(token string) (interface{}, error) {
- if ex, ok := o.Extensions[token]; ok {
- return &ex, nil
- }
- r, _, err := jsonpointer.GetForToken(o.OperationProps, token)
- return r, err
-}
-
-// UnmarshalJSON hydrates this items instance with the data from JSON
-func (o *Operation) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &o.OperationProps); err != nil {
- return err
- }
- return json.Unmarshal(data, &o.VendorExtensible)
-}
-
-// MarshalJSON converts this items object to JSON
-func (o Operation) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(o.OperationProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(o.VendorExtensible)
- if err != nil {
- return nil, err
- }
- concated := swag.ConcatJSON(b1, b2)
- return concated, nil
-}
-
-// NewOperation creates a new operation instance.
-// It expects an ID as parameter but not passing an ID is also valid.
-func NewOperation(id string) *Operation {
- op := new(Operation)
- op.ID = id
- return op
-}
-
-// WithID sets the ID property on this operation, allows for chaining.
-func (o *Operation) WithID(id string) *Operation {
- o.ID = id
- return o
-}
-
-// WithDescription sets the description on this operation, allows for chaining
-func (o *Operation) WithDescription(description string) *Operation {
- o.Description = description
- return o
-}
-
-// WithSummary sets the summary on this operation, allows for chaining
-func (o *Operation) WithSummary(summary string) *Operation {
- o.Summary = summary
- return o
-}
-
-// WithExternalDocs sets/removes the external docs for/from this operation.
-// When you pass empty strings as params the external documents will be removed.
-// When you pass non-empty string as one value then those values will be used on the external docs object.
-// So when you pass a non-empty description, you should also pass the url and vice versa.
-func (o *Operation) WithExternalDocs(description, url string) *Operation {
- if description == "" && url == "" {
- o.ExternalDocs = nil
- return o
- }
-
- if o.ExternalDocs == nil {
- o.ExternalDocs = &ExternalDocumentation{}
- }
- o.ExternalDocs.Description = description
- o.ExternalDocs.URL = url
- return o
-}
-
-// Deprecate marks the operation as deprecated
-func (o *Operation) Deprecate() *Operation {
- o.Deprecated = true
- return o
-}
-
-// Undeprecate marks the operation as not deprected
-func (o *Operation) Undeprecate() *Operation {
- o.Deprecated = false
- return o
-}
-
-// WithConsumes adds media types for incoming body values
-func (o *Operation) WithConsumes(mediaTypes ...string) *Operation {
- o.Consumes = append(o.Consumes, mediaTypes...)
- return o
-}
-
-// WithProduces adds media types for outgoing body values
-func (o *Operation) WithProduces(mediaTypes ...string) *Operation {
- o.Produces = append(o.Produces, mediaTypes...)
- return o
-}
-
-// WithTags adds tags for this operation
-func (o *Operation) WithTags(tags ...string) *Operation {
- o.Tags = append(o.Tags, tags...)
- return o
-}
-
-// AddParam adds a parameter to this operation, when a parameter for that location
-// and with that name already exists it will be replaced
-func (o *Operation) AddParam(param *Parameter) *Operation {
- if param == nil {
- return o
- }
-
- for i, p := range o.Parameters {
- if p.Name == param.Name && p.In == param.In {
- params := make([]Parameter, 0, len(o.Parameters)+1)
- params = append(params, o.Parameters[:i]...)
- params = append(params, *param)
- params = append(params, o.Parameters[i+1:]...)
- o.Parameters = params
-
- return o
- }
- }
-
- o.Parameters = append(o.Parameters, *param)
- return o
-}
-
-// RemoveParam removes a parameter from the operation
-func (o *Operation) RemoveParam(name, in string) *Operation {
- for i, p := range o.Parameters {
- if p.Name == name && p.In == in {
- o.Parameters = append(o.Parameters[:i], o.Parameters[i+1:]...)
- return o
- }
- }
- return o
-}
-
-// SecuredWith adds a security scope to this operation.
-func (o *Operation) SecuredWith(name string, scopes ...string) *Operation {
- o.Security = append(o.Security, map[string][]string{name: scopes})
- return o
-}
-
-// WithDefaultResponse adds a default response to the operation.
-// Passing a nil value will remove the response
-func (o *Operation) WithDefaultResponse(response *Response) *Operation {
- return o.RespondsWith(0, response)
-}
-
-// RespondsWith adds a status code response to the operation.
-// When the code is 0 the value of the response will be used as default response value.
-// When the value of the response is nil it will be removed from the operation
-func (o *Operation) RespondsWith(code int, response *Response) *Operation {
- if o.Responses == nil {
- o.Responses = new(Responses)
- }
- if code == 0 {
- o.Responses.Default = response
- return o
- }
- if response == nil {
- delete(o.Responses.StatusCodeResponses, code)
- return o
- }
- if o.Responses.StatusCodeResponses == nil {
- o.Responses.StatusCodeResponses = make(map[int]Response)
- }
- o.Responses.StatusCodeResponses[code] = *response
- return o
-}
-
-type opsAlias OperationProps
-
-type gobAlias struct {
- Security []map[string]struct {
- List []string
- Pad bool
- }
- Alias *opsAlias
- SecurityIsEmpty bool
-}
-
-// GobEncode provides a safe gob encoder for Operation, including empty security requirements
-func (o Operation) GobEncode() ([]byte, error) {
- raw := struct {
- Ext VendorExtensible
- Props OperationProps
- }{
- Ext: o.VendorExtensible,
- Props: o.OperationProps,
- }
- var b bytes.Buffer
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
-}
-
-// GobDecode provides a safe gob decoder for Operation, including empty security requirements
-func (o *Operation) GobDecode(b []byte) error {
- var raw struct {
- Ext VendorExtensible
- Props OperationProps
- }
-
- buf := bytes.NewBuffer(b)
- err := gob.NewDecoder(buf).Decode(&raw)
- if err != nil {
- return err
- }
- o.VendorExtensible = raw.Ext
- o.OperationProps = raw.Props
- return nil
-}
-
-// GobEncode provides a safe gob encoder for Operation, including empty security requirements
-func (op OperationProps) GobEncode() ([]byte, error) {
- raw := gobAlias{
- Alias: (*opsAlias)(&op),
- }
-
- var b bytes.Buffer
- if op.Security == nil {
- // nil security requirement
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
- }
-
- if len(op.Security) == 0 {
- // empty, but non-nil security requirement
- raw.SecurityIsEmpty = true
- raw.Alias.Security = nil
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
- }
-
- raw.Security = make([]map[string]struct {
- List []string
- Pad bool
- }, 0, len(op.Security))
- for _, req := range op.Security {
- v := make(map[string]struct {
- List []string
- Pad bool
- }, len(req))
- for k, val := range req {
- v[k] = struct {
- List []string
- Pad bool
- }{
- List: val,
- }
- }
- raw.Security = append(raw.Security, v)
- }
-
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
-}
-
-// GobDecode provides a safe gob decoder for Operation, including empty security requirements
-func (op *OperationProps) GobDecode(b []byte) error {
- var raw gobAlias
-
- buf := bytes.NewBuffer(b)
- err := gob.NewDecoder(buf).Decode(&raw)
- if err != nil {
- return err
- }
- if raw.Alias == nil {
- return nil
- }
-
- switch {
- case raw.SecurityIsEmpty:
- // empty, but non-nil security requirement
- raw.Alias.Security = []map[string][]string{}
- case len(raw.Alias.Security) == 0:
- // nil security requirement
- raw.Alias.Security = nil
- default:
- raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security))
- for _, req := range raw.Security {
- v := make(map[string][]string, len(req))
- for k, val := range req {
- v[k] = make([]string, 0, len(val.List))
- v[k] = append(v[k], val.List...)
- }
- raw.Alias.Security = append(raw.Alias.Security, v)
- }
- }
-
- *op = *(*OperationProps)(raw.Alias)
- return nil
-}
diff --git a/vendor/github.com/go-openapi/spec/parameter.go b/vendor/github.com/go-openapi/spec/parameter.go
deleted file mode 100644
index bd4f1cdb07..0000000000
--- a/vendor/github.com/go-openapi/spec/parameter.go
+++ /dev/null
@@ -1,326 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// QueryParam creates a query parameter
-func QueryParam(name string) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name, In: "query"}}
-}
-
-// HeaderParam creates a header parameter, this is always required by default
-func HeaderParam(name string) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name, In: "header", Required: true}}
-}
-
-// PathParam creates a path parameter, this is always required
-func PathParam(name string) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name, In: "path", Required: true}}
-}
-
-// BodyParam creates a body parameter
-func BodyParam(name string, schema *Schema) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name, In: "body", Schema: schema}}
-}
-
-// FormDataParam creates a body parameter
-func FormDataParam(name string) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"}}
-}
-
-// FileParam creates a body parameter
-func FileParam(name string) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"},
- SimpleSchema: SimpleSchema{Type: "file"}}
-}
-
-// SimpleArrayParam creates a param for a simple array (string, int, date etc)
-func SimpleArrayParam(name, tpe, fmt string) *Parameter {
- return &Parameter{ParamProps: ParamProps{Name: name},
- SimpleSchema: SimpleSchema{Type: jsonArray, CollectionFormat: "csv",
- Items: &Items{SimpleSchema: SimpleSchema{Type: tpe, Format: fmt}}}}
-}
-
-// ParamRef creates a parameter that's a json reference
-func ParamRef(uri string) *Parameter {
- p := new(Parameter)
- p.Ref = MustCreateRef(uri)
- return p
-}
-
-// ParamProps describes the specific attributes of an operation parameter
-//
-// NOTE:
-// - Schema is defined when "in" == "body": see validate
-// - AllowEmptyValue is allowed where "in" == "query" || "formData"
-type ParamProps struct {
- Description string `json:"description,omitempty"`
- Name string `json:"name,omitempty"`
- In string `json:"in,omitempty"`
- Required bool `json:"required,omitempty"`
- Schema *Schema `json:"schema,omitempty"`
- AllowEmptyValue bool `json:"allowEmptyValue,omitempty"`
-}
-
-// Parameter a unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn).
-//
-// There are five possible parameter types.
-// - Path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part
-// of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`,
-// the path parameter is `itemId`.
-// - Query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`.
-// - Header - Custom headers that are expected as part of the request.
-// - Body - The payload that's appended to the HTTP request. Since there can only be one payload, there can only be
-// _one_ body parameter. The name of the body parameter has no effect on the parameter itself and is used for
-// documentation purposes only. Since Form parameters are also in the payload, body and form parameters cannot exist
-// together for the same operation.
-// - Form - Used to describe the payload of an HTTP request when either `application/x-www-form-urlencoded` or
-// `multipart/form-data` are used as the content type of the request (in Swagger's definition,
-// the [`consumes`](#operationConsumes) property of an operation). This is the only parameter type that can be used
-// to send files, thus supporting the `file` type. Since form parameters are sent in the payload, they cannot be
-// declared together with a body parameter for the same operation. Form parameters have a different format based on
-// the content-type used (for further details, consult http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4).
-// - `application/x-www-form-urlencoded` - Similar to the format of Query parameters but as a payload.
-// For example, `foo=1&bar=swagger` - both `foo` and `bar` are form parameters. This is normally used for simple
-// parameters that are being transferred.
-// - `multipart/form-data` - each parameter takes a section in the payload with an internal header.
-// For example, for the header `Content-Disposition: form-data; name="submit-name"` the name of the parameter is
-// `submit-name`. This type of form parameters is more commonly used for file transfers.
-//
-// For more information: http://goo.gl/8us55a#parameterObject
-type Parameter struct {
- Refable
- CommonValidations
- SimpleSchema
- VendorExtensible
- ParamProps
-}
-
-// JSONLookup look up a value by the json property name
-func (p Parameter) JSONLookup(token string) (interface{}, error) {
- if ex, ok := p.Extensions[token]; ok {
- return &ex, nil
- }
- if token == jsonRef {
- return &p.Ref, nil
- }
-
- r, _, err := jsonpointer.GetForToken(p.CommonValidations, token)
- if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
- return nil, err
- }
- if r != nil {
- return r, nil
- }
- r, _, err = jsonpointer.GetForToken(p.SimpleSchema, token)
- if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
- return nil, err
- }
- if r != nil {
- return r, nil
- }
- r, _, err = jsonpointer.GetForToken(p.ParamProps, token)
- return r, err
-}
-
-// WithDescription a fluent builder method for the description of the parameter
-func (p *Parameter) WithDescription(description string) *Parameter {
- p.Description = description
- return p
-}
-
-// Named a fluent builder method to override the name of the parameter
-func (p *Parameter) Named(name string) *Parameter {
- p.Name = name
- return p
-}
-
-// WithLocation a fluent builder method to override the location of the parameter
-func (p *Parameter) WithLocation(in string) *Parameter {
- p.In = in
- return p
-}
-
-// Typed a fluent builder method for the type of the parameter value
-func (p *Parameter) Typed(tpe, format string) *Parameter {
- p.Type = tpe
- p.Format = format
- return p
-}
-
-// CollectionOf a fluent builder method for an array parameter
-func (p *Parameter) CollectionOf(items *Items, format string) *Parameter {
- p.Type = jsonArray
- p.Items = items
- p.CollectionFormat = format
- return p
-}
-
-// WithDefault sets the default value on this parameter
-func (p *Parameter) WithDefault(defaultValue interface{}) *Parameter {
- p.AsOptional() // with default implies optional
- p.Default = defaultValue
- return p
-}
-
-// AllowsEmptyValues flags this parameter as being ok with empty values
-func (p *Parameter) AllowsEmptyValues() *Parameter {
- p.AllowEmptyValue = true
- return p
-}
-
-// NoEmptyValues flags this parameter as not liking empty values
-func (p *Parameter) NoEmptyValues() *Parameter {
- p.AllowEmptyValue = false
- return p
-}
-
-// AsOptional flags this parameter as optional
-func (p *Parameter) AsOptional() *Parameter {
- p.Required = false
- return p
-}
-
-// AsRequired flags this parameter as required
-func (p *Parameter) AsRequired() *Parameter {
- if p.Default != nil { // with a default required makes no sense
- return p
- }
- p.Required = true
- return p
-}
-
-// WithMaxLength sets a max length value
-func (p *Parameter) WithMaxLength(max int64) *Parameter {
- p.MaxLength = &max
- return p
-}
-
-// WithMinLength sets a min length value
-func (p *Parameter) WithMinLength(min int64) *Parameter {
- p.MinLength = &min
- return p
-}
-
-// WithPattern sets a pattern value
-func (p *Parameter) WithPattern(pattern string) *Parameter {
- p.Pattern = pattern
- return p
-}
-
-// WithMultipleOf sets a multiple of value
-func (p *Parameter) WithMultipleOf(number float64) *Parameter {
- p.MultipleOf = &number
- return p
-}
-
-// WithMaximum sets a maximum number value
-func (p *Parameter) WithMaximum(max float64, exclusive bool) *Parameter {
- p.Maximum = &max
- p.ExclusiveMaximum = exclusive
- return p
-}
-
-// WithMinimum sets a minimum number value
-func (p *Parameter) WithMinimum(min float64, exclusive bool) *Parameter {
- p.Minimum = &min
- p.ExclusiveMinimum = exclusive
- return p
-}
-
-// WithEnum sets a the enum values (replace)
-func (p *Parameter) WithEnum(values ...interface{}) *Parameter {
- p.Enum = append([]interface{}{}, values...)
- return p
-}
-
-// WithMaxItems sets the max items
-func (p *Parameter) WithMaxItems(size int64) *Parameter {
- p.MaxItems = &size
- return p
-}
-
-// WithMinItems sets the min items
-func (p *Parameter) WithMinItems(size int64) *Parameter {
- p.MinItems = &size
- return p
-}
-
-// UniqueValues dictates that this array can only have unique items
-func (p *Parameter) UniqueValues() *Parameter {
- p.UniqueItems = true
- return p
-}
-
-// AllowDuplicates this array can have duplicates
-func (p *Parameter) AllowDuplicates() *Parameter {
- p.UniqueItems = false
- return p
-}
-
-// WithValidations is a fluent method to set parameter validations
-func (p *Parameter) WithValidations(val CommonValidations) *Parameter {
- p.SetValidations(SchemaValidations{CommonValidations: val})
- return p
-}
-
-// UnmarshalJSON hydrates this items instance with the data from JSON
-func (p *Parameter) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &p.CommonValidations); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &p.Refable); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &p.SimpleSchema); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &p.VendorExtensible); err != nil {
- return err
- }
- return json.Unmarshal(data, &p.ParamProps)
-}
-
-// MarshalJSON converts this items object to JSON
-func (p Parameter) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(p.CommonValidations)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(p.SimpleSchema)
- if err != nil {
- return nil, err
- }
- b3, err := json.Marshal(p.Refable)
- if err != nil {
- return nil, err
- }
- b4, err := json.Marshal(p.VendorExtensible)
- if err != nil {
- return nil, err
- }
- b5, err := json.Marshal(p.ParamProps)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b3, b1, b2, b4, b5), nil
-}
diff --git a/vendor/github.com/go-openapi/spec/path_item.go b/vendor/github.com/go-openapi/spec/path_item.go
deleted file mode 100644
index 68fc8e9014..0000000000
--- a/vendor/github.com/go-openapi/spec/path_item.go
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// PathItemProps the path item specific properties
-type PathItemProps struct {
- Get *Operation `json:"get,omitempty"`
- Put *Operation `json:"put,omitempty"`
- Post *Operation `json:"post,omitempty"`
- Delete *Operation `json:"delete,omitempty"`
- Options *Operation `json:"options,omitempty"`
- Head *Operation `json:"head,omitempty"`
- Patch *Operation `json:"patch,omitempty"`
- Parameters []Parameter `json:"parameters,omitempty"`
-}
-
-// PathItem describes the operations available on a single path.
-// A Path Item may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering).
-// The path itself is still exposed to the documentation viewer but they will
-// not know which operations and parameters are available.
-//
-// For more information: http://goo.gl/8us55a#pathItemObject
-type PathItem struct {
- Refable
- VendorExtensible
- PathItemProps
-}
-
-// JSONLookup look up a value by the json property name
-func (p PathItem) JSONLookup(token string) (interface{}, error) {
- if ex, ok := p.Extensions[token]; ok {
- return &ex, nil
- }
- if token == jsonRef {
- return &p.Ref, nil
- }
- r, _, err := jsonpointer.GetForToken(p.PathItemProps, token)
- return r, err
-}
-
-// UnmarshalJSON hydrates this items instance with the data from JSON
-func (p *PathItem) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &p.Refable); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &p.VendorExtensible); err != nil {
- return err
- }
- return json.Unmarshal(data, &p.PathItemProps)
-}
-
-// MarshalJSON converts this items object to JSON
-func (p PathItem) MarshalJSON() ([]byte, error) {
- b3, err := json.Marshal(p.Refable)
- if err != nil {
- return nil, err
- }
- b4, err := json.Marshal(p.VendorExtensible)
- if err != nil {
- return nil, err
- }
- b5, err := json.Marshal(p.PathItemProps)
- if err != nil {
- return nil, err
- }
- concated := swag.ConcatJSON(b3, b4, b5)
- return concated, nil
-}
diff --git a/vendor/github.com/go-openapi/spec/paths.go b/vendor/github.com/go-openapi/spec/paths.go
deleted file mode 100644
index 9dc82a2901..0000000000
--- a/vendor/github.com/go-openapi/spec/paths.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "fmt"
- "strings"
-
- "github.com/go-openapi/swag"
-)
-
-// Paths holds the relative paths to the individual endpoints.
-// The path is appended to the [`basePath`](http://goo.gl/8us55a#swaggerBasePath) in order
-// to construct the full URL.
-// The Paths may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering).
-//
-// For more information: http://goo.gl/8us55a#pathsObject
-type Paths struct {
- VendorExtensible
- Paths map[string]PathItem `json:"-"` // custom serializer to flatten this, each entry must start with "/"
-}
-
-// JSONLookup look up a value by the json property name
-func (p Paths) JSONLookup(token string) (interface{}, error) {
- if pi, ok := p.Paths[token]; ok {
- return &pi, nil
- }
- if ex, ok := p.Extensions[token]; ok {
- return &ex, nil
- }
- return nil, fmt.Errorf("object has no field %q", token)
-}
-
-// UnmarshalJSON hydrates this items instance with the data from JSON
-func (p *Paths) UnmarshalJSON(data []byte) error {
- var res map[string]json.RawMessage
- if err := json.Unmarshal(data, &res); err != nil {
- return err
- }
- for k, v := range res {
- if strings.HasPrefix(strings.ToLower(k), "x-") {
- if p.Extensions == nil {
- p.Extensions = make(map[string]interface{})
- }
- var d interface{}
- if err := json.Unmarshal(v, &d); err != nil {
- return err
- }
- p.Extensions[k] = d
- }
- if strings.HasPrefix(k, "/") {
- if p.Paths == nil {
- p.Paths = make(map[string]PathItem)
- }
- var pi PathItem
- if err := json.Unmarshal(v, &pi); err != nil {
- return err
- }
- p.Paths[k] = pi
- }
- }
- return nil
-}
-
-// MarshalJSON converts this items object to JSON
-func (p Paths) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(p.VendorExtensible)
- if err != nil {
- return nil, err
- }
-
- pths := make(map[string]PathItem)
- for k, v := range p.Paths {
- if strings.HasPrefix(k, "/") {
- pths[k] = v
- }
- }
- b2, err := json.Marshal(pths)
- if err != nil {
- return nil, err
- }
- concated := swag.ConcatJSON(b1, b2)
- return concated, nil
-}
diff --git a/vendor/github.com/go-openapi/spec/properties.go b/vendor/github.com/go-openapi/spec/properties.go
deleted file mode 100644
index 91d2435f01..0000000000
--- a/vendor/github.com/go-openapi/spec/properties.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package spec
-
-import (
- "bytes"
- "encoding/json"
- "reflect"
- "sort"
-)
-
-// OrderSchemaItem holds a named schema (e.g. from a property of an object)
-type OrderSchemaItem struct {
- Name string
- Schema
-}
-
-// OrderSchemaItems is a sortable slice of named schemas.
-// The ordering is defined by the x-order schema extension.
-type OrderSchemaItems []OrderSchemaItem
-
-// MarshalJSON produces a json object with keys defined by the name schemas
-// of the OrderSchemaItems slice, keeping the original order of the slice.
-func (items OrderSchemaItems) MarshalJSON() ([]byte, error) {
- buf := bytes.NewBuffer(nil)
- buf.WriteString("{")
- for i := range items {
- if i > 0 {
- buf.WriteString(",")
- }
- buf.WriteString("\"")
- buf.WriteString(items[i].Name)
- buf.WriteString("\":")
- bs, err := json.Marshal(&items[i].Schema)
- if err != nil {
- return nil, err
- }
- buf.Write(bs)
- }
- buf.WriteString("}")
- return buf.Bytes(), nil
-}
-
-func (items OrderSchemaItems) Len() int { return len(items) }
-func (items OrderSchemaItems) Swap(i, j int) { items[i], items[j] = items[j], items[i] }
-func (items OrderSchemaItems) Less(i, j int) (ret bool) {
- ii, oki := items[i].Extensions.GetInt("x-order")
- ij, okj := items[j].Extensions.GetInt("x-order")
- if oki {
- if okj {
- defer func() {
- if err := recover(); err != nil {
- defer func() {
- if err = recover(); err != nil {
- ret = items[i].Name < items[j].Name
- }
- }()
- ret = reflect.ValueOf(ii).String() < reflect.ValueOf(ij).String()
- }
- }()
- return ii < ij
- }
- return true
- } else if okj {
- return false
- }
- return items[i].Name < items[j].Name
-}
-
-// SchemaProperties is a map representing the properties of a Schema object.
-// It knows how to transform its keys into an ordered slice.
-type SchemaProperties map[string]Schema
-
-// ToOrderedSchemaItems transforms the map of properties into a sortable slice
-func (properties SchemaProperties) ToOrderedSchemaItems() OrderSchemaItems {
- items := make(OrderSchemaItems, 0, len(properties))
- for k, v := range properties {
- items = append(items, OrderSchemaItem{
- Name: k,
- Schema: v,
- })
- }
- sort.Sort(items)
- return items
-}
-
-// MarshalJSON produces properties as json, keeping their order.
-func (properties SchemaProperties) MarshalJSON() ([]byte, error) {
- if properties == nil {
- return []byte("null"), nil
- }
- return json.Marshal(properties.ToOrderedSchemaItems())
-}
diff --git a/vendor/github.com/go-openapi/spec/ref.go b/vendor/github.com/go-openapi/spec/ref.go
deleted file mode 100644
index b0ef9bd9c9..0000000000
--- a/vendor/github.com/go-openapi/spec/ref.go
+++ /dev/null
@@ -1,193 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/json"
- "net/http"
- "os"
- "path/filepath"
-
- "github.com/go-openapi/jsonreference"
-)
-
-// Refable is a struct for things that accept a $ref property
-type Refable struct {
- Ref Ref
-}
-
-// MarshalJSON marshals the ref to json
-func (r Refable) MarshalJSON() ([]byte, error) {
- return r.Ref.MarshalJSON()
-}
-
-// UnmarshalJSON unmarshalss the ref from json
-func (r *Refable) UnmarshalJSON(d []byte) error {
- return json.Unmarshal(d, &r.Ref)
-}
-
-// Ref represents a json reference that is potentially resolved
-type Ref struct {
- jsonreference.Ref
-}
-
-// RemoteURI gets the remote uri part of the ref
-func (r *Ref) RemoteURI() string {
- if r.String() == "" {
- return ""
- }
-
- u := *r.GetURL()
- u.Fragment = ""
- return u.String()
-}
-
-// IsValidURI returns true when the url the ref points to can be found
-func (r *Ref) IsValidURI(basepaths ...string) bool {
- if r.String() == "" {
- return true
- }
-
- v := r.RemoteURI()
- if v == "" {
- return true
- }
-
- if r.HasFullURL {
- //nolint:noctx,gosec
- rr, err := http.Get(v)
- if err != nil {
- return false
- }
- defer rr.Body.Close()
-
- return rr.StatusCode/100 == 2
- }
-
- if !(r.HasFileScheme || r.HasFullFilePath || r.HasURLPathOnly) {
- return false
- }
-
- // check for local file
- pth := v
- if r.HasURLPathOnly {
- base := "."
- if len(basepaths) > 0 {
- base = filepath.Dir(filepath.Join(basepaths...))
- }
- p, e := filepath.Abs(filepath.ToSlash(filepath.Join(base, pth)))
- if e != nil {
- return false
- }
- pth = p
- }
-
- fi, err := os.Stat(filepath.ToSlash(pth))
- if err != nil {
- return false
- }
-
- return !fi.IsDir()
-}
-
-// Inherits creates a new reference from a parent and a child
-// If the child cannot inherit from the parent, an error is returned
-func (r *Ref) Inherits(child Ref) (*Ref, error) {
- ref, err := r.Ref.Inherits(child.Ref)
- if err != nil {
- return nil, err
- }
- return &Ref{Ref: *ref}, nil
-}
-
-// NewRef creates a new instance of a ref object
-// returns an error when the reference uri is an invalid uri
-func NewRef(refURI string) (Ref, error) {
- ref, err := jsonreference.New(refURI)
- if err != nil {
- return Ref{}, err
- }
- return Ref{Ref: ref}, nil
-}
-
-// MustCreateRef creates a ref object but panics when refURI is invalid.
-// Use the NewRef method for a version that returns an error.
-func MustCreateRef(refURI string) Ref {
- return Ref{Ref: jsonreference.MustCreateRef(refURI)}
-}
-
-// MarshalJSON marshals this ref into a JSON object
-func (r Ref) MarshalJSON() ([]byte, error) {
- str := r.String()
- if str == "" {
- if r.IsRoot() {
- return []byte(`{"$ref":""}`), nil
- }
- return []byte("{}"), nil
- }
- v := map[string]interface{}{"$ref": str}
- return json.Marshal(v)
-}
-
-// UnmarshalJSON unmarshals this ref from a JSON object
-func (r *Ref) UnmarshalJSON(d []byte) error {
- var v map[string]interface{}
- if err := json.Unmarshal(d, &v); err != nil {
- return err
- }
- return r.fromMap(v)
-}
-
-// GobEncode provides a safe gob encoder for Ref
-func (r Ref) GobEncode() ([]byte, error) {
- var b bytes.Buffer
- raw, err := r.MarshalJSON()
- if err != nil {
- return nil, err
- }
- err = gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
-}
-
-// GobDecode provides a safe gob decoder for Ref
-func (r *Ref) GobDecode(b []byte) error {
- var raw []byte
- buf := bytes.NewBuffer(b)
- err := gob.NewDecoder(buf).Decode(&raw)
- if err != nil {
- return err
- }
- return json.Unmarshal(raw, r)
-}
-
-func (r *Ref) fromMap(v map[string]interface{}) error {
- if v == nil {
- return nil
- }
-
- if vv, ok := v["$ref"]; ok {
- if str, ok := vv.(string); ok {
- ref, err := jsonreference.New(str)
- if err != nil {
- return err
- }
- *r = Ref{Ref: ref}
- }
- }
-
- return nil
-}
diff --git a/vendor/github.com/go-openapi/spec/resolver.go b/vendor/github.com/go-openapi/spec/resolver.go
deleted file mode 100644
index 47d1ee13fc..0000000000
--- a/vendor/github.com/go-openapi/spec/resolver.go
+++ /dev/null
@@ -1,127 +0,0 @@
-package spec
-
-import (
- "fmt"
-
- "github.com/go-openapi/swag"
-)
-
-func resolveAnyWithBase(root interface{}, ref *Ref, result interface{}, options *ExpandOptions) error {
- options = optionsOrDefault(options)
- resolver := defaultSchemaLoader(root, options, nil, nil)
-
- if err := resolver.Resolve(ref, result, options.RelativeBase); err != nil {
- return err
- }
-
- return nil
-}
-
-// ResolveRefWithBase resolves a reference against a context root with preservation of base path
-func ResolveRefWithBase(root interface{}, ref *Ref, options *ExpandOptions) (*Schema, error) {
- result := new(Schema)
-
- if err := resolveAnyWithBase(root, ref, result, options); err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// ResolveRef resolves a reference for a schema against a context root
-// ref is guaranteed to be in root (no need to go to external files)
-//
-// ResolveRef is ONLY called from the code generation module
-func ResolveRef(root interface{}, ref *Ref) (*Schema, error) {
- res, _, err := ref.GetPointer().Get(root)
- if err != nil {
- return nil, err
- }
-
- switch sch := res.(type) {
- case Schema:
- return &sch, nil
- case *Schema:
- return sch, nil
- case map[string]interface{}:
- newSch := new(Schema)
- if err = swag.DynamicJSONToStruct(sch, newSch); err != nil {
- return nil, err
- }
- return newSch, nil
- default:
- return nil, fmt.Errorf("type: %T: %w", sch, ErrUnknownTypeForReference)
- }
-}
-
-// ResolveParameterWithBase resolves a parameter reference against a context root and base path
-func ResolveParameterWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Parameter, error) {
- result := new(Parameter)
-
- if err := resolveAnyWithBase(root, &ref, result, options); err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// ResolveParameter resolves a parameter reference against a context root
-func ResolveParameter(root interface{}, ref Ref) (*Parameter, error) {
- return ResolveParameterWithBase(root, ref, nil)
-}
-
-// ResolveResponseWithBase resolves response a reference against a context root and base path
-func ResolveResponseWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Response, error) {
- result := new(Response)
-
- err := resolveAnyWithBase(root, &ref, result, options)
- if err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// ResolveResponse resolves response a reference against a context root
-func ResolveResponse(root interface{}, ref Ref) (*Response, error) {
- return ResolveResponseWithBase(root, ref, nil)
-}
-
-// ResolvePathItemWithBase resolves response a path item against a context root and base path
-func ResolvePathItemWithBase(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) {
- result := new(PathItem)
-
- if err := resolveAnyWithBase(root, &ref, result, options); err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// ResolvePathItem resolves response a path item against a context root and base path
-//
-// Deprecated: use ResolvePathItemWithBase instead
-func ResolvePathItem(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) {
- return ResolvePathItemWithBase(root, ref, options)
-}
-
-// ResolveItemsWithBase resolves parameter items reference against a context root and base path.
-//
-// NOTE: stricly speaking, this construct is not supported by Swagger 2.0.
-// Similarly, $ref are forbidden in response headers.
-func ResolveItemsWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) {
- result := new(Items)
-
- if err := resolveAnyWithBase(root, &ref, result, options); err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// ResolveItems resolves parameter items reference against a context root and base path.
-//
-// Deprecated: use ResolveItemsWithBase instead
-func ResolveItems(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) {
- return ResolveItemsWithBase(root, ref, options)
-}
diff --git a/vendor/github.com/go-openapi/spec/response.go b/vendor/github.com/go-openapi/spec/response.go
deleted file mode 100644
index 0340b60d84..0000000000
--- a/vendor/github.com/go-openapi/spec/response.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// ResponseProps properties specific to a response
-type ResponseProps struct {
- Description string `json:"description"`
- Schema *Schema `json:"schema,omitempty"`
- Headers map[string]Header `json:"headers,omitempty"`
- Examples map[string]interface{} `json:"examples,omitempty"`
-}
-
-// Response describes a single response from an API Operation.
-//
-// For more information: http://goo.gl/8us55a#responseObject
-type Response struct {
- Refable
- ResponseProps
- VendorExtensible
-}
-
-// JSONLookup look up a value by the json property name
-func (r Response) JSONLookup(token string) (interface{}, error) {
- if ex, ok := r.Extensions[token]; ok {
- return &ex, nil
- }
- if token == "$ref" {
- return &r.Ref, nil
- }
- ptr, _, err := jsonpointer.GetForToken(r.ResponseProps, token)
- return ptr, err
-}
-
-// UnmarshalJSON hydrates this response instance with the data from JSON
-func (r *Response) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &r.ResponseProps); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &r.Refable); err != nil {
- return err
- }
- return json.Unmarshal(data, &r.VendorExtensible)
-}
-
-// MarshalJSON converts this response object to JSON
-func (r Response) MarshalJSON() ([]byte, error) {
- var (
- b1 []byte
- err error
- )
-
- if r.Ref.String() == "" {
- // when there is no $ref, empty description is rendered as an empty string
- b1, err = json.Marshal(r.ResponseProps)
- } else {
- // when there is $ref inside the schema, description should be omitempty-ied
- b1, err = json.Marshal(struct {
- Description string `json:"description,omitempty"`
- Schema *Schema `json:"schema,omitempty"`
- Headers map[string]Header `json:"headers,omitempty"`
- Examples map[string]interface{} `json:"examples,omitempty"`
- }{
- Description: r.ResponseProps.Description,
- Schema: r.ResponseProps.Schema,
- Examples: r.ResponseProps.Examples,
- })
- }
- if err != nil {
- return nil, err
- }
-
- b2, err := json.Marshal(r.Refable)
- if err != nil {
- return nil, err
- }
- b3, err := json.Marshal(r.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// NewResponse creates a new response instance
-func NewResponse() *Response {
- return new(Response)
-}
-
-// ResponseRef creates a response as a json reference
-func ResponseRef(url string) *Response {
- resp := NewResponse()
- resp.Ref = MustCreateRef(url)
- return resp
-}
-
-// WithDescription sets the description on this response, allows for chaining
-func (r *Response) WithDescription(description string) *Response {
- r.Description = description
- return r
-}
-
-// WithSchema sets the schema on this response, allows for chaining.
-// Passing a nil argument removes the schema from this response
-func (r *Response) WithSchema(schema *Schema) *Response {
- r.Schema = schema
- return r
-}
-
-// AddHeader adds a header to this response
-func (r *Response) AddHeader(name string, header *Header) *Response {
- if header == nil {
- return r.RemoveHeader(name)
- }
- if r.Headers == nil {
- r.Headers = make(map[string]Header)
- }
- r.Headers[name] = *header
- return r
-}
-
-// RemoveHeader removes a header from this response
-func (r *Response) RemoveHeader(name string) *Response {
- delete(r.Headers, name)
- return r
-}
-
-// AddExample adds an example to this response
-func (r *Response) AddExample(mediaType string, example interface{}) *Response {
- if r.Examples == nil {
- r.Examples = make(map[string]interface{})
- }
- r.Examples[mediaType] = example
- return r
-}
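
For illustration only: a minimal sketch of how the fluent Response builder removed above was typically used. The schema reference and example payload are assumptions made up for this sketch.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Chain the builder helpers: description, schema and an example payload.
	ok := spec.NewResponse().
		WithDescription("user found").
		WithSchema(spec.RefSchema("#/definitions/User")).
		AddExample("application/json", map[string]interface{}{"id": 1, "name": "alice"})

	out, err := json.Marshal(ok)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```
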
diff --git a/vendor/github.com/go-openapi/spec/responses.go b/vendor/github.com/go-openapi/spec/responses.go
deleted file mode 100644
index 16c3076fe8..0000000000
--- a/vendor/github.com/go-openapi/spec/responses.go
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "fmt"
- "reflect"
- "strconv"
- "strings"
-
- "github.com/go-openapi/swag"
-)
-
-// Responses is a container for the expected responses of an operation.
-// The container maps an HTTP response code to the expected response.
-// The documentation is not expected to cover every possible HTTP response code,
-// since they may not be known in advance. However, it is expected to cover
-// a successful operation response and any known errors.
-//
-// The `default` can be used as a default response object for all HTTP codes that are not covered
-// individually by the specification.
-//
-// The `Responses Object` MUST contain at least one response code, and it SHOULD be the response
-// for a successful operation call.
-//
-// For more information: http://goo.gl/8us55a#responsesObject
-type Responses struct {
- VendorExtensible
- ResponsesProps
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (r Responses) JSONLookup(token string) (interface{}, error) {
- if token == "default" {
- return r.Default, nil
- }
- if ex, ok := r.Extensions[token]; ok {
- return &ex, nil
- }
- if i, err := strconv.Atoi(token); err == nil {
- if scr, ok := r.StatusCodeResponses[i]; ok {
- return scr, nil
- }
- }
- return nil, fmt.Errorf("object has no field %q", token)
-}
-
-// UnmarshalJSON hydrates this responses instance with the data from JSON
-func (r *Responses) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &r.ResponsesProps); err != nil {
- return err
- }
-
- if err := json.Unmarshal(data, &r.VendorExtensible); err != nil {
- return err
- }
- if reflect.DeepEqual(ResponsesProps{}, r.ResponsesProps) {
- r.ResponsesProps = ResponsesProps{}
- }
- return nil
-}
-
-// MarshalJSON converts this responses object to JSON
-func (r Responses) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(r.ResponsesProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(r.VendorExtensible)
- if err != nil {
- return nil, err
- }
- concated := swag.ConcatJSON(b1, b2)
- return concated, nil
-}
-
-// ResponsesProps describes all responses for an operation.
-// It holds the default response and maps all other responses by
-// HTTP status code.
-type ResponsesProps struct {
- Default *Response
- StatusCodeResponses map[int]Response
-}
-
-// MarshalJSON marshals responses as JSON
-func (r ResponsesProps) MarshalJSON() ([]byte, error) {
- toser := map[string]Response{}
- if r.Default != nil {
- toser["default"] = *r.Default
- }
- for k, v := range r.StatusCodeResponses {
- toser[strconv.Itoa(k)] = v
- }
- return json.Marshal(toser)
-}
-
-// UnmarshalJSON unmarshals responses from JSON
-func (r *ResponsesProps) UnmarshalJSON(data []byte) error {
- var res map[string]json.RawMessage
- if err := json.Unmarshal(data, &res); err != nil {
- return err
- }
-
- if v, ok := res["default"]; ok {
- var defaultRes Response
- if err := json.Unmarshal(v, &defaultRes); err != nil {
- return err
- }
- r.Default = &defaultRes
- delete(res, "default")
- }
- for k, v := range res {
- if !strings.HasPrefix(k, "x-") {
- var statusCodeResp Response
- if err := json.Unmarshal(v, &statusCodeResp); err != nil {
- return err
- }
- if nk, err := strconv.Atoi(k); err == nil {
- if r.StatusCodeResponses == nil {
- r.StatusCodeResponses = map[int]Response{}
- }
- r.StatusCodeResponses[nk] = statusCodeResp
- }
- }
- }
- return nil
-}
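
As a hedged illustration of the container described above: mapping concrete status codes plus a `default` entry, using the Responses/ResponsesProps types exactly as deleted here. The codes and descriptions are invented for the example.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// "default" covers any status code not listed explicitly.
	rs := spec.Responses{
		ResponsesProps: spec.ResponsesProps{
			Default: spec.NewResponse().WithDescription("unexpected error"),
			StatusCodeResponses: map[int]spec.Response{
				200: *spec.NewResponse().WithDescription("OK"),
				404: *spec.NewResponse().WithDescription("not found"),
			},
		},
	}

	out, err := json.Marshal(rs)
	if err != nil {
		panic(err)
	}
	// Serializes as a single object keyed by "200", "404" and "default".
	fmt.Println(string(out))
}
```
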
diff --git a/vendor/github.com/go-openapi/spec/schema.go b/vendor/github.com/go-openapi/spec/schema.go
deleted file mode 100644
index 4e9be8576b..0000000000
--- a/vendor/github.com/go-openapi/spec/schema.go
+++ /dev/null
@@ -1,645 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "fmt"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// BooleanProperty creates a boolean property
-func BooleanProperty() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"boolean"}}}
-}
-
-// BoolProperty creates a boolean property
-func BoolProperty() *Schema { return BooleanProperty() }
-
-// StringProperty creates a string property
-func StringProperty() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}}
-}
-
-// CharProperty creates a string property
-func CharProperty() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}}
-}
-
-// Float64Property creates a float64/double property
-func Float64Property() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "double"}}
-}
-
-// Float32Property creates a float32/float property
-func Float32Property() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "float"}}
-}
-
-// Int8Property creates an int8 property
-func Int8Property() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int8"}}
-}
-
-// Int16Property creates an int16 property
-func Int16Property() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int16"}}
-}
-
-// Int32Property creates an int32 property
-func Int32Property() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int32"}}
-}
-
-// Int64Property creates an int64 property
-func Int64Property() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int64"}}
-}
-
-// StrFmtProperty creates a property for the named string format
-func StrFmtProperty(format string) *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: format}}
-}
-
-// DateProperty creates a date property
-func DateProperty() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date"}}
-}
-
-// DateTimeProperty creates a date time property
-func DateTimeProperty() *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date-time"}}
-}
-
-// MapProperty creates a map property
-func MapProperty(property *Schema) *Schema {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"object"},
- AdditionalProperties: &SchemaOrBool{Allows: true, Schema: property}}}
-}
-
-// RefProperty creates a ref property
-func RefProperty(name string) *Schema {
- return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}}
-}
-
-// RefSchema creates a ref property
-func RefSchema(name string) *Schema {
- return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}}
-}
-
-// ArrayProperty creates an array property
-func ArrayProperty(items *Schema) *Schema {
- if items == nil {
- return &Schema{SchemaProps: SchemaProps{Type: []string{"array"}}}
- }
- return &Schema{SchemaProps: SchemaProps{Items: &SchemaOrArray{Schema: items}, Type: []string{"array"}}}
-}
-
-// ComposedSchema creates a schema with allOf
-func ComposedSchema(schemas ...Schema) *Schema {
- s := new(Schema)
- s.AllOf = schemas
- return s
-}
-
-// SchemaURL represents a schema url
-type SchemaURL string
-
-// MarshalJSON marshals this to JSON
-func (r SchemaURL) MarshalJSON() ([]byte, error) {
- if r == "" {
- return []byte("{}"), nil
- }
- v := map[string]interface{}{"$schema": string(r)}
- return json.Marshal(v)
-}
-
-// UnmarshalJSON unmarshals this from JSON
-func (r *SchemaURL) UnmarshalJSON(data []byte) error {
- var v map[string]interface{}
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- return r.fromMap(v)
-}
-
-func (r *SchemaURL) fromMap(v map[string]interface{}) error {
- if v == nil {
- return nil
- }
- if vv, ok := v["$schema"]; ok {
- if str, ok := vv.(string); ok {
- u, err := parseURL(str)
- if err != nil {
- return err
- }
-
- *r = SchemaURL(u.String())
- }
- }
- return nil
-}
-
-// SchemaProps describes a JSON schema (draft 4)
-type SchemaProps struct {
- ID string `json:"id,omitempty"`
- Ref Ref `json:"-"`
- Schema SchemaURL `json:"-"`
- Description string `json:"description,omitempty"`
- Type StringOrArray `json:"type,omitempty"`
- Nullable bool `json:"nullable,omitempty"`
- Format string `json:"format,omitempty"`
- Title string `json:"title,omitempty"`
- Default interface{} `json:"default,omitempty"`
- Maximum *float64 `json:"maximum,omitempty"`
- ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`
- Minimum *float64 `json:"minimum,omitempty"`
- ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"`
- MaxLength *int64 `json:"maxLength,omitempty"`
- MinLength *int64 `json:"minLength,omitempty"`
- Pattern string `json:"pattern,omitempty"`
- MaxItems *int64 `json:"maxItems,omitempty"`
- MinItems *int64 `json:"minItems,omitempty"`
- UniqueItems bool `json:"uniqueItems,omitempty"`
- MultipleOf *float64 `json:"multipleOf,omitempty"`
- Enum []interface{} `json:"enum,omitempty"`
- MaxProperties *int64 `json:"maxProperties,omitempty"`
- MinProperties *int64 `json:"minProperties,omitempty"`
- Required []string `json:"required,omitempty"`
- Items *SchemaOrArray `json:"items,omitempty"`
- AllOf []Schema `json:"allOf,omitempty"`
- OneOf []Schema `json:"oneOf,omitempty"`
- AnyOf []Schema `json:"anyOf,omitempty"`
- Not *Schema `json:"not,omitempty"`
- Properties SchemaProperties `json:"properties,omitempty"`
- AdditionalProperties *SchemaOrBool `json:"additionalProperties,omitempty"`
- PatternProperties SchemaProperties `json:"patternProperties,omitempty"`
- Dependencies Dependencies `json:"dependencies,omitempty"`
- AdditionalItems *SchemaOrBool `json:"additionalItems,omitempty"`
- Definitions Definitions `json:"definitions,omitempty"`
-}
-
-// SwaggerSchemaProps are additional properties supported by swagger schemas, but not JSON-schema (draft 4)
-type SwaggerSchemaProps struct {
- Discriminator string `json:"discriminator,omitempty"`
- ReadOnly bool `json:"readOnly,omitempty"`
- XML *XMLObject `json:"xml,omitempty"`
- ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
- Example interface{} `json:"example,omitempty"`
-}
-
-// Schema the schema object allows the definition of input and output data types.
-// These types can be objects, but also primitives and arrays.
-// This object is based on the [JSON Schema Specification Draft 4](http://json-schema.org/)
-// and uses a predefined subset of it.
-// On top of this subset, there are extensions provided by this specification to allow for more complete documentation.
-//
-// For more information: http://goo.gl/8us55a#schemaObject
-type Schema struct {
- VendorExtensible
- SchemaProps
- SwaggerSchemaProps
- ExtraProps map[string]interface{} `json:"-"`
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (s Schema) JSONLookup(token string) (interface{}, error) {
- if ex, ok := s.Extensions[token]; ok {
- return &ex, nil
- }
-
- if ex, ok := s.ExtraProps[token]; ok {
- return &ex, nil
- }
-
- r, _, err := jsonpointer.GetForToken(s.SchemaProps, token)
- if r != nil || (err != nil && !strings.HasPrefix(err.Error(), "object has no field")) {
- return r, err
- }
- r, _, err = jsonpointer.GetForToken(s.SwaggerSchemaProps, token)
- return r, err
-}
-
-// WithID sets the id for this schema, allows for chaining
-func (s *Schema) WithID(id string) *Schema {
- s.ID = id
- return s
-}
-
-// WithTitle sets the title for this schema, allows for chaining
-func (s *Schema) WithTitle(title string) *Schema {
- s.Title = title
- return s
-}
-
-// WithDescription sets the description for this schema, allows for chaining
-func (s *Schema) WithDescription(description string) *Schema {
- s.Description = description
- return s
-}
-
-// WithProperties sets the properties for this schema
-func (s *Schema) WithProperties(schemas map[string]Schema) *Schema {
- s.Properties = schemas
- return s
-}
-
-// SetProperty sets a property on this schema
-func (s *Schema) SetProperty(name string, schema Schema) *Schema {
- if s.Properties == nil {
- s.Properties = make(map[string]Schema)
- }
- s.Properties[name] = schema
- return s
-}
-
-// WithAllOf sets the all of property
-func (s *Schema) WithAllOf(schemas ...Schema) *Schema {
- s.AllOf = schemas
- return s
-}
-
-// WithMaxProperties sets the max number of properties an object can have
-func (s *Schema) WithMaxProperties(max int64) *Schema {
- s.MaxProperties = &max
- return s
-}
-
-// WithMinProperties sets the min number of properties an object must have
-func (s *Schema) WithMinProperties(min int64) *Schema {
- s.MinProperties = &min
- return s
-}
-
-// Typed sets the type of this schema for a single value item
-func (s *Schema) Typed(tpe, format string) *Schema {
- s.Type = []string{tpe}
- s.Format = format
- return s
-}
-
-// AddType adds a type with potential format to the types for this schema
-func (s *Schema) AddType(tpe, format string) *Schema {
- s.Type = append(s.Type, tpe)
- if format != "" {
- s.Format = format
- }
- return s
-}
-
-// AsNullable flags this schema as nullable.
-func (s *Schema) AsNullable() *Schema {
- s.Nullable = true
- return s
-}
-
-// CollectionOf is a fluent builder method for an array parameter
-func (s *Schema) CollectionOf(items Schema) *Schema {
- s.Type = []string{jsonArray}
- s.Items = &SchemaOrArray{Schema: &items}
- return s
-}
-
-// WithDefault sets the default value on this parameter
-func (s *Schema) WithDefault(defaultValue interface{}) *Schema {
- s.Default = defaultValue
- return s
-}
-
-// WithRequired flags this parameter as required
-func (s *Schema) WithRequired(items ...string) *Schema {
- s.Required = items
- return s
-}
-
-// AddRequired adds field names to the required properties array
-func (s *Schema) AddRequired(items ...string) *Schema {
- s.Required = append(s.Required, items...)
- return s
-}
-
-// WithMaxLength sets a max length value
-func (s *Schema) WithMaxLength(max int64) *Schema {
- s.MaxLength = &max
- return s
-}
-
-// WithMinLength sets a min length value
-func (s *Schema) WithMinLength(min int64) *Schema {
- s.MinLength = &min
- return s
-}
-
-// WithPattern sets a pattern value
-func (s *Schema) WithPattern(pattern string) *Schema {
- s.Pattern = pattern
- return s
-}
-
-// WithMultipleOf sets a multiple of value
-func (s *Schema) WithMultipleOf(number float64) *Schema {
- s.MultipleOf = &number
- return s
-}
-
-// WithMaximum sets a maximum number value
-func (s *Schema) WithMaximum(max float64, exclusive bool) *Schema {
- s.Maximum = &max
- s.ExclusiveMaximum = exclusive
- return s
-}
-
-// WithMinimum sets a minimum number value
-func (s *Schema) WithMinimum(min float64, exclusive bool) *Schema {
- s.Minimum = &min
- s.ExclusiveMinimum = exclusive
- return s
-}
-
-// WithEnum sets the enum values (replace)
-func (s *Schema) WithEnum(values ...interface{}) *Schema {
- s.Enum = append([]interface{}{}, values...)
- return s
-}
-
-// WithMaxItems sets the max items
-func (s *Schema) WithMaxItems(size int64) *Schema {
- s.MaxItems = &size
- return s
-}
-
-// WithMinItems sets the min items
-func (s *Schema) WithMinItems(size int64) *Schema {
- s.MinItems = &size
- return s
-}
-
-// UniqueValues dictates that this array can only have unique items
-func (s *Schema) UniqueValues() *Schema {
- s.UniqueItems = true
- return s
-}
-
-// AllowDuplicates this array can have duplicates
-func (s *Schema) AllowDuplicates() *Schema {
- s.UniqueItems = false
- return s
-}
-
-// AddToAllOf adds a schema to the allOf property
-func (s *Schema) AddToAllOf(schemas ...Schema) *Schema {
- s.AllOf = append(s.AllOf, schemas...)
- return s
-}
-
-// WithDiscriminator sets the name of the discriminator field
-func (s *Schema) WithDiscriminator(discriminator string) *Schema {
- s.Discriminator = discriminator
- return s
-}
-
-// AsReadOnly flags this schema as readonly
-func (s *Schema) AsReadOnly() *Schema {
- s.ReadOnly = true
- return s
-}
-
-// AsWritable flags this schema as writeable (not read-only)
-func (s *Schema) AsWritable() *Schema {
- s.ReadOnly = false
- return s
-}
-
-// WithExample sets the example for this schema
-func (s *Schema) WithExample(example interface{}) *Schema {
- s.Example = example
- return s
-}
-
-// WithExternalDocs sets/removes the external docs for/from this schema.
-// When you pass empty strings for both params, the external docs object is removed.
-// When you pass a non-empty string for either value, both values are set on the external docs object.
-// So when you pass a non-empty description, you should also pass the url and vice versa.
-func (s *Schema) WithExternalDocs(description, url string) *Schema {
- if description == "" && url == "" {
- s.ExternalDocs = nil
- return s
- }
-
- if s.ExternalDocs == nil {
- s.ExternalDocs = &ExternalDocumentation{}
- }
- s.ExternalDocs.Description = description
- s.ExternalDocs.URL = url
- return s
-}
-
-// WithXMLName sets the xml name for the object
-func (s *Schema) WithXMLName(name string) *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Name = name
- return s
-}
-
-// WithXMLNamespace sets the xml namespace for the object
-func (s *Schema) WithXMLNamespace(namespace string) *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Namespace = namespace
- return s
-}
-
-// WithXMLPrefix sets the xml prefix for the object
-func (s *Schema) WithXMLPrefix(prefix string) *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Prefix = prefix
- return s
-}
-
-// AsXMLAttribute flags this object as xml attribute
-func (s *Schema) AsXMLAttribute() *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Attribute = true
- return s
-}
-
-// AsXMLElement flags this object as an xml node
-func (s *Schema) AsXMLElement() *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Attribute = false
- return s
-}
-
-// AsWrappedXML flags this object as wrapped, this is mostly useful for array types
-func (s *Schema) AsWrappedXML() *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Wrapped = true
- return s
-}
-
-// AsUnwrappedXML flags this object as an xml node
-func (s *Schema) AsUnwrappedXML() *Schema {
- if s.XML == nil {
- s.XML = new(XMLObject)
- }
- s.XML.Wrapped = false
- return s
-}
-
-// SetValidations defines all schema validations.
-//
-// NOTE: Required, ReadOnly, AllOf, AnyOf, OneOf and Not are not considered.
-func (s *Schema) SetValidations(val SchemaValidations) {
- s.Maximum = val.Maximum
- s.ExclusiveMaximum = val.ExclusiveMaximum
- s.Minimum = val.Minimum
- s.ExclusiveMinimum = val.ExclusiveMinimum
- s.MaxLength = val.MaxLength
- s.MinLength = val.MinLength
- s.Pattern = val.Pattern
- s.MaxItems = val.MaxItems
- s.MinItems = val.MinItems
- s.UniqueItems = val.UniqueItems
- s.MultipleOf = val.MultipleOf
- s.Enum = val.Enum
- s.MinProperties = val.MinProperties
- s.MaxProperties = val.MaxProperties
- s.PatternProperties = val.PatternProperties
-}
-
-// WithValidations is a fluent method to set schema validations
-func (s *Schema) WithValidations(val SchemaValidations) *Schema {
- s.SetValidations(val)
- return s
-}
-
-// Validations returns a clone of the validations for this schema
-func (s Schema) Validations() SchemaValidations {
- return SchemaValidations{
- CommonValidations: CommonValidations{
- Maximum: s.Maximum,
- ExclusiveMaximum: s.ExclusiveMaximum,
- Minimum: s.Minimum,
- ExclusiveMinimum: s.ExclusiveMinimum,
- MaxLength: s.MaxLength,
- MinLength: s.MinLength,
- Pattern: s.Pattern,
- MaxItems: s.MaxItems,
- MinItems: s.MinItems,
- UniqueItems: s.UniqueItems,
- MultipleOf: s.MultipleOf,
- Enum: s.Enum,
- },
- MinProperties: s.MinProperties,
- MaxProperties: s.MaxProperties,
- PatternProperties: s.PatternProperties,
- }
-}
-
-// MarshalJSON marshals this to JSON
-func (s Schema) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(s.SchemaProps)
- if err != nil {
- return nil, fmt.Errorf("schema props %v", err)
- }
- b2, err := json.Marshal(s.VendorExtensible)
- if err != nil {
- return nil, fmt.Errorf("vendor props %v", err)
- }
- b3, err := s.Ref.MarshalJSON()
- if err != nil {
- return nil, fmt.Errorf("ref prop %v", err)
- }
- b4, err := s.Schema.MarshalJSON()
- if err != nil {
- return nil, fmt.Errorf("schema prop %v", err)
- }
- b5, err := json.Marshal(s.SwaggerSchemaProps)
- if err != nil {
- return nil, fmt.Errorf("common validations %v", err)
- }
- var b6 []byte
- if s.ExtraProps != nil {
- jj, err := json.Marshal(s.ExtraProps)
- if err != nil {
- return nil, fmt.Errorf("extra props %v", err)
- }
- b6 = jj
- }
- return swag.ConcatJSON(b1, b2, b3, b4, b5, b6), nil
-}
-
-// UnmarshalJSON unmarshals this from JSON
-func (s *Schema) UnmarshalJSON(data []byte) error {
- props := struct {
- SchemaProps
- SwaggerSchemaProps
- }{}
- if err := json.Unmarshal(data, &props); err != nil {
- return err
- }
-
- sch := Schema{
- SchemaProps: props.SchemaProps,
- SwaggerSchemaProps: props.SwaggerSchemaProps,
- }
-
- var d map[string]interface{}
- if err := json.Unmarshal(data, &d); err != nil {
- return err
- }
-
- _ = sch.Ref.fromMap(d)
- _ = sch.Schema.fromMap(d)
-
- delete(d, "$ref")
- delete(d, "$schema")
- for _, pn := range swag.DefaultJSONNameProvider.GetJSONNames(s) {
- delete(d, pn)
- }
-
- for k, vv := range d {
- lk := strings.ToLower(k)
- if strings.HasPrefix(lk, "x-") {
- if sch.Extensions == nil {
- sch.Extensions = map[string]interface{}{}
- }
- sch.Extensions[k] = vv
- continue
- }
- if sch.ExtraProps == nil {
- sch.ExtraProps = map[string]interface{}{}
- }
- sch.ExtraProps[k] = vv
- }
-
- *s = sch
-
- return nil
-}
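
A minimal sketch of the fluent Schema builders deleted above; the property names and constraints are made up for illustration, and only constructors and methods shown in this file are used.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Compose an object schema from the typed property constructors.
	user := (&spec.Schema{}).
		Typed("object", "").
		WithTitle("User").
		SetProperty("name", *spec.StringProperty().WithMinLength(1)).
		SetProperty("age", *spec.Int32Property().WithMinimum(0, false)).
		WithRequired("name")

	out, err := json.MarshalIndent(user, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```
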
diff --git a/vendor/github.com/go-openapi/spec/schema_loader.go b/vendor/github.com/go-openapi/spec/schema_loader.go
deleted file mode 100644
index 0059b99aed..0000000000
--- a/vendor/github.com/go-openapi/spec/schema_loader.go
+++ /dev/null
@@ -1,331 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
- "fmt"
- "log"
- "net/url"
- "reflect"
- "strings"
-
- "github.com/go-openapi/swag"
-)
-
-// PathLoader is a function to use when loading remote refs.
-//
-// This is a package level default. It may be overridden or bypassed by
-// specifying the loader in ExpandOptions.
-//
-// NOTE: if you are using the go-openapi/loads package, it will override
-// this value with its own default (a loader to retrieve YAML documents as
-// well as JSON ones).
-var PathLoader = func(pth string) (json.RawMessage, error) {
- data, err := swag.LoadFromFileOrHTTP(pth)
- if err != nil {
- return nil, err
- }
- return json.RawMessage(data), nil
-}
-
-// resolverContext allows sharing a context during spec processing.
-// At the moment, it just holds the index of circular references found.
-type resolverContext struct {
-	// circulars holds all visited circular references, to short-circuit $ref resolution.
-	//
-	// This structure is privately instantiated and need not be locked against
-	// concurrent access, unless we choose to implement parallel spec walking.
- circulars map[string]bool
- basePath string
- loadDoc func(string) (json.RawMessage, error)
- rootID string
-}
-
-func newResolverContext(options *ExpandOptions) *resolverContext {
- expandOptions := optionsOrDefault(options)
-
- // path loader may be overridden by options
- var loader func(string) (json.RawMessage, error)
- if expandOptions.PathLoader == nil {
- loader = PathLoader
- } else {
- loader = expandOptions.PathLoader
- }
-
- return &resolverContext{
- circulars: make(map[string]bool),
- basePath: expandOptions.RelativeBase, // keep the root base path in context
- loadDoc: loader,
- }
-}
-
-type schemaLoader struct {
- root interface{}
- options *ExpandOptions
- cache ResolutionCache
- context *resolverContext
-}
-
-func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) *schemaLoader {
- if ref.IsRoot() || ref.HasFragmentOnly {
- return r
- }
-
- baseRef := MustCreateRef(basePath)
- currentRef := normalizeRef(&ref, basePath)
- if strings.HasPrefix(currentRef.String(), baseRef.String()) {
- return r
- }
-
- // set a new root against which to resolve
- rootURL := currentRef.GetURL()
- rootURL.Fragment = ""
- root, _ := r.cache.Get(rootURL.String())
-
- // shallow copy of resolver options to set a new RelativeBase when
- // traversing multiple documents
- newOptions := r.options
- newOptions.RelativeBase = rootURL.String()
-
- return defaultSchemaLoader(root, newOptions, r.cache, r.context)
-}
-
-func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string {
- if transitive != r {
- if transitive.options != nil && transitive.options.RelativeBase != "" {
- return normalizeBase(transitive.options.RelativeBase)
- }
- }
-
- return basePath
-}
-
-func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string) error {
- tgt := reflect.ValueOf(target)
- if tgt.Kind() != reflect.Ptr {
- return ErrResolveRefNeedsAPointer
- }
-
- if ref.GetURL() == nil {
- return nil
- }
-
- var (
- res interface{}
- data interface{}
- err error
- )
-
- // Resolve against the root if it isn't nil, and if ref is pointing at the root, or has a fragment only which means
- // it is pointing somewhere in the root.
- root := r.root
- if (ref.IsRoot() || ref.HasFragmentOnly) && root == nil && basePath != "" {
- if baseRef, erb := NewRef(basePath); erb == nil {
- root, _, _, _ = r.load(baseRef.GetURL())
- }
- }
-
- if (ref.IsRoot() || ref.HasFragmentOnly) && root != nil {
- data = root
- } else {
- baseRef := normalizeRef(ref, basePath)
- data, _, _, err = r.load(baseRef.GetURL())
- if err != nil {
- return err
- }
- }
-
- res = data
- if ref.String() != "" {
- res, _, err = ref.GetPointer().Get(data)
- if err != nil {
- return err
- }
- }
- return swag.DynamicJSONToStruct(res, target)
-}
-
-func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error) {
- debugLog("loading schema from url: %s", refURL)
- toFetch := *refURL
- toFetch.Fragment = ""
-
- var err error
- pth := toFetch.String()
- normalized := normalizeBase(pth)
- debugLog("loading doc from: %s", normalized)
-
- data, fromCache := r.cache.Get(normalized)
- if fromCache {
- return data, toFetch, fromCache, nil
- }
-
- b, err := r.context.loadDoc(normalized)
- if err != nil {
- return nil, url.URL{}, false, err
- }
-
- var doc interface{}
- if err := json.Unmarshal(b, &doc); err != nil {
- return nil, url.URL{}, false, err
- }
- r.cache.Set(normalized, doc)
-
- return doc, toFetch, fromCache, nil
-}
-
-// isCircular detects cycles in sequences of $ref.
-//
-// It relies on a private context (which need not be locked).
-func (r *schemaLoader) isCircular(ref *Ref, basePath string, parentRefs ...string) (foundCycle bool) {
- normalizedRef := normalizeURI(ref.String(), basePath)
- if _, ok := r.context.circulars[normalizedRef]; ok {
- // circular $ref has been already detected in another explored cycle
- foundCycle = true
- return
- }
- foundCycle = swag.ContainsStrings(parentRefs, normalizedRef) // normalized windows url's are lower cased
- if foundCycle {
- r.context.circulars[normalizedRef] = true
- }
- return
-}
-
-// Resolve resolves a reference against basePath and stores the result in target.
-//
-// Resolve is not in charge of following references: it only resolves ref by following its URL.
-//
-// If the schema the ref is referring to holds nested refs, Resolve doesn't resolve them.
-//
-// If basePath is an empty string, ref is resolved against the root schema stored in the schemaLoader struct
-func (r *schemaLoader) Resolve(ref *Ref, target interface{}, basePath string) error {
- return r.resolveRef(ref, target, basePath)
-}
-
-func (r *schemaLoader) deref(input interface{}, parentRefs []string, basePath string) error {
- var ref *Ref
- switch refable := input.(type) {
- case *Schema:
- ref = &refable.Ref
- case *Parameter:
- ref = &refable.Ref
- case *Response:
- ref = &refable.Ref
- case *PathItem:
- ref = &refable.Ref
- default:
- return fmt.Errorf("unsupported type: %T: %w", input, ErrDerefUnsupportedType)
- }
-
- curRef := ref.String()
- if curRef == "" {
- return nil
- }
-
- normalizedRef := normalizeRef(ref, basePath)
- normalizedBasePath := normalizedRef.RemoteURI()
-
- if r.isCircular(normalizedRef, basePath, parentRefs...) {
- return nil
- }
-
- if err := r.resolveRef(ref, input, basePath); r.shouldStopOnError(err) {
- return err
- }
-
- if ref.String() == "" || ref.String() == curRef {
- // done with rereferencing
- return nil
- }
-
- parentRefs = append(parentRefs, normalizedRef.String())
- return r.deref(input, parentRefs, normalizedBasePath)
-}
-
-func (r *schemaLoader) shouldStopOnError(err error) bool {
- if err != nil && !r.options.ContinueOnError {
- return true
- }
-
- if err != nil {
- log.Println(err)
- }
-
- return false
-}
-
-func (r *schemaLoader) setSchemaID(target interface{}, id, basePath string) (string, string) {
- debugLog("schema has ID: %s", id)
-
- // handling the case when id is a folder
- // remember that basePath has to point to a file
- var refPath string
- if strings.HasSuffix(id, "/") {
- // ensure this is detected as a file, not a folder
- refPath = fmt.Sprintf("%s%s", id, "placeholder.json")
- } else {
- refPath = id
- }
-
- // updates the current base path
- // * important: ID can be a relative path
- // * registers target to be fetchable from the new base proposed by this id
- newBasePath := normalizeURI(refPath, basePath)
-
- // store found IDs for possible future reuse in $ref
- r.cache.Set(newBasePath, target)
-
- // the root document has an ID: all $ref relative to that ID may
- // be rebased relative to the root document
- if basePath == r.context.basePath {
-		debugLog("root document is a schema with ID: %s (normalized as: %s)", id, newBasePath)
- r.context.rootID = newBasePath
- }
-
- return newBasePath, refPath
-}
-
-func defaultSchemaLoader(
- root interface{},
- expandOptions *ExpandOptions,
- cache ResolutionCache,
- context *resolverContext) *schemaLoader {
-
- if expandOptions == nil {
- expandOptions = &ExpandOptions{}
- }
-
- cache = cacheOrDefault(cache)
-
- if expandOptions.RelativeBase == "" {
- // if no relative base is provided, assume the root document
- // contains all $ref, or at least, that the relative documents
- // may be resolved from the current working directory.
- expandOptions.RelativeBase = baseForRoot(root, cache)
- }
- debugLog("effective expander options: %#v", expandOptions)
-
- if context == nil {
- context = newResolverContext(expandOptions)
- }
-
- return &schemaLoader{
- root: root,
- options: expandOptions,
- cache: cache,
- context: context,
- }
-}
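
To illustrate the loader machinery above: a hedged sketch of expanding a spec while supplying a caller-defined PathLoader through ExpandOptions, so relative $ref documents are read from the local filesystem instead of the package-level default loader. The file name swagger.json and the use of spec.ExpandSpec/spec.Swagger are assumptions about the surrounding go-openapi/spec API, not shown in this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/go-openapi/spec"
)

func main() {
	raw, err := os.ReadFile("swagger.json")
	if err != nil {
		panic(err)
	}

	var sw spec.Swagger
	if err := json.Unmarshal(raw, &sw); err != nil {
		panic(err)
	}

	// Override the package-level PathLoader for this expansion only:
	// any referenced document is loaded from disk relative to swagger.json.
	opts := &spec.ExpandOptions{
		RelativeBase: "swagger.json",
		PathLoader: func(pth string) (json.RawMessage, error) {
			b, err := os.ReadFile(pth)
			return json.RawMessage(b), err
		},
	}
	if err := spec.ExpandSpec(&sw, opts); err != nil {
		panic(err)
	}
	fmt.Printf("expanded %d definitions\n", len(sw.Definitions))
}
```
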
diff --git a/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json
deleted file mode 100644
index bcbb84743e..0000000000
--- a/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json
+++ /dev/null
@@ -1,149 +0,0 @@
-{
- "id": "http://json-schema.org/draft-04/schema#",
- "$schema": "http://json-schema.org/draft-04/schema#",
- "description": "Core schema meta-schema",
- "definitions": {
- "schemaArray": {
- "type": "array",
- "minItems": 1,
- "items": { "$ref": "#" }
- },
- "positiveInteger": {
- "type": "integer",
- "minimum": 0
- },
- "positiveIntegerDefault0": {
- "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
- },
- "simpleTypes": {
- "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
- },
- "stringArray": {
- "type": "array",
- "items": { "type": "string" },
- "minItems": 1,
- "uniqueItems": true
- }
- },
- "type": "object",
- "properties": {
- "id": {
- "type": "string"
- },
- "$schema": {
- "type": "string"
- },
- "title": {
- "type": "string"
- },
- "description": {
- "type": "string"
- },
- "default": {},
- "multipleOf": {
- "type": "number",
- "minimum": 0,
- "exclusiveMinimum": true
- },
- "maximum": {
- "type": "number"
- },
- "exclusiveMaximum": {
- "type": "boolean",
- "default": false
- },
- "minimum": {
- "type": "number"
- },
- "exclusiveMinimum": {
- "type": "boolean",
- "default": false
- },
- "maxLength": { "$ref": "#/definitions/positiveInteger" },
- "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
- "pattern": {
- "type": "string",
- "format": "regex"
- },
- "additionalItems": {
- "anyOf": [
- { "type": "boolean" },
- { "$ref": "#" }
- ],
- "default": {}
- },
- "items": {
- "anyOf": [
- { "$ref": "#" },
- { "$ref": "#/definitions/schemaArray" }
- ],
- "default": {}
- },
- "maxItems": { "$ref": "#/definitions/positiveInteger" },
- "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
- "uniqueItems": {
- "type": "boolean",
- "default": false
- },
- "maxProperties": { "$ref": "#/definitions/positiveInteger" },
- "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
- "required": { "$ref": "#/definitions/stringArray" },
- "additionalProperties": {
- "anyOf": [
- { "type": "boolean" },
- { "$ref": "#" }
- ],
- "default": {}
- },
- "definitions": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "properties": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "patternProperties": {
- "type": "object",
- "additionalProperties": { "$ref": "#" },
- "default": {}
- },
- "dependencies": {
- "type": "object",
- "additionalProperties": {
- "anyOf": [
- { "$ref": "#" },
- { "$ref": "#/definitions/stringArray" }
- ]
- }
- },
- "enum": {
- "type": "array",
- "minItems": 1,
- "uniqueItems": true
- },
- "type": {
- "anyOf": [
- { "$ref": "#/definitions/simpleTypes" },
- {
- "type": "array",
- "items": { "$ref": "#/definitions/simpleTypes" },
- "minItems": 1,
- "uniqueItems": true
- }
- ]
- },
- "format": { "type": "string" },
- "allOf": { "$ref": "#/definitions/schemaArray" },
- "anyOf": { "$ref": "#/definitions/schemaArray" },
- "oneOf": { "$ref": "#/definitions/schemaArray" },
- "not": { "$ref": "#" }
- },
- "dependencies": {
- "exclusiveMaximum": [ "maximum" ],
- "exclusiveMinimum": [ "minimum" ]
- },
- "default": {}
-}
diff --git a/vendor/github.com/go-openapi/spec/schemas/v2/schema.json b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json
deleted file mode 100644
index ebe10ed32d..0000000000
--- a/vendor/github.com/go-openapi/spec/schemas/v2/schema.json
+++ /dev/null
@@ -1,1607 +0,0 @@
-{
- "title": "A JSON Schema for Swagger 2.0 API.",
- "id": "http://swagger.io/v2/schema.json#",
- "$schema": "http://json-schema.org/draft-04/schema#",
- "type": "object",
- "required": [
- "swagger",
- "info",
- "paths"
- ],
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "swagger": {
- "type": "string",
- "enum": [
- "2.0"
- ],
- "description": "The Swagger version of this document."
- },
- "info": {
- "$ref": "#/definitions/info"
- },
- "host": {
- "type": "string",
- "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$",
- "description": "The host (name or ip) of the API. Example: 'swagger.io'"
- },
- "basePath": {
- "type": "string",
- "pattern": "^/",
- "description": "The base path to the API. Example: '/api'."
- },
- "schemes": {
- "$ref": "#/definitions/schemesList"
- },
- "consumes": {
- "description": "A list of MIME types accepted by the API.",
- "allOf": [
- {
- "$ref": "#/definitions/mediaTypeList"
- }
- ]
- },
- "produces": {
- "description": "A list of MIME types the API can produce.",
- "allOf": [
- {
- "$ref": "#/definitions/mediaTypeList"
- }
- ]
- },
- "paths": {
- "$ref": "#/definitions/paths"
- },
- "definitions": {
- "$ref": "#/definitions/definitions"
- },
- "parameters": {
- "$ref": "#/definitions/parameterDefinitions"
- },
- "responses": {
- "$ref": "#/definitions/responseDefinitions"
- },
- "security": {
- "$ref": "#/definitions/security"
- },
- "securityDefinitions": {
- "$ref": "#/definitions/securityDefinitions"
- },
- "tags": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/tag"
- },
- "uniqueItems": true
- },
- "externalDocs": {
- "$ref": "#/definitions/externalDocs"
- }
- },
- "definitions": {
- "info": {
- "type": "object",
- "description": "General information about the API.",
- "required": [
- "version",
- "title"
- ],
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "title": {
- "type": "string",
- "description": "A unique and precise title of the API."
- },
- "version": {
- "type": "string",
- "description": "A semantic version number of the API."
- },
- "description": {
- "type": "string",
- "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed."
- },
- "termsOfService": {
- "type": "string",
- "description": "The terms of service for the API."
- },
- "contact": {
- "$ref": "#/definitions/contact"
- },
- "license": {
- "$ref": "#/definitions/license"
- }
- }
- },
- "contact": {
- "type": "object",
- "description": "Contact information for the owners of the API.",
- "additionalProperties": false,
- "properties": {
- "name": {
- "type": "string",
- "description": "The identifying name of the contact person/organization."
- },
- "url": {
- "type": "string",
- "description": "The URL pointing to the contact information.",
- "format": "uri"
- },
- "email": {
- "type": "string",
- "description": "The email address of the contact person/organization.",
- "format": "email"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "license": {
- "type": "object",
- "required": [
- "name"
- ],
- "additionalProperties": false,
- "properties": {
- "name": {
- "type": "string",
- "description": "The name of the license type. It's encouraged to use an OSI compatible license."
- },
- "url": {
- "type": "string",
- "description": "The URL pointing to the license.",
- "format": "uri"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "paths": {
- "type": "object",
- "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.",
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- },
- "^/": {
- "$ref": "#/definitions/pathItem"
- }
- },
- "additionalProperties": false
- },
- "definitions": {
- "type": "object",
- "additionalProperties": {
- "$ref": "#/definitions/schema"
- },
- "description": "One or more JSON objects describing the schemas being consumed and produced by the API."
- },
- "parameterDefinitions": {
- "type": "object",
- "additionalProperties": {
- "$ref": "#/definitions/parameter"
- },
- "description": "One or more JSON representations for parameters"
- },
- "responseDefinitions": {
- "type": "object",
- "additionalProperties": {
- "$ref": "#/definitions/response"
- },
- "description": "One or more JSON representations for responses"
- },
- "externalDocs": {
- "type": "object",
- "additionalProperties": false,
- "description": "information about external documentation",
- "required": [
- "url"
- ],
- "properties": {
- "description": {
- "type": "string"
- },
- "url": {
- "type": "string",
- "format": "uri"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "examples": {
- "type": "object",
- "additionalProperties": true
- },
- "mimeType": {
- "type": "string",
- "description": "The MIME type of the HTTP message."
- },
- "operation": {
- "type": "object",
- "required": [
- "responses"
- ],
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "tags": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- },
- "summary": {
- "type": "string",
- "description": "A brief summary of the operation."
- },
- "description": {
- "type": "string",
- "description": "A longer description of the operation, GitHub Flavored Markdown is allowed."
- },
- "externalDocs": {
- "$ref": "#/definitions/externalDocs"
- },
- "operationId": {
- "type": "string",
- "description": "A unique identifier of the operation."
- },
- "produces": {
- "description": "A list of MIME types the API can produce.",
- "allOf": [
- {
- "$ref": "#/definitions/mediaTypeList"
- }
- ]
- },
- "consumes": {
- "description": "A list of MIME types the API can consume.",
- "allOf": [
- {
- "$ref": "#/definitions/mediaTypeList"
- }
- ]
- },
- "parameters": {
- "$ref": "#/definitions/parametersList"
- },
- "responses": {
- "$ref": "#/definitions/responses"
- },
- "schemes": {
- "$ref": "#/definitions/schemesList"
- },
- "deprecated": {
- "type": "boolean",
- "default": false
- },
- "security": {
- "$ref": "#/definitions/security"
- }
- }
- },
- "pathItem": {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "$ref": {
- "type": "string"
- },
- "get": {
- "$ref": "#/definitions/operation"
- },
- "put": {
- "$ref": "#/definitions/operation"
- },
- "post": {
- "$ref": "#/definitions/operation"
- },
- "delete": {
- "$ref": "#/definitions/operation"
- },
- "options": {
- "$ref": "#/definitions/operation"
- },
- "head": {
- "$ref": "#/definitions/operation"
- },
- "patch": {
- "$ref": "#/definitions/operation"
- },
- "parameters": {
- "$ref": "#/definitions/parametersList"
- }
- }
- },
- "responses": {
- "type": "object",
- "description": "Response objects names can either be any valid HTTP status code or 'default'.",
- "minProperties": 1,
- "additionalProperties": false,
- "patternProperties": {
- "^([0-9]{3})$|^(default)$": {
- "$ref": "#/definitions/responseValue"
- },
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "not": {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- }
- },
- "responseValue": {
- "oneOf": [
- {
- "$ref": "#/definitions/response"
- },
- {
- "$ref": "#/definitions/jsonReference"
- }
- ]
- },
- "response": {
- "type": "object",
- "required": [
- "description"
- ],
- "properties": {
- "description": {
- "type": "string"
- },
- "schema": {
- "oneOf": [
- {
- "$ref": "#/definitions/schema"
- },
- {
- "$ref": "#/definitions/fileSchema"
- }
- ]
- },
- "headers": {
- "$ref": "#/definitions/headers"
- },
- "examples": {
- "$ref": "#/definitions/examples"
- }
- },
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "headers": {
- "type": "object",
- "additionalProperties": {
- "$ref": "#/definitions/header"
- }
- },
- "header": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "string",
- "number",
- "integer",
- "boolean",
- "array"
- ]
- },
- "format": {
- "type": "string"
- },
- "items": {
- "$ref": "#/definitions/primitivesItems"
- },
- "collectionFormat": {
- "$ref": "#/definitions/collectionFormat"
- },
- "default": {
- "$ref": "#/definitions/default"
- },
- "maximum": {
- "$ref": "#/definitions/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "#/definitions/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "#/definitions/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "#/definitions/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "#/definitions/maxLength"
- },
- "minLength": {
- "$ref": "#/definitions/minLength"
- },
- "pattern": {
- "$ref": "#/definitions/pattern"
- },
- "maxItems": {
- "$ref": "#/definitions/maxItems"
- },
- "minItems": {
- "$ref": "#/definitions/minItems"
- },
- "uniqueItems": {
- "$ref": "#/definitions/uniqueItems"
- },
- "enum": {
- "$ref": "#/definitions/enum"
- },
- "multipleOf": {
- "$ref": "#/definitions/multipleOf"
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "vendorExtension": {
- "description": "Any property starting with x- is valid.",
- "additionalProperties": true,
- "additionalItems": true
- },
- "bodyParameter": {
- "type": "object",
- "required": [
- "name",
- "in",
- "schema"
- ],
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "description": {
- "type": "string",
- "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
- },
- "name": {
- "type": "string",
- "description": "The name of the parameter."
- },
- "in": {
- "type": "string",
- "description": "Determines the location of the parameter.",
- "enum": [
- "body"
- ]
- },
- "required": {
- "type": "boolean",
- "description": "Determines whether or not this parameter is required or optional.",
- "default": false
- },
- "schema": {
- "$ref": "#/definitions/schema"
- }
- },
- "additionalProperties": false
- },
- "headerParameterSubSchema": {
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "required": {
- "type": "boolean",
- "description": "Determines whether or not this parameter is required or optional.",
- "default": false
- },
- "in": {
- "type": "string",
- "description": "Determines the location of the parameter.",
- "enum": [
- "header"
- ]
- },
- "description": {
- "type": "string",
- "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
- },
- "name": {
- "type": "string",
- "description": "The name of the parameter."
- },
- "type": {
- "type": "string",
- "enum": [
- "string",
- "number",
- "boolean",
- "integer",
- "array"
- ]
- },
- "format": {
- "type": "string"
- },
- "items": {
- "$ref": "#/definitions/primitivesItems"
- },
- "collectionFormat": {
- "$ref": "#/definitions/collectionFormat"
- },
- "default": {
- "$ref": "#/definitions/default"
- },
- "maximum": {
- "$ref": "#/definitions/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "#/definitions/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "#/definitions/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "#/definitions/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "#/definitions/maxLength"
- },
- "minLength": {
- "$ref": "#/definitions/minLength"
- },
- "pattern": {
- "$ref": "#/definitions/pattern"
- },
- "maxItems": {
- "$ref": "#/definitions/maxItems"
- },
- "minItems": {
- "$ref": "#/definitions/minItems"
- },
- "uniqueItems": {
- "$ref": "#/definitions/uniqueItems"
- },
- "enum": {
- "$ref": "#/definitions/enum"
- },
- "multipleOf": {
- "$ref": "#/definitions/multipleOf"
- }
- }
- },
- "queryParameterSubSchema": {
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "required": {
- "type": "boolean",
- "description": "Determines whether or not this parameter is required or optional.",
- "default": false
- },
- "in": {
- "type": "string",
- "description": "Determines the location of the parameter.",
- "enum": [
- "query"
- ]
- },
- "description": {
- "type": "string",
- "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
- },
- "name": {
- "type": "string",
- "description": "The name of the parameter."
- },
- "allowEmptyValue": {
- "type": "boolean",
- "default": false,
- "description": "allows sending a parameter by name only or with an empty value."
- },
- "type": {
- "type": "string",
- "enum": [
- "string",
- "number",
- "boolean",
- "integer",
- "array"
- ]
- },
- "format": {
- "type": "string"
- },
- "items": {
- "$ref": "#/definitions/primitivesItems"
- },
- "collectionFormat": {
- "$ref": "#/definitions/collectionFormatWithMulti"
- },
- "default": {
- "$ref": "#/definitions/default"
- },
- "maximum": {
- "$ref": "#/definitions/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "#/definitions/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "#/definitions/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "#/definitions/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "#/definitions/maxLength"
- },
- "minLength": {
- "$ref": "#/definitions/minLength"
- },
- "pattern": {
- "$ref": "#/definitions/pattern"
- },
- "maxItems": {
- "$ref": "#/definitions/maxItems"
- },
- "minItems": {
- "$ref": "#/definitions/minItems"
- },
- "uniqueItems": {
- "$ref": "#/definitions/uniqueItems"
- },
- "enum": {
- "$ref": "#/definitions/enum"
- },
- "multipleOf": {
- "$ref": "#/definitions/multipleOf"
- }
- }
- },
- "formDataParameterSubSchema": {
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "required": {
- "type": "boolean",
- "description": "Determines whether or not this parameter is required or optional.",
- "default": false
- },
- "in": {
- "type": "string",
- "description": "Determines the location of the parameter.",
- "enum": [
- "formData"
- ]
- },
- "description": {
- "type": "string",
- "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
- },
- "name": {
- "type": "string",
- "description": "The name of the parameter."
- },
- "allowEmptyValue": {
- "type": "boolean",
- "default": false,
- "description": "allows sending a parameter by name only or with an empty value."
- },
- "type": {
- "type": "string",
- "enum": [
- "string",
- "number",
- "boolean",
- "integer",
- "array",
- "file"
- ]
- },
- "format": {
- "type": "string"
- },
- "items": {
- "$ref": "#/definitions/primitivesItems"
- },
- "collectionFormat": {
- "$ref": "#/definitions/collectionFormatWithMulti"
- },
- "default": {
- "$ref": "#/definitions/default"
- },
- "maximum": {
- "$ref": "#/definitions/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "#/definitions/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "#/definitions/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "#/definitions/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "#/definitions/maxLength"
- },
- "minLength": {
- "$ref": "#/definitions/minLength"
- },
- "pattern": {
- "$ref": "#/definitions/pattern"
- },
- "maxItems": {
- "$ref": "#/definitions/maxItems"
- },
- "minItems": {
- "$ref": "#/definitions/minItems"
- },
- "uniqueItems": {
- "$ref": "#/definitions/uniqueItems"
- },
- "enum": {
- "$ref": "#/definitions/enum"
- },
- "multipleOf": {
- "$ref": "#/definitions/multipleOf"
- }
- }
- },
- "pathParameterSubSchema": {
- "additionalProperties": false,
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "required": [
- "required"
- ],
- "properties": {
- "required": {
- "type": "boolean",
- "enum": [
- true
- ],
- "description": "Determines whether or not this parameter is required or optional."
- },
- "in": {
- "type": "string",
- "description": "Determines the location of the parameter.",
- "enum": [
- "path"
- ]
- },
- "description": {
- "type": "string",
- "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed."
- },
- "name": {
- "type": "string",
- "description": "The name of the parameter."
- },
- "type": {
- "type": "string",
- "enum": [
- "string",
- "number",
- "boolean",
- "integer",
- "array"
- ]
- },
- "format": {
- "type": "string"
- },
- "items": {
- "$ref": "#/definitions/primitivesItems"
- },
- "collectionFormat": {
- "$ref": "#/definitions/collectionFormat"
- },
- "default": {
- "$ref": "#/definitions/default"
- },
- "maximum": {
- "$ref": "#/definitions/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "#/definitions/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "#/definitions/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "#/definitions/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "#/definitions/maxLength"
- },
- "minLength": {
- "$ref": "#/definitions/minLength"
- },
- "pattern": {
- "$ref": "#/definitions/pattern"
- },
- "maxItems": {
- "$ref": "#/definitions/maxItems"
- },
- "minItems": {
- "$ref": "#/definitions/minItems"
- },
- "uniqueItems": {
- "$ref": "#/definitions/uniqueItems"
- },
- "enum": {
- "$ref": "#/definitions/enum"
- },
- "multipleOf": {
- "$ref": "#/definitions/multipleOf"
- }
- }
- },
- "nonBodyParameter": {
- "type": "object",
- "required": [
- "name",
- "in",
- "type"
- ],
- "oneOf": [
- {
- "$ref": "#/definitions/headerParameterSubSchema"
- },
- {
- "$ref": "#/definitions/formDataParameterSubSchema"
- },
- {
- "$ref": "#/definitions/queryParameterSubSchema"
- },
- {
- "$ref": "#/definitions/pathParameterSubSchema"
- }
- ]
- },
- "parameter": {
- "oneOf": [
- {
- "$ref": "#/definitions/bodyParameter"
- },
- {
- "$ref": "#/definitions/nonBodyParameter"
- }
- ]
- },
- "schema": {
- "type": "object",
- "description": "A deterministic version of a JSON Schema object.",
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "properties": {
- "$ref": {
- "type": "string"
- },
- "format": {
- "type": "string"
- },
- "title": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/title"
- },
- "description": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/description"
- },
- "default": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/default"
- },
- "multipleOf": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf"
- },
- "maximum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
- },
- "minLength": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
- },
- "pattern": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern"
- },
- "maxItems": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
- },
- "minItems": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
- },
- "uniqueItems": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems"
- },
- "maxProperties": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
- },
- "minProperties": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
- },
- "required": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray"
- },
- "enum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/enum"
- },
- "additionalProperties": {
- "anyOf": [
- {
- "$ref": "#/definitions/schema"
- },
- {
- "type": "boolean"
- }
- ],
- "default": {}
- },
- "type": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/type"
- },
- "items": {
- "anyOf": [
- {
- "$ref": "#/definitions/schema"
- },
- {
- "type": "array",
- "minItems": 1,
- "items": {
- "$ref": "#/definitions/schema"
- }
- }
- ],
- "default": {}
- },
- "allOf": {
- "type": "array",
- "minItems": 1,
- "items": {
- "$ref": "#/definitions/schema"
- }
- },
- "properties": {
- "type": "object",
- "additionalProperties": {
- "$ref": "#/definitions/schema"
- },
- "default": {}
- },
- "discriminator": {
- "type": "string"
- },
- "readOnly": {
- "type": "boolean",
- "default": false
- },
- "xml": {
- "$ref": "#/definitions/xml"
- },
- "externalDocs": {
- "$ref": "#/definitions/externalDocs"
- },
- "example": {}
- },
- "additionalProperties": false
- },
- "fileSchema": {
- "type": "object",
- "description": "A deterministic version of a JSON Schema object.",
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- },
- "required": [
- "type"
- ],
- "properties": {
- "format": {
- "type": "string"
- },
- "title": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/title"
- },
- "description": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/description"
- },
- "default": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/default"
- },
- "required": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray"
- },
- "type": {
- "type": "string",
- "enum": [
- "file"
- ]
- },
- "readOnly": {
- "type": "boolean",
- "default": false
- },
- "externalDocs": {
- "$ref": "#/definitions/externalDocs"
- },
- "example": {}
- },
- "additionalProperties": false
- },
- "primitivesItems": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "string",
- "number",
- "integer",
- "boolean",
- "array"
- ]
- },
- "format": {
- "type": "string"
- },
- "items": {
- "$ref": "#/definitions/primitivesItems"
- },
- "collectionFormat": {
- "$ref": "#/definitions/collectionFormat"
- },
- "default": {
- "$ref": "#/definitions/default"
- },
- "maximum": {
- "$ref": "#/definitions/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "#/definitions/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "#/definitions/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "#/definitions/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "#/definitions/maxLength"
- },
- "minLength": {
- "$ref": "#/definitions/minLength"
- },
- "pattern": {
- "$ref": "#/definitions/pattern"
- },
- "maxItems": {
- "$ref": "#/definitions/maxItems"
- },
- "minItems": {
- "$ref": "#/definitions/minItems"
- },
- "uniqueItems": {
- "$ref": "#/definitions/uniqueItems"
- },
- "enum": {
- "$ref": "#/definitions/enum"
- },
- "multipleOf": {
- "$ref": "#/definitions/multipleOf"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "security": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/securityRequirement"
- },
- "uniqueItems": true
- },
- "securityRequirement": {
- "type": "object",
- "additionalProperties": {
- "type": "array",
- "items": {
- "type": "string"
- },
- "uniqueItems": true
- }
- },
- "xml": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "name": {
- "type": "string"
- },
- "namespace": {
- "type": "string"
- },
- "prefix": {
- "type": "string"
- },
- "attribute": {
- "type": "boolean",
- "default": false
- },
- "wrapped": {
- "type": "boolean",
- "default": false
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "tag": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "name"
- ],
- "properties": {
- "name": {
- "type": "string"
- },
- "description": {
- "type": "string"
- },
- "externalDocs": {
- "$ref": "#/definitions/externalDocs"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "securityDefinitions": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "$ref": "#/definitions/basicAuthenticationSecurity"
- },
- {
- "$ref": "#/definitions/apiKeySecurity"
- },
- {
- "$ref": "#/definitions/oauth2ImplicitSecurity"
- },
- {
- "$ref": "#/definitions/oauth2PasswordSecurity"
- },
- {
- "$ref": "#/definitions/oauth2ApplicationSecurity"
- },
- {
- "$ref": "#/definitions/oauth2AccessCodeSecurity"
- }
- ]
- }
- },
- "basicAuthenticationSecurity": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "basic"
- ]
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "apiKeySecurity": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type",
- "name",
- "in"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "apiKey"
- ]
- },
- "name": {
- "type": "string"
- },
- "in": {
- "type": "string",
- "enum": [
- "header",
- "query"
- ]
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "oauth2ImplicitSecurity": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type",
- "flow",
- "authorizationUrl"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "oauth2"
- ]
- },
- "flow": {
- "type": "string",
- "enum": [
- "implicit"
- ]
- },
- "scopes": {
- "$ref": "#/definitions/oauth2Scopes"
- },
- "authorizationUrl": {
- "type": "string",
- "format": "uri"
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "oauth2PasswordSecurity": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type",
- "flow",
- "tokenUrl"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "oauth2"
- ]
- },
- "flow": {
- "type": "string",
- "enum": [
- "password"
- ]
- },
- "scopes": {
- "$ref": "#/definitions/oauth2Scopes"
- },
- "tokenUrl": {
- "type": "string",
- "format": "uri"
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "oauth2ApplicationSecurity": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type",
- "flow",
- "tokenUrl"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "oauth2"
- ]
- },
- "flow": {
- "type": "string",
- "enum": [
- "application"
- ]
- },
- "scopes": {
- "$ref": "#/definitions/oauth2Scopes"
- },
- "tokenUrl": {
- "type": "string",
- "format": "uri"
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "oauth2AccessCodeSecurity": {
- "type": "object",
- "additionalProperties": false,
- "required": [
- "type",
- "flow",
- "authorizationUrl",
- "tokenUrl"
- ],
- "properties": {
- "type": {
- "type": "string",
- "enum": [
- "oauth2"
- ]
- },
- "flow": {
- "type": "string",
- "enum": [
- "accessCode"
- ]
- },
- "scopes": {
- "$ref": "#/definitions/oauth2Scopes"
- },
- "authorizationUrl": {
- "type": "string",
- "format": "uri"
- },
- "tokenUrl": {
- "type": "string",
- "format": "uri"
- },
- "description": {
- "type": "string"
- }
- },
- "patternProperties": {
- "^x-": {
- "$ref": "#/definitions/vendorExtension"
- }
- }
- },
- "oauth2Scopes": {
- "type": "object",
- "additionalProperties": {
- "type": "string"
- }
- },
- "mediaTypeList": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/mimeType"
- },
- "uniqueItems": true
- },
- "parametersList": {
- "type": "array",
- "description": "The parameters needed to send a valid API call.",
- "additionalItems": false,
- "items": {
- "oneOf": [
- {
- "$ref": "#/definitions/parameter"
- },
- {
- "$ref": "#/definitions/jsonReference"
- }
- ]
- },
- "uniqueItems": true
- },
- "schemesList": {
- "type": "array",
- "description": "The transfer protocol of the API.",
- "items": {
- "type": "string",
- "enum": [
- "http",
- "https",
- "ws",
- "wss"
- ]
- },
- "uniqueItems": true
- },
- "collectionFormat": {
- "type": "string",
- "enum": [
- "csv",
- "ssv",
- "tsv",
- "pipes"
- ],
- "default": "csv"
- },
- "collectionFormatWithMulti": {
- "type": "string",
- "enum": [
- "csv",
- "ssv",
- "tsv",
- "pipes",
- "multi"
- ],
- "default": "csv"
- },
- "title": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/title"
- },
- "description": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/description"
- },
- "default": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/default"
- },
- "multipleOf": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf"
- },
- "maximum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum"
- },
- "exclusiveMaximum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum"
- },
- "minimum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum"
- },
- "exclusiveMinimum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum"
- },
- "maxLength": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
- },
- "minLength": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
- },
- "pattern": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern"
- },
- "maxItems": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger"
- },
- "minItems": {
- "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0"
- },
- "uniqueItems": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems"
- },
- "enum": {
- "$ref": "http://json-schema.org/draft-04/schema#/properties/enum"
- },
- "jsonReference": {
- "type": "object",
- "required": [
- "$ref"
- ],
- "additionalProperties": false,
- "properties": {
- "$ref": {
- "type": "string"
- }
- }
- }
- }
-}
diff --git a/vendor/github.com/go-openapi/spec/security_scheme.go b/vendor/github.com/go-openapi/spec/security_scheme.go
deleted file mode 100644
index 9d0bdae908..0000000000
--- a/vendor/github.com/go-openapi/spec/security_scheme.go
+++ /dev/null
@@ -1,170 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-const (
- basic = "basic"
- apiKey = "apiKey"
- oauth2 = "oauth2"
- implicit = "implicit"
- password = "password"
- application = "application"
- accessCode = "accessCode"
-)
-
-// BasicAuth creates a basic auth security scheme
-func BasicAuth() *SecurityScheme {
- return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: basic}}
-}
-
-// APIKeyAuth creates an api key auth security scheme
-func APIKeyAuth(fieldName, valueSource string) *SecurityScheme {
- return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: apiKey, Name: fieldName, In: valueSource}}
-}
-
-// OAuth2Implicit creates an implicit flow oauth2 security scheme
-func OAuth2Implicit(authorizationURL string) *SecurityScheme {
- return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
- Type: oauth2,
- Flow: implicit,
- AuthorizationURL: authorizationURL,
- }}
-}
-
-// OAuth2Password creates a password flow oauth2 security scheme
-func OAuth2Password(tokenURL string) *SecurityScheme {
- return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
- Type: oauth2,
- Flow: password,
- TokenURL: tokenURL,
- }}
-}
-
-// OAuth2Application creates an application flow oauth2 security scheme
-func OAuth2Application(tokenURL string) *SecurityScheme {
- return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
- Type: oauth2,
- Flow: application,
- TokenURL: tokenURL,
- }}
-}
-
-// OAuth2AccessToken creates an access token flow oauth2 security scheme
-func OAuth2AccessToken(authorizationURL, tokenURL string) *SecurityScheme {
- return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
- Type: oauth2,
- Flow: accessCode,
- AuthorizationURL: authorizationURL,
- TokenURL: tokenURL,
- }}
-}
-
-// SecuritySchemeProps describes a swagger security scheme in the securityDefinitions section
-type SecuritySchemeProps struct {
- Description string `json:"description,omitempty"`
- Type string `json:"type"`
- Name string `json:"name,omitempty"` // api key
- In string `json:"in,omitempty"` // api key
- Flow string `json:"flow,omitempty"` // oauth2
- AuthorizationURL string `json:"authorizationUrl"` // oauth2
- TokenURL string `json:"tokenUrl,omitempty"` // oauth2
- Scopes map[string]string `json:"scopes,omitempty"` // oauth2
-}
-
-// AddScope adds a scope to this security scheme
-func (s *SecuritySchemeProps) AddScope(scope, description string) {
- if s.Scopes == nil {
- s.Scopes = make(map[string]string)
- }
- s.Scopes[scope] = description
-}
-
-// SecurityScheme allows the definition of a security scheme that can be used by the operations.
-// Supported schemes are basic authentication, an API key (either as a header or as a query parameter)
-// and OAuth2's common flows (implicit, password, application and access code).
-//
-// For more information: http://goo.gl/8us55a#securitySchemeObject
-type SecurityScheme struct {
- VendorExtensible
- SecuritySchemeProps
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (s SecurityScheme) JSONLookup(token string) (interface{}, error) {
- if ex, ok := s.Extensions[token]; ok {
- return &ex, nil
- }
-
- r, _, err := jsonpointer.GetForToken(s.SecuritySchemeProps, token)
- return r, err
-}
-
-// MarshalJSON marshals this to JSON
-func (s SecurityScheme) MarshalJSON() ([]byte, error) {
- var (
- b1 []byte
- err error
- )
-
- if s.Type == oauth2 && (s.Flow == "implicit" || s.Flow == "accessCode") {
- // when oauth2 for implicit or accessCode flows, empty AuthorizationURL is added as empty string
- b1, err = json.Marshal(s.SecuritySchemeProps)
- } else {
- // when not oauth2, empty AuthorizationURL should be omitted
- b1, err = json.Marshal(struct {
- Description string `json:"description,omitempty"`
- Type string `json:"type"`
- Name string `json:"name,omitempty"` // api key
- In string `json:"in,omitempty"` // api key
- Flow string `json:"flow,omitempty"` // oauth2
- AuthorizationURL string `json:"authorizationUrl,omitempty"` // oauth2
- TokenURL string `json:"tokenUrl,omitempty"` // oauth2
- Scopes map[string]string `json:"scopes,omitempty"` // oauth2
- }{
- Description: s.Description,
- Type: s.Type,
- Name: s.Name,
- In: s.In,
- Flow: s.Flow,
- AuthorizationURL: s.AuthorizationURL,
- TokenURL: s.TokenURL,
- Scopes: s.Scopes,
- })
- }
- if err != nil {
- return nil, err
- }
-
- b2, err := json.Marshal(s.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2), nil
-}
-
-// UnmarshalJSON unmarshals this from JSON
-func (s *SecurityScheme) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &s.SecuritySchemeProps); err != nil {
- return err
- }
- return json.Unmarshal(data, &s.VendorExtensible)
-}
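The constructors deleted above (BasicAuth, APIKeyAuth and the OAuth2* helpers) were the usual way callers populated a swagger securityDefinitions section. A minimal sketch of that usage, assuming the pre-removal vendored go-openapi/spec API; the header name, URLs and scope are placeholder values:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Build a securityDefinitions map from the removed constructors.
	defs := spec.SecurityDefinitions{
		"basicAuth": spec.BasicAuth(),
		"apiKey":    spec.APIKeyAuth("X-API-Key", "header"),
		"oauth":     spec.OAuth2AccessToken("https://example.com/authorize", "https://example.com/token"),
	}

	// AddScope is promoted from the embedded SecuritySchemeProps,
	// so scopes can be attached after construction.
	defs["oauth"].AddScope("read", "grants read access")

	out, err := json.MarshalIndent(defs, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
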
diff --git a/vendor/github.com/go-openapi/spec/spec.go b/vendor/github.com/go-openapi/spec/spec.go
deleted file mode 100644
index 876aa12759..0000000000
--- a/vendor/github.com/go-openapi/spec/spec.go
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-)
-
-//go:generate curl -L --progress -o ./schemas/v2/schema.json http://swagger.io/v2/schema.json
-//go:generate curl -L --progress -o ./schemas/jsonschema-draft-04.json http://json-schema.org/draft-04/schema
-//go:generate go-bindata -pkg=spec -prefix=./schemas -ignore=.*\.md ./schemas/...
-//go:generate perl -pi -e s,Json,JSON,g bindata.go
-
-const (
- // SwaggerSchemaURL the url for the swagger 2.0 schema to validate specs
- SwaggerSchemaURL = "http://swagger.io/v2/schema.json#"
- // JSONSchemaURL the url for the json schema
- JSONSchemaURL = "http://json-schema.org/draft-04/schema#"
-)
-
-// MustLoadJSONSchemaDraft04 panics when JSONSchemaDraft04 returns an error
-func MustLoadJSONSchemaDraft04() *Schema {
- d, e := JSONSchemaDraft04()
- if e != nil {
- panic(e)
- }
- return d
-}
-
-// JSONSchemaDraft04 loads the json schema document for json schema draft04
-func JSONSchemaDraft04() (*Schema, error) {
- b, err := jsonschemaDraft04JSONBytes()
- if err != nil {
- return nil, err
- }
-
- schema := new(Schema)
- if err := json.Unmarshal(b, schema); err != nil {
- return nil, err
- }
- return schema, nil
-}
-
-// MustLoadSwagger20Schema panics when Swagger20Schema returns an error
-func MustLoadSwagger20Schema() *Schema {
- d, e := Swagger20Schema()
- if e != nil {
- panic(e)
- }
- return d
-}
-
-// Swagger20Schema loads the swagger 2.0 schema from the embedded assets
-func Swagger20Schema() (*Schema, error) {
-
- b, err := v2SchemaJSONBytes()
- if err != nil {
- return nil, err
- }
-
- schema := new(Schema)
- if err := json.Unmarshal(b, schema); err != nil {
- return nil, err
- }
- return schema, nil
-}
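spec.go above embedded the swagger 2.0 meta-schema (the large JSON document removed earlier in this diff) and the draft-04 JSON schema, exposing both through loader helpers. A short sketch of how those helpers were called, again assuming the pre-removal go-openapi/spec package:

package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Load the embedded swagger 2.0 meta-schema.
	schema, err := spec.Swagger20Schema()
	if err != nil {
		panic(err)
	}
	fmt.Printf("loaded swagger 2.0 meta-schema: %T\n", schema)

	// The draft-04 schema has a panicking convenience wrapper.
	draft04 := spec.MustLoadJSONSchemaDraft04()
	fmt.Printf("loaded json schema draft-04: %T\n", draft04)
}
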
diff --git a/vendor/github.com/go-openapi/spec/swagger.go b/vendor/github.com/go-openapi/spec/swagger.go
deleted file mode 100644
index 1590fd1751..0000000000
--- a/vendor/github.com/go-openapi/spec/swagger.go
+++ /dev/null
@@ -1,448 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/json"
- "fmt"
- "strconv"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// Swagger this is the root document object for the API specification.
-// It combines what previously was the Resource Listing and API Declaration (version 1.2 and earlier)
-// together into one document.
-//
-// For more information: http://goo.gl/8us55a#swagger-object-
-type Swagger struct {
- VendorExtensible
- SwaggerProps
-}
-
-// JSONLookup look up a value by the json property name
-func (s Swagger) JSONLookup(token string) (interface{}, error) {
- if ex, ok := s.Extensions[token]; ok {
- return &ex, nil
- }
- r, _, err := jsonpointer.GetForToken(s.SwaggerProps, token)
- return r, err
-}
-
-// MarshalJSON marshals this swagger structure to json
-func (s Swagger) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(s.SwaggerProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(s.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2), nil
-}
-
-// UnmarshalJSON unmarshals a swagger spec from json
-func (s *Swagger) UnmarshalJSON(data []byte) error {
- var sw Swagger
- if err := json.Unmarshal(data, &sw.SwaggerProps); err != nil {
- return err
- }
- if err := json.Unmarshal(data, &sw.VendorExtensible); err != nil {
- return err
- }
- *s = sw
- return nil
-}
-
-// GobEncode provides a safe gob encoder for Swagger, including extensions
-func (s Swagger) GobEncode() ([]byte, error) {
- var b bytes.Buffer
- raw := struct {
- Props SwaggerProps
- Ext VendorExtensible
- }{
- Props: s.SwaggerProps,
- Ext: s.VendorExtensible,
- }
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
-}
-
-// GobDecode provides a safe gob decoder for Swagger, including extensions
-func (s *Swagger) GobDecode(b []byte) error {
- var raw struct {
- Props SwaggerProps
- Ext VendorExtensible
- }
- buf := bytes.NewBuffer(b)
- err := gob.NewDecoder(buf).Decode(&raw)
- if err != nil {
- return err
- }
- s.SwaggerProps = raw.Props
- s.VendorExtensible = raw.Ext
- return nil
-}
-
-// SwaggerProps captures the top-level properties of an Api specification
-//
-// NOTE: validation rules
-// - the scheme, when present must be from [http, https, ws, wss]
-// - BasePath must start with a leading "/"
-// - Paths is required
-type SwaggerProps struct {
- ID string `json:"id,omitempty"`
- Consumes []string `json:"consumes,omitempty"`
- Produces []string `json:"produces,omitempty"`
- Schemes []string `json:"schemes,omitempty"`
- Swagger string `json:"swagger,omitempty"`
- Info *Info `json:"info,omitempty"`
- Host string `json:"host,omitempty"`
- BasePath string `json:"basePath,omitempty"`
- Paths *Paths `json:"paths"`
- Definitions Definitions `json:"definitions,omitempty"`
- Parameters map[string]Parameter `json:"parameters,omitempty"`
- Responses map[string]Response `json:"responses,omitempty"`
- SecurityDefinitions SecurityDefinitions `json:"securityDefinitions,omitempty"`
- Security []map[string][]string `json:"security,omitempty"`
- Tags []Tag `json:"tags,omitempty"`
- ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
-}
-
-type swaggerPropsAlias SwaggerProps
-
-type gobSwaggerPropsAlias struct {
- Security []map[string]struct {
- List []string
- Pad bool
- }
- Alias *swaggerPropsAlias
- SecurityIsEmpty bool
-}
-
-// GobEncode provides a safe gob encoder for SwaggerProps, including empty security requirements
-func (o SwaggerProps) GobEncode() ([]byte, error) {
- raw := gobSwaggerPropsAlias{
- Alias: (*swaggerPropsAlias)(&o),
- }
-
- var b bytes.Buffer
- if o.Security == nil {
- // nil security requirement
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
- }
-
- if len(o.Security) == 0 {
- // empty, but non-nil security requirement
- raw.SecurityIsEmpty = true
- raw.Alias.Security = nil
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
- }
-
- raw.Security = make([]map[string]struct {
- List []string
- Pad bool
- }, 0, len(o.Security))
- for _, req := range o.Security {
- v := make(map[string]struct {
- List []string
- Pad bool
- }, len(req))
- for k, val := range req {
- v[k] = struct {
- List []string
- Pad bool
- }{
- List: val,
- }
- }
- raw.Security = append(raw.Security, v)
- }
-
- err := gob.NewEncoder(&b).Encode(raw)
- return b.Bytes(), err
-}
-
-// GobDecode provides a safe gob decoder for SwaggerProps, including empty security requirements
-func (o *SwaggerProps) GobDecode(b []byte) error {
- var raw gobSwaggerPropsAlias
-
- buf := bytes.NewBuffer(b)
- err := gob.NewDecoder(buf).Decode(&raw)
- if err != nil {
- return err
- }
- if raw.Alias == nil {
- return nil
- }
-
- switch {
- case raw.SecurityIsEmpty:
- // empty, but non-nil security requirement
- raw.Alias.Security = []map[string][]string{}
- case len(raw.Alias.Security) == 0:
- // nil security requirement
- raw.Alias.Security = nil
- default:
- raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security))
- for _, req := range raw.Security {
- v := make(map[string][]string, len(req))
- for k, val := range req {
- v[k] = make([]string, 0, len(val.List))
- v[k] = append(v[k], val.List...)
- }
- raw.Alias.Security = append(raw.Alias.Security, v)
- }
- }
-
- *o = *(*SwaggerProps)(raw.Alias)
- return nil
-}
-
-// Dependencies represent a dependencies property
-type Dependencies map[string]SchemaOrStringArray
-
-// SchemaOrBool represents a schema or boolean value, is biased towards true for the boolean property
-type SchemaOrBool struct {
- Allows bool
- Schema *Schema
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (s SchemaOrBool) JSONLookup(token string) (interface{}, error) {
- if token == "allows" {
- return s.Allows, nil
- }
- r, _, err := jsonpointer.GetForToken(s.Schema, token)
- return r, err
-}
-
-var jsTrue = []byte("true")
-var jsFalse = []byte("false")
-
-// MarshalJSON convert this object to JSON
-func (s SchemaOrBool) MarshalJSON() ([]byte, error) {
- if s.Schema != nil {
- return json.Marshal(s.Schema)
- }
-
- if s.Schema == nil && !s.Allows {
- return jsFalse, nil
- }
- return jsTrue, nil
-}
-
-// UnmarshalJSON converts this bool or schema object from a JSON structure
-func (s *SchemaOrBool) UnmarshalJSON(data []byte) error {
- var nw SchemaOrBool
- if len(data) > 0 {
- if data[0] == '{' {
- var sch Schema
- if err := json.Unmarshal(data, &sch); err != nil {
- return err
- }
- nw.Schema = &sch
- }
- nw.Allows = !bytes.Equal(data, []byte("false"))
- }
- *s = nw
- return nil
-}
-
-// SchemaOrStringArray represents a schema or a string array
-type SchemaOrStringArray struct {
- Schema *Schema
- Property []string
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (s SchemaOrStringArray) JSONLookup(token string) (interface{}, error) {
- r, _, err := jsonpointer.GetForToken(s.Schema, token)
- return r, err
-}
-
-// MarshalJSON converts this schema object or array into JSON structure
-func (s SchemaOrStringArray) MarshalJSON() ([]byte, error) {
- if len(s.Property) > 0 {
- return json.Marshal(s.Property)
- }
- if s.Schema != nil {
- return json.Marshal(s.Schema)
- }
- return []byte("null"), nil
-}
-
-// UnmarshalJSON converts this schema object or array from a JSON structure
-func (s *SchemaOrStringArray) UnmarshalJSON(data []byte) error {
- var first byte
- if len(data) > 1 {
- first = data[0]
- }
- var nw SchemaOrStringArray
- if first == '{' {
- var sch Schema
- if err := json.Unmarshal(data, &sch); err != nil {
- return err
- }
- nw.Schema = &sch
- }
- if first == '[' {
- if err := json.Unmarshal(data, &nw.Property); err != nil {
- return err
- }
- }
- *s = nw
- return nil
-}
-
-// Definitions contains the models explicitly defined in this spec
-// An object to hold data types that can be consumed and produced by operations.
-// These data types can be primitives, arrays or models.
-//
-// For more information: http://goo.gl/8us55a#definitionsObject
-type Definitions map[string]Schema
-
-// SecurityDefinitions a declaration of the security schemes available to be used in the specification.
-// This does not enforce the security schemes on the operations and only serves to provide
-// the relevant details for each scheme.
-//
-// For more information: http://goo.gl/8us55a#securityDefinitionsObject
-type SecurityDefinitions map[string]*SecurityScheme
-
-// StringOrArray represents a value that can either be a string
-// or an array of strings. Mainly here for serialization purposes
-type StringOrArray []string
-
-// Contains returns true when the value is contained in the slice
-func (s StringOrArray) Contains(value string) bool {
- for _, str := range s {
- if str == value {
- return true
- }
- }
- return false
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (s SchemaOrArray) JSONLookup(token string) (interface{}, error) {
- if _, err := strconv.Atoi(token); err == nil {
- r, _, err := jsonpointer.GetForToken(s.Schemas, token)
- return r, err
- }
- r, _, err := jsonpointer.GetForToken(s.Schema, token)
- return r, err
-}
-
-// UnmarshalJSON unmarshals this string or array object from a JSON array or JSON string
-func (s *StringOrArray) UnmarshalJSON(data []byte) error {
- var first byte
- if len(data) > 1 {
- first = data[0]
- }
-
- if first == '[' {
- var parsed []string
- if err := json.Unmarshal(data, &parsed); err != nil {
- return err
- }
- *s = StringOrArray(parsed)
- return nil
- }
-
- var single interface{}
- if err := json.Unmarshal(data, &single); err != nil {
- return err
- }
- if single == nil {
- return nil
- }
- switch v := single.(type) {
- case string:
- *s = StringOrArray([]string{v})
- return nil
- default:
- return fmt.Errorf("only string or array is allowed, not %T", single)
- }
-}
-
-// MarshalJSON converts this string or array to a JSON array or JSON string
-func (s StringOrArray) MarshalJSON() ([]byte, error) {
- if len(s) == 1 {
- return json.Marshal([]string(s)[0])
- }
- return json.Marshal([]string(s))
-}
-
-// SchemaOrArray represents a value that can either be a Schema
-// or an array of Schema. Mainly here for serialization purposes
-type SchemaOrArray struct {
- Schema *Schema
- Schemas []Schema
-}
-
-// Len returns the number of schemas in this property
-func (s SchemaOrArray) Len() int {
- if s.Schema != nil {
- return 1
- }
- return len(s.Schemas)
-}
-
-// ContainsType returns true when one of the schemas is of the specified type
-func (s *SchemaOrArray) ContainsType(name string) bool {
- if s.Schema != nil {
- return s.Schema.Type != nil && s.Schema.Type.Contains(name)
- }
- return false
-}
-
-// MarshalJSON converts this schema object or array into JSON structure
-func (s SchemaOrArray) MarshalJSON() ([]byte, error) {
- if len(s.Schemas) > 0 {
- return json.Marshal(s.Schemas)
- }
- return json.Marshal(s.Schema)
-}
-
-// UnmarshalJSON converts this schema object or array from a JSON structure
-func (s *SchemaOrArray) UnmarshalJSON(data []byte) error {
- var nw SchemaOrArray
- var first byte
- if len(data) > 1 {
- first = data[0]
- }
- if first == '{' {
- var sch Schema
- if err := json.Unmarshal(data, &sch); err != nil {
- return err
- }
- nw.Schema = &sch
- }
- if first == '[' {
- if err := json.Unmarshal(data, &nw.Schemas); err != nil {
- return err
- }
- }
- *s = nw
- return nil
-}
-
-// vim:set ft=go noet sts=2 sw=2 ts=2:
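swagger.go carried the root Swagger/SwaggerProps types plus helpers such as StringOrArray for fields that accept either a single value or a list. A hedged sketch of a round trip through that API; the host and base path are placeholders:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// A minimal root document built from the removed Swagger/SwaggerProps types.
	doc := spec.Swagger{
		SwaggerProps: spec.SwaggerProps{
			Swagger:  "2.0",
			Host:     "api.example.com",
			BasePath: "/v1",
			Schemes:  []string{"https"},
		},
	}

	b, err := json.Marshal(doc)
	if err != nil {
		panic(err)
	}

	// UnmarshalJSON restores both SwaggerProps and vendor extensions.
	var parsed spec.Swagger
	if err := json.Unmarshal(b, &parsed); err != nil {
		panic(err)
	}
	fmt.Println(parsed.Host, parsed.BasePath, parsed.Schemes)

	// StringOrArray accepts either a JSON string or an array of strings.
	var s spec.StringOrArray
	if err := json.Unmarshal([]byte(`"application/json"`), &s); err != nil {
		panic(err)
	}
	fmt.Println(s.Contains("application/json")) // true
}
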
diff --git a/vendor/github.com/go-openapi/spec/tag.go b/vendor/github.com/go-openapi/spec/tag.go
deleted file mode 100644
index faa3d3de1e..0000000000
--- a/vendor/github.com/go-openapi/spec/tag.go
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-import (
- "encoding/json"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/swag"
-)
-
-// TagProps describe a tag entry in the top level tags section of a swagger spec
-type TagProps struct {
- Description string `json:"description,omitempty"`
- Name string `json:"name,omitempty"`
- ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
-}
-
-// NewTag creates a new tag
-func NewTag(name, description string, externalDocs *ExternalDocumentation) Tag {
- return Tag{TagProps: TagProps{Description: description, Name: name, ExternalDocs: externalDocs}}
-}
-
-// Tag allows adding meta data to a single tag that is used by the
-// [Operation Object](http://goo.gl/8us55a#operationObject).
-// It is not mandatory to have a Tag Object per tag used there.
-//
-// For more information: http://goo.gl/8us55a#tagObject
-type Tag struct {
- VendorExtensible
- TagProps
-}
-
-// JSONLookup implements an interface to customize json pointer lookup
-func (t Tag) JSONLookup(token string) (interface{}, error) {
- if ex, ok := t.Extensions[token]; ok {
- return &ex, nil
- }
-
- r, _, err := jsonpointer.GetForToken(t.TagProps, token)
- return r, err
-}
-
-// MarshalJSON marshals this to JSON
-func (t Tag) MarshalJSON() ([]byte, error) {
- b1, err := json.Marshal(t.TagProps)
- if err != nil {
- return nil, err
- }
- b2, err := json.Marshal(t.VendorExtensible)
- if err != nil {
- return nil, err
- }
- return swag.ConcatJSON(b1, b2), nil
-}
-
-// UnmarshalJSON unmarshals this from JSON
-func (t *Tag) UnmarshalJSON(data []byte) error {
- if err := json.Unmarshal(data, &t.TagProps); err != nil {
- return err
- }
- return json.Unmarshal(data, &t.VendorExtensible)
-}
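tag.go provided NewTag for building entries in the top-level tags section. An illustrative call under the same pre-removal API assumption; the tag name and description are placeholders:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Build a tag entry; external docs are optional and omitted here.
	t := spec.NewTag("containers", "Operations on containers", nil)

	b, _ := json.Marshal(t)
	fmt.Println(string(b)) // {"description":"Operations on containers","name":"containers"}
}
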
diff --git a/vendor/github.com/go-openapi/spec/url_go19.go b/vendor/github.com/go-openapi/spec/url_go19.go
deleted file mode 100644
index 5bdfe40bcc..0000000000
--- a/vendor/github.com/go-openapi/spec/url_go19.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package spec
-
-import "net/url"
-
-func parseURL(s string) (*url.URL, error) {
- u, err := url.Parse(s)
- if err == nil {
- u.OmitHost = false
- }
- return u, err
-}
diff --git a/vendor/github.com/go-openapi/spec/validations.go b/vendor/github.com/go-openapi/spec/validations.go
deleted file mode 100644
index 6360a8ea77..0000000000
--- a/vendor/github.com/go-openapi/spec/validations.go
+++ /dev/null
@@ -1,215 +0,0 @@
-package spec
-
-// CommonValidations describe common JSON-schema validations
-type CommonValidations struct {
- Maximum *float64 `json:"maximum,omitempty"`
- ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`
- Minimum *float64 `json:"minimum,omitempty"`
- ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"`
- MaxLength *int64 `json:"maxLength,omitempty"`
- MinLength *int64 `json:"minLength,omitempty"`
- Pattern string `json:"pattern,omitempty"`
- MaxItems *int64 `json:"maxItems,omitempty"`
- MinItems *int64 `json:"minItems,omitempty"`
- UniqueItems bool `json:"uniqueItems,omitempty"`
- MultipleOf *float64 `json:"multipleOf,omitempty"`
- Enum []interface{} `json:"enum,omitempty"`
-}
-
-// SetValidations defines all validations for a simple schema.
-//
-// NOTE: the input is the larger set of validations available for schemas.
-// For simple schemas, MinProperties and MaxProperties are ignored.
-func (v *CommonValidations) SetValidations(val SchemaValidations) {
- v.Maximum = val.Maximum
- v.ExclusiveMaximum = val.ExclusiveMaximum
- v.Minimum = val.Minimum
- v.ExclusiveMinimum = val.ExclusiveMinimum
- v.MaxLength = val.MaxLength
- v.MinLength = val.MinLength
- v.Pattern = val.Pattern
- v.MaxItems = val.MaxItems
- v.MinItems = val.MinItems
- v.UniqueItems = val.UniqueItems
- v.MultipleOf = val.MultipleOf
- v.Enum = val.Enum
-}
-
-type clearedValidation struct {
- Validation string
- Value interface{}
-}
-
-type clearedValidations []clearedValidation
-
-func (c clearedValidations) apply(cbs []func(string, interface{})) {
- for _, cb := range cbs {
- for _, cleared := range c {
- cb(cleared.Validation, cleared.Value)
- }
- }
-}
-
-// ClearNumberValidations clears all number validations.
-//
-// Some callbacks may be set by the caller to capture changed values.
-func (v *CommonValidations) ClearNumberValidations(cbs ...func(string, interface{})) {
- done := make(clearedValidations, 0, 5)
- defer func() {
- done.apply(cbs)
- }()
-
- if v.Minimum != nil {
- done = append(done, clearedValidation{Validation: "minimum", Value: v.Minimum})
- v.Minimum = nil
- }
- if v.Maximum != nil {
- done = append(done, clearedValidation{Validation: "maximum", Value: v.Maximum})
- v.Maximum = nil
- }
- if v.ExclusiveMaximum {
- done = append(done, clearedValidation{Validation: "exclusiveMaximum", Value: v.ExclusiveMaximum})
- v.ExclusiveMaximum = false
- }
- if v.ExclusiveMinimum {
- done = append(done, clearedValidation{Validation: "exclusiveMinimum", Value: v.ExclusiveMinimum})
- v.ExclusiveMinimum = false
- }
- if v.MultipleOf != nil {
- done = append(done, clearedValidation{Validation: "multipleOf", Value: v.MultipleOf})
- v.MultipleOf = nil
- }
-}
-
-// ClearStringValidations clears all string validations.
-//
-// Some callbacks may be set by the caller to capture changed values.
-func (v *CommonValidations) ClearStringValidations(cbs ...func(string, interface{})) {
- done := make(clearedValidations, 0, 3)
- defer func() {
- done.apply(cbs)
- }()
-
- if v.Pattern != "" {
- done = append(done, clearedValidation{Validation: "pattern", Value: v.Pattern})
- v.Pattern = ""
- }
- if v.MinLength != nil {
- done = append(done, clearedValidation{Validation: "minLength", Value: v.MinLength})
- v.MinLength = nil
- }
- if v.MaxLength != nil {
- done = append(done, clearedValidation{Validation: "maxLength", Value: v.MaxLength})
- v.MaxLength = nil
- }
-}
-
-// ClearArrayValidations clears all array validations.
-//
-// Some callbacks may be set by the caller to capture changed values.
-func (v *CommonValidations) ClearArrayValidations(cbs ...func(string, interface{})) {
- done := make(clearedValidations, 0, 3)
- defer func() {
- done.apply(cbs)
- }()
-
- if v.MaxItems != nil {
- done = append(done, clearedValidation{Validation: "maxItems", Value: v.MaxItems})
- v.MaxItems = nil
- }
- if v.MinItems != nil {
- done = append(done, clearedValidation{Validation: "minItems", Value: v.MinItems})
- v.MinItems = nil
- }
- if v.UniqueItems {
- done = append(done, clearedValidation{Validation: "uniqueItems", Value: v.UniqueItems})
- v.UniqueItems = false
- }
-}
-
-// Validations returns a clone of the validations for a simple schema.
-//
-// NOTE: in the context of simple schema objects, MinProperties, MaxProperties
-// and PatternProperties remain unset.
-func (v CommonValidations) Validations() SchemaValidations {
- return SchemaValidations{
- CommonValidations: v,
- }
-}
-
-// HasNumberValidations indicates if the validations are for numbers or integers
-func (v CommonValidations) HasNumberValidations() bool {
- return v.Maximum != nil || v.Minimum != nil || v.MultipleOf != nil
-}
-
-// HasStringValidations indicates if the validations are for strings
-func (v CommonValidations) HasStringValidations() bool {
- return v.MaxLength != nil || v.MinLength != nil || v.Pattern != ""
-}
-
-// HasArrayValidations indicates if the validations are for arrays
-func (v CommonValidations) HasArrayValidations() bool {
- return v.MaxItems != nil || v.MinItems != nil || v.UniqueItems
-}
-
-// HasEnum indicates if the validation includes some enum constraint
-func (v CommonValidations) HasEnum() bool {
- return len(v.Enum) > 0
-}
-
-// SchemaValidations describes the validation properties of a schema
-//
-// NOTE: at this moment, this is not embedded in SchemaProps because this would induce a breaking change
-// in the exported members: all initializers using literals would fail.
-type SchemaValidations struct {
- CommonValidations
-
- PatternProperties SchemaProperties `json:"patternProperties,omitempty"`
- MaxProperties *int64 `json:"maxProperties,omitempty"`
- MinProperties *int64 `json:"minProperties,omitempty"`
-}
-
-// HasObjectValidations indicates if the validations are for objects
-func (v SchemaValidations) HasObjectValidations() bool {
- return v.MaxProperties != nil || v.MinProperties != nil || v.PatternProperties != nil
-}
-
-// SetValidations for schema validations
-func (v *SchemaValidations) SetValidations(val SchemaValidations) {
- v.CommonValidations.SetValidations(val)
- v.PatternProperties = val.PatternProperties
- v.MaxProperties = val.MaxProperties
- v.MinProperties = val.MinProperties
-}
-
-// Validations for a schema
-func (v SchemaValidations) Validations() SchemaValidations {
- val := v.CommonValidations.Validations()
- val.PatternProperties = v.PatternProperties
- val.MinProperties = v.MinProperties
- val.MaxProperties = v.MaxProperties
- return val
-}
-
-// ClearObjectValidations returns a clone of the validations with all object validations cleared.
-//
-// Some callbacks may be set by the caller to capture changed values.
-func (v *SchemaValidations) ClearObjectValidations(cbs ...func(string, interface{})) {
- done := make(clearedValidations, 0, 3)
- defer func() {
- done.apply(cbs)
- }()
-
- if v.MaxProperties != nil {
- done = append(done, clearedValidation{Validation: "maxProperties", Value: v.MaxProperties})
- v.MaxProperties = nil
- }
- if v.MinProperties != nil {
- done = append(done, clearedValidation{Validation: "minProperties", Value: v.MinProperties})
- v.MinProperties = nil
- }
- if v.PatternProperties != nil {
- done = append(done, clearedValidation{Validation: "patternProperties", Value: v.PatternProperties})
- v.PatternProperties = nil
- }
-}
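validations.go let callers clear groups of validations and observe what was dropped through optional callbacks. A small sketch of ClearNumberValidations with one callback, assuming the pre-removal API; the concrete limits and pattern are placeholders:

package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	maximum := 100.0
	multiple := 5.0
	v := spec.CommonValidations{
		Maximum:    &maximum,
		MultipleOf: &multiple,
		Pattern:    "^[a-z]+$",
	}

	// Each cleared validation is reported to the callback by name.
	v.ClearNumberValidations(func(name string, _ interface{}) {
		fmt.Println("cleared:", name)
	})

	fmt.Println(v.HasNumberValidations()) // false: maximum and multipleOf were cleared
	fmt.Println(v.HasStringValidations()) // true: pattern is untouched
}
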
diff --git a/vendor/github.com/go-openapi/spec/xml_object.go b/vendor/github.com/go-openapi/spec/xml_object.go
deleted file mode 100644
index 945a46703d..0000000000
--- a/vendor/github.com/go-openapi/spec/xml_object.go
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package spec
-
-// XMLObject a metadata object that allows for more fine-tuned XML model definitions.
-//
-// For more information: http://goo.gl/8us55a#xmlObject
-type XMLObject struct {
- Name string `json:"name,omitempty"`
- Namespace string `json:"namespace,omitempty"`
- Prefix string `json:"prefix,omitempty"`
- Attribute bool `json:"attribute,omitempty"`
- Wrapped bool `json:"wrapped,omitempty"`
-}
-
-// WithName sets the xml name for the object
-func (x *XMLObject) WithName(name string) *XMLObject {
- x.Name = name
- return x
-}
-
-// WithNamespace sets the xml namespace for the object
-func (x *XMLObject) WithNamespace(namespace string) *XMLObject {
- x.Namespace = namespace
- return x
-}
-
-// WithPrefix sets the xml prefix for the object
-func (x *XMLObject) WithPrefix(prefix string) *XMLObject {
- x.Prefix = prefix
- return x
-}
-
-// AsAttribute flags this object as xml attribute
-func (x *XMLObject) AsAttribute() *XMLObject {
- x.Attribute = true
- return x
-}
-
-// AsElement flags this object as an xml node
-func (x *XMLObject) AsElement() *XMLObject {
- x.Attribute = false
- return x
-}
-
-// AsWrapped flags this object as wrapped, this is mostly useful for array types
-func (x *XMLObject) AsWrapped() *XMLObject {
- x.Wrapped = true
- return x
-}
-
-// AsUnwrapped flags this object as unwrapped
-func (x *XMLObject) AsUnwrapped() *XMLObject {
- x.Wrapped = false
- return x
-}
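xml_object.go exposed fluent setters on XMLObject. A brief chaining example; the element name, namespace and prefix are placeholders:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Chain the fluent setters from the removed xml_object.go.
	x := new(spec.XMLObject).
		WithName("item").
		WithNamespace("http://example.com/schema").
		WithPrefix("ex").
		AsWrapped()

	b, _ := json.Marshal(x)
	fmt.Println(string(b)) // {"name":"item","namespace":"http://example.com/schema","prefix":"ex","wrapped":true}
}
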
diff --git a/vendor/github.com/go-openapi/strfmt/.editorconfig b/vendor/github.com/go-openapi/strfmt/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/strfmt/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/strfmt/.gitattributes b/vendor/github.com/go-openapi/strfmt/.gitattributes
deleted file mode 100644
index d020be8ea4..0000000000
--- a/vendor/github.com/go-openapi/strfmt/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-*.go text eol=lf
-
diff --git a/vendor/github.com/go-openapi/strfmt/.gitignore b/vendor/github.com/go-openapi/strfmt/.gitignore
deleted file mode 100644
index dd91ed6a04..0000000000
--- a/vendor/github.com/go-openapi/strfmt/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-secrets.yml
-coverage.out
diff --git a/vendor/github.com/go-openapi/strfmt/.golangci.yml b/vendor/github.com/go-openapi/strfmt/.golangci.yml
deleted file mode 100644
index 500630621f..0000000000
--- a/vendor/github.com/go-openapi/strfmt/.golangci.yml
+++ /dev/null
@@ -1,62 +0,0 @@
-version: "2"
-linters:
- default: all
- disable:
- - cyclop
- - depguard
- - errchkjson
- - errorlint
- - exhaustruct
- - forcetypeassert
- - funlen
- - gochecknoglobals
- - gochecknoinits
- - gocognit
- - godot
- - godox
- - gosmopolitan
- - inamedparam
- - ireturn
- - lll
- - musttag
- - nestif
- - nlreturn
- - nonamedreturns
- - paralleltest
- - testpackage
- - thelper
- - tparallel
- - unparam
- - varnamelen
- - whitespace
- - wrapcheck
- - wsl
- settings:
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
- gocyclo:
- min-complexity: 45
- exclusions:
- generated: lax
- presets:
- - comments
- - common-false-positives
- - legacy
- - std-error-handling
- paths:
- - third_party$
- - builtin$
- - examples$
-formatters:
- enable:
- - gofmt
- - goimports
- exclusions:
- generated: lax
- paths:
- - third_party$
- - builtin$
- - examples$
diff --git a/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/strfmt/LICENSE b/vendor/github.com/go-openapi/strfmt/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/strfmt/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/strfmt/README.md b/vendor/github.com/go-openapi/strfmt/README.md
deleted file mode 100644
index f6b39c6c56..0000000000
--- a/vendor/github.com/go-openapi/strfmt/README.md
+++ /dev/null
@@ -1,87 +0,0 @@
-# Strfmt [CI](https://github.com/go-openapi/strfmt/actions?query=workflow%3A"go+test") [Coverage](https://codecov.io/gh/go-openapi/strfmt)
-[Slack](https://slackin.goswagger.io)
-[License](https://raw.githubusercontent.com/go-openapi/strfmt/master/LICENSE)
-[GoDoc](http://godoc.org/github.com/go-openapi/strfmt)
-[Go Report Card](https://goreportcard.com/report/github.com/go-openapi/strfmt)
-
-This package exposes a registry of data types to support string formats in the go-openapi toolkit.
-
-strfmt represents a well known string format such as credit card or email. The go toolkit for OpenAPI specifications knows how to deal with those.
-
-## Supported data formats
-go-openapi/strfmt follows the swagger 2.0 specification with the following formats
-defined [here](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types).
-
-It also provides convenient extensions to go-openapi users.
-
-- [x] JSON-schema draft 4 formats
- - date-time
- - email
- - hostname
- - ipv4
- - ipv6
- - uri
-- [x] swagger 2.0 format extensions
- - binary
- - byte (e.g. base64 encoded string)
- - date (e.g. "1970-01-01")
- - password
-- [x] go-openapi custom format extensions
- - bsonobjectid (BSON objectID)
- - creditcard
- - duration (e.g. "3 weeks", "1ms")
- - hexcolor (e.g. "#FFFFFF")
- - isbn, isbn10, isbn13
- - mac (e.g "01:02:03:04:05:06")
- - rgbcolor (e.g. "rgb(100,100,100)")
- - ssn
- - uuid, uuid3, uuid4, uuid5
- - cidr (e.g. "192.0.2.1/24", "2001:db8:a0b:12f0::1/32")
- - ulid (e.g. "00000PP9HGSBSSDZ1JTEXBJ0PW", [spec](https://github.com/ulid/spec))
-
-> NOTE: as the name suggests, this package is intended to support string formatting only.
-> It does not provide validation for numerical values that use the swagger format extensions for the JSON types "number" or
-> "integer" (e.g. float, double, int32...).
-
-## Type conversion
-
-All types defined here are stringers and may be converted to strings with `.String()`.
-Note that most types defined by this package may be converted directly to a string, e.g. `string(Email(""))`.
-
-`Date` and `DateTime` may be converted directly to `time.Time`, e.g. `time.Time(DateTime{})`.
-Similarly, you can convert `Duration` to `time.Duration` as in `time.Duration(Duration(0))`.
-
-## Using pointers
-
-The `conv` subpackage provides helpers to convert the types to and from pointers, just like `go-openapi/swag` does
-with primitive types.
-
-## Format types
-Types defined in strfmt expose marshaling and validation capabilities.
-
-List of defined types:
-- Base64
-- CreditCard
-- Date
-- DateTime
-- Duration
-- Email
-- HexColor
-- Hostname
-- IPv4
-- IPv6
-- CIDR
-- ISBN
-- ISBN10
-- ISBN13
-- MAC
-- ObjectId
-- Password
-- RGBColor
-- SSN
-- URI
-- UUID
-- UUID3
-- UUID4
-- UUID5
-- [ULID](https://github.com/ulid/spec)
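
For orientation, the conversions and validators this README documents look roughly like the following. This is only a sketch, assuming the github.com/go-openapi/strfmt import path seen in the vendor tree above and purely illustrative values:

    package main

    import (
        "fmt"

        "github.com/go-openapi/strfmt"
    )

    func main() {
        // Most formats are thin string types with a String() method.
        email := strfmt.Email("user@example.com")
        fmt.Println(email.String())                // user@example.com
        fmt.Println(strfmt.IsEmail(string(email))) // true

        // Validator functions mirror the format names.
        fmt.Println(strfmt.IsUUID("123e4567-e89b-12d3-a456-426614174000")) // true
        fmt.Println(strfmt.IsHostname("example.com"))                      // true
    }
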
diff --git a/vendor/github.com/go-openapi/strfmt/bson.go b/vendor/github.com/go-openapi/strfmt/bson.go
deleted file mode 100644
index 685eaf63cb..0000000000
--- a/vendor/github.com/go-openapi/strfmt/bson.go
+++ /dev/null
@@ -1,165 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "fmt"
-
- "go.mongodb.org/mongo-driver/bson"
-
- "go.mongodb.org/mongo-driver/bson/bsontype"
- bsonprim "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-func init() {
- var id ObjectId
- // register this format in the default registry
- Default.Add("bsonobjectid", &id, IsBSONObjectID)
-}
-
-// IsBSONObjectID returns true when the string is a valid BSON.ObjectId
-func IsBSONObjectID(str string) bool {
- _, err := bsonprim.ObjectIDFromHex(str)
- return err == nil
-}
-
-// ObjectId represents a BSON object ID (alias to go.mongodb.org/mongo-driver/bson/primitive.ObjectID)
-//
-// swagger:strfmt bsonobjectid
-type ObjectId bsonprim.ObjectID //nolint:revive,stylecheck
-
-// NewObjectId creates a ObjectId from a Hex String
-func NewObjectId(hex string) ObjectId { //nolint:revive,stylecheck
- oid, err := bsonprim.ObjectIDFromHex(hex)
- if err != nil {
- panic(err)
- }
- return ObjectId(oid)
-}
-
-// MarshalText turns this instance into text
-func (id ObjectId) MarshalText() ([]byte, error) {
- oid := bsonprim.ObjectID(id)
- if oid == bsonprim.NilObjectID {
- return nil, nil
- }
- return []byte(oid.Hex()), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (id *ObjectId) UnmarshalText(data []byte) error { // validation is performed later on
- if len(data) == 0 {
- *id = ObjectId(bsonprim.NilObjectID)
- return nil
- }
- oidstr := string(data)
- oid, err := bsonprim.ObjectIDFromHex(oidstr)
- if err != nil {
- return err
- }
- *id = ObjectId(oid)
- return nil
-}
-
-// Scan read a value from a database driver
-func (id *ObjectId) Scan(raw interface{}) error {
- var data []byte
- switch v := raw.(type) {
- case []byte:
- data = v
- case string:
- data = []byte(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.URI from: %#v: %w", v, ErrFormat)
- }
-
- return id.UnmarshalText(data)
-}
-
-// Value converts a value to a database driver value
-func (id ObjectId) Value() (driver.Value, error) {
- return driver.Value(bsonprim.ObjectID(id).Hex()), nil
-}
-
-func (id ObjectId) String() string {
- return bsonprim.ObjectID(id).Hex()
-}
-
-// MarshalJSON returns the ObjectId as JSON
-func (id ObjectId) MarshalJSON() ([]byte, error) {
- return bsonprim.ObjectID(id).MarshalJSON()
-}
-
-// UnmarshalJSON sets the ObjectId from JSON
-func (id *ObjectId) UnmarshalJSON(data []byte) error {
- var obj bsonprim.ObjectID
- if err := obj.UnmarshalJSON(data); err != nil {
- return err
- }
- *id = ObjectId(obj)
- return nil
-}
-
-// MarshalBSON renders the object id as a BSON document
-func (id ObjectId) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": bsonprim.ObjectID(id)})
-}
-
-// UnmarshalBSON reads the objectId from a BSON document
-func (id *ObjectId) UnmarshalBSON(data []byte) error {
- var obj struct {
- Data bsonprim.ObjectID
- }
- if err := bson.Unmarshal(data, &obj); err != nil {
- return err
- }
- *id = ObjectId(obj.Data)
- return nil
-}
-
-// MarshalBSONValue is an interface implemented by types that can marshal themselves
-// into a BSON document represented as bytes. The bytes returned must be a valid
-// BSON document if the error is nil.
-func (id ObjectId) MarshalBSONValue() (bsontype.Type, []byte, error) {
- oid := bsonprim.ObjectID(id)
- return bson.TypeObjectID, oid[:], nil
-}
-
-// UnmarshalBSONValue is an interface implemented by types that can unmarshal a
-// BSON value representation of themselves. The BSON bytes and type can be
-// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
-// wishes to retain the data after returning.
-func (id *ObjectId) UnmarshalBSONValue(_ bsontype.Type, data []byte) error {
- var oid bsonprim.ObjectID
- copy(oid[:], data)
- *id = ObjectId(oid)
- return nil
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (id *ObjectId) DeepCopyInto(out *ObjectId) {
- *out = *id
-}
-
-// DeepCopy copies the receiver into a new ObjectId.
-func (id *ObjectId) DeepCopy() *ObjectId {
- if id == nil {
- return nil
- }
- out := new(ObjectId)
- id.DeepCopyInto(out)
- return out
-}
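
As a rough illustration of the ObjectId helpers deleted above (same assumed import path; the 24-character hex value is made up):

    package main

    import (
        "fmt"

        "github.com/go-openapi/strfmt"
    )

    func main() {
        const hex = "64f0c8aab1c1a2d3e4f5a6b7" // 12 bytes, 24 hex characters

        fmt.Println(strfmt.IsBSONObjectID(hex)) // true

        id := strfmt.NewObjectId(hex) // panics on an invalid hex string
        txt, _ := id.MarshalText()    // hex form; empty for the nil ObjectID
        fmt.Println(string(txt) == hex, id.String() == hex) // true true
    }
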
diff --git a/vendor/github.com/go-openapi/strfmt/date.go b/vendor/github.com/go-openapi/strfmt/date.go
deleted file mode 100644
index a8f52ff354..0000000000
--- a/vendor/github.com/go-openapi/strfmt/date.go
+++ /dev/null
@@ -1,186 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "encoding/json"
- "fmt"
- "time"
-
- "go.mongodb.org/mongo-driver/bson"
-)
-
-func init() {
- d := Date{}
- // register this format in the default registry
- Default.Add("date", &d, IsDate)
-}
-
-// IsDate returns true when the string is a valid date
-func IsDate(str string) bool {
- _, err := time.Parse(RFC3339FullDate, str)
- return err == nil
-}
-
-const (
- // RFC3339FullDate represents a full-date as specified by RFC3339
- // See: http://goo.gl/xXOvVd
- RFC3339FullDate = "2006-01-02"
-)
-
-// Date represents a date from the API
-//
-// swagger:strfmt date
-type Date time.Time
-
-// String converts this date into a string
-func (d Date) String() string {
- return time.Time(d).Format(RFC3339FullDate)
-}
-
-// UnmarshalText parses a text representation into a date type
-func (d *Date) UnmarshalText(text []byte) error {
- if len(text) == 0 {
- return nil
- }
- dd, err := time.ParseInLocation(RFC3339FullDate, string(text), DefaultTimeLocation)
- if err != nil {
- return err
- }
- *d = Date(dd)
- return nil
-}
-
-// MarshalText serializes this date type to string
-func (d Date) MarshalText() ([]byte, error) {
- return []byte(d.String()), nil
-}
-
-// Scan scans a Date value from database driver type.
-func (d *Date) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- return d.UnmarshalText(v)
- case string:
- return d.UnmarshalText([]byte(v))
- case time.Time:
- *d = Date(v)
- return nil
- case nil:
- *d = Date{}
- return nil
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Date from: %#v: %w", v, ErrFormat)
- }
-}
-
-// Value converts Date to a primitive value ready to written to a database.
-func (d Date) Value() (driver.Value, error) {
- return driver.Value(d.String()), nil
-}
-
-// MarshalJSON returns the Date as JSON
-func (d Date) MarshalJSON() ([]byte, error) {
- return json.Marshal(time.Time(d).Format(RFC3339FullDate))
-}
-
-// UnmarshalJSON sets the Date from JSON
-func (d *Date) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var strdate string
- if err := json.Unmarshal(data, &strdate); err != nil {
- return err
- }
- tt, err := time.ParseInLocation(RFC3339FullDate, strdate, DefaultTimeLocation)
- if err != nil {
- return err
- }
- *d = Date(tt)
- return nil
-}
-
-func (d Date) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": d.String()})
-}
-
-func (d *Date) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- rd, err := time.ParseInLocation(RFC3339FullDate, data, DefaultTimeLocation)
- if err != nil {
- return err
- }
- *d = Date(rd)
- return nil
- }
-
- return fmt.Errorf("couldn't unmarshal bson bytes value as Date: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (d *Date) DeepCopyInto(out *Date) {
- *out = *d
-}
-
-// DeepCopy copies the receiver into a new Date.
-func (d *Date) DeepCopy() *Date {
- if d == nil {
- return nil
- }
- out := new(Date)
- d.DeepCopyInto(out)
- return out
-}
-
-// GobEncode implements the gob.GobEncoder interface.
-func (d Date) GobEncode() ([]byte, error) {
- return d.MarshalBinary()
-}
-
-// GobDecode implements the gob.GobDecoder interface.
-func (d *Date) GobDecode(data []byte) error {
- return d.UnmarshalBinary(data)
-}
-
-// MarshalBinary implements the encoding.BinaryMarshaler interface.
-func (d Date) MarshalBinary() ([]byte, error) {
- return time.Time(d).MarshalBinary()
-}
-
-// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
-func (d *Date) UnmarshalBinary(data []byte) error {
- var original time.Time
-
- err := original.UnmarshalBinary(data)
- if err != nil {
- return err
- }
-
- *d = Date(original)
-
- return nil
-}
-
-// Equal checks if two Date instances are equal
-func (d Date) Equal(d2 Date) bool {
- return time.Time(d).Equal(time.Time(d2))
-}
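
A short sketch of the Date round trip implemented above (same assumed import path; the date value is illustrative):

    package main

    import (
        "fmt"
        "time"

        "github.com/go-openapi/strfmt"
    )

    func main() {
        var d strfmt.Date
        if err := d.UnmarshalText([]byte("2024-02-29")); err != nil { // RFC3339 full-date
            panic(err)
        }

        js, _ := d.MarshalJSON()
        fmt.Println(string(js))                         // "2024-02-29"
        fmt.Println(time.Time(d).Weekday())             // Thursday
        fmt.Println(d.Equal(strfmt.Date(time.Time(d)))) // true
    }
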
diff --git a/vendor/github.com/go-openapi/strfmt/default.go b/vendor/github.com/go-openapi/strfmt/default.go
deleted file mode 100644
index 0c9514dbd5..0000000000
--- a/vendor/github.com/go-openapi/strfmt/default.go
+++ /dev/null
@@ -1,2258 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "encoding/base64"
- "encoding/json"
- "fmt"
- "net/mail"
- "net/netip"
- "strconv"
- "strings"
-
- "github.com/asaskevich/govalidator"
- "github.com/google/uuid"
- "go.mongodb.org/mongo-driver/bson"
- "golang.org/x/net/idna"
-)
-
-const (
- // HostnamePattern http://json-schema.org/latest/json-schema-validation.html#anchor114.
- //
- // Deprecated: this package no longer uses regular expressions to validate hostnames.
- HostnamePattern = `^([a-zA-Z0-9\p{S}\p{L}]((-?[a-zA-Z0-9\p{S}\p{L}]{0,62})?)|([a-zA-Z0-9\p{S}\p{L}](([a-zA-Z0-9-\p{S}\p{L}]{0,61}[a-zA-Z0-9\p{S}\p{L}])?)(\.)){1,}([a-zA-Z0-9-\p{L}]){2,63})$`
-
- // json null type
- jsonNull = "null"
-)
-
-const (
- // UUIDPattern Regex for UUID that allows uppercase
- //
- // Deprecated: strfmt no longer uses regular expressions to validate UUIDs.
- UUIDPattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$)|(^[0-9a-f]{32}$)`
-
- // UUID3Pattern Regex for UUID3 that allows uppercase
- //
- // Deprecated: strfmt no longer uses regular expressions to validate UUIDs.
- UUID3Pattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$)|(^[0-9a-f]{12}3[0-9a-f]{3}?[0-9a-f]{16}$)`
-
- // UUID4Pattern Regex for UUID4 that allows uppercase
- //
- // Deprecated: strfmt no longer uses regular expressions to validate UUIDs.
- UUID4Pattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$)|(^[0-9a-f]{12}4[0-9a-f]{3}[89ab][0-9a-f]{15}$)`
-
- // UUID5Pattern Regex for UUID5 that allows uppercase
- //
- // Deprecated: strfmt no longer uses regular expressions to validate UUIDs.
- UUID5Pattern = `(?i)(^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$)|(^[0-9a-f]{12}5[0-9a-f]{3}[89ab][0-9a-f]{15}$)`
-)
-
-var idnaHostChecker = idna.New(
- idna.ValidateForRegistration(), // shorthand for [idna.StrictDomainName], [idna.ValidateLabels], [idna.VerifyDNSLength], [idna.BidiRule]
-)
-
-// IsHostname returns true when the string is a valid hostname.
-//
-// It follows the rules detailed at https://url.spec.whatwg.org/#concept-host-parser
-// and implemented by most modern web browsers.
-//
-// It supports IDNA rules regarding internationalized names with unicode.
-//
-// Besides:
-// * the empty string is not a valid host name
-// * a trailing dot is allowed in names and IPv4's (not IPv6)
-// * a host name can be a valid IPv4 (with decimal, octal or hexadecimal numbers) or IPv6 address
-// * IPv6 zones are disallowed
-// * top-level domains can be unicode (cf. https://www.iana.org/domains/root/db).
-//
-// NOTE: this validator doesn't check top-level domains against the IANA root database.
-// It merely ensures that a top-level domain in a FQDN is at least 2 code points long.
-func IsHostname(str string) bool {
- if len(str) == 0 {
- return false
- }
-
- // IP v6 check
- if ipv6Cleaned, found := strings.CutPrefix(str, "["); found {
- ipv6Cleaned, found = strings.CutSuffix(ipv6Cleaned, "]")
- if !found {
- return false
- }
-
- return isValidIPv6(ipv6Cleaned)
- }
-
- // IDNA check
- res, err := idnaHostChecker.ToASCII(strings.ToLower(str))
- if err != nil || res == "" {
- return false
- }
-
- parts := strings.Split(res, ".")
-
- // IP v4 check
- lastPart, lastIndex, shouldBeIPv4 := domainEndsAsNumber(parts)
- if shouldBeIPv4 {
- // domain ends in a number: must be an IPv4
- return isValidIPv4(parts[:lastIndex+1]) // if the last part is a trailing dot, remove it
- }
-
- // check TLD length (excluding trailing dot)
- const minTLDLength = 2
- if lastIndex > 0 && len(lastPart) < minTLDLength {
- return false
- }
-
- return true
-}
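
The rules listed in the doc comment above can be illustrated with a few calls (a sketch with the same assumed import path; the inputs are illustrative):

    package main

    import (
        "fmt"

        "github.com/go-openapi/strfmt"
    )

    func main() {
        fmt.Println(strfmt.IsHostname("example.com."))       // true: trailing dot is allowed
        fmt.Println(strfmt.IsHostname("[2001:db8::1]"))      // true: bracketed IPv6 literal
        fmt.Println(strfmt.IsHostname("[2001:db8::1%eth0]")) // false: IPv6 zones are rejected
        fmt.Println(strfmt.IsHostname(""))                   // false: the empty string is not a host
    }
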
-
-// domainEndsAsNumber determines if a domain name ends with a decimal, octal or hex digit,
-// accounting for a possible trailing dot (the last part being empty in that case).
-//
-// It returns the last non-trailing dot part and if that part consists only of (dec/hex/oct) digits.
-func domainEndsAsNumber(parts []string) (lastPart string, lastIndex int, ok bool) {
- // NOTE: using ParseUint(x, 0, 32) is not an option, as the IPv4 format supported by WHATWG
- // doesn't support notations such as "0b1001" (binary digits) or "0o666" (alternate notation for octal digits).
- lastIndex = len(parts) - 1
- lastPart = parts[lastIndex]
- if len(lastPart) == 0 {
- // trailing dot
- if len(parts) == 1 { // dot-only string: normally already ruled out by the IDNA check above
- return lastPart, lastIndex, false
- }
-
- lastIndex--
- lastPart = parts[lastIndex]
- }
-
- if startOfHexDigit(lastPart) {
- for _, b := range []byte(lastPart[2:]) {
- if !isHexDigit(b) {
- return lastPart, lastIndex, false
- }
- }
-
- return lastPart, lastIndex, true
- }
-
- // check for decimal and octal
- for _, b := range []byte(lastPart) {
- if !isASCIIDigit(b) {
- return lastPart, lastIndex, false
- }
- }
-
- return lastPart, lastIndex, true
-}
-
-func startOfHexDigit(str string) bool {
- return strings.HasPrefix(str, "0x") // the input has already been lower-cased
-}
-
-func startOfOctalDigit(str string) bool {
- if str == "0" {
- // a single "0" is considered decimal
- return false
- }
-
- return strings.HasPrefix(str, "0")
-}
-
-func isValidIPv6(str string) bool {
- // disallow empty ipv6 address
- if len(str) == 0 {
- return false
- }
-
- addr, err := netip.ParseAddr(str)
- if err != nil {
- return false
- }
-
- if !addr.Is6() {
- return false
- }
-
- // explicitly refuse IPv6 zones
- if addr.Zone() != "" {
- return false
- }
-
- return true
-}
-
-// isValidIPv4 parses an IPv4 with decimal, hex or octal digit parts.
-//
-// We can't rely on [netip.ParseAddr] because we may get a mix of decimal, octal and hex digits.
-//
-// Examples of valid addresses not supported by [netip.ParseAddr] or [net.ParseIP]:
-//
-// "192.0x00A80001"
-// "0300.0250.0340.001"
-// "1.0x.1.1"
-//
-// But not:
-//
-// "0b1010.2.3.4"
-// "0o07.2.3.4"
-func isValidIPv4(parts []string) bool {
- // NOTE: using ParseUint(x, 0, 32) is not an option, even though it would simplify this code a lot.
- // The IPv4 format supported why WHATWG doesn't support notations such as "0b1001" (binary digits)
- // or "0o666" (alternate notation for octal digits).
- const (
- maxPartsInIPv4 = 4
- maxDigitsInPart = 11 // max size of a 4-bytes hex or octal digit
- )
-
- if len(parts) == 0 || len(parts) > maxPartsInIPv4 {
- return false
- }
-
- // we call this when we know that the last part is a digit part, so len(lastPart)>0
-
- digits := make([]uint64, 0, maxPartsInIPv4)
- for _, part := range parts {
- if len(part) == 0 { // empty part: this case has normally been already ruled out by the IDNA check above
- return false
- }
-
- if len(part) > maxDigitsInPart { // whether decimal, octal or hex, an address can't exceed that length
- return false
- }
-
- if !isASCIIDigit(part[0]) { // start of an IPv4 part is always a digit
- return false
- }
-
- switch {
- case startOfHexDigit(part):
- const hexDigitOffset = 2
- hexString := part[hexDigitOffset:]
- if len(hexString) == 0 { // 0x part: assume 0
- digits = append(digits, 0)
-
- continue
- }
-
- hexDigit, err := strconv.ParseUint(hexString, 16, 32)
- if err != nil {
- return false
- }
-
- digits = append(digits, hexDigit)
-
- continue
-
- case startOfOctalDigit(part):
- const octDigitOffset = 1
- octString := part[octDigitOffset:] // we know that this is not empty
- octDigit, err := strconv.ParseUint(octString, 8, 32)
- if err != nil {
- return false
- }
-
- digits = append(digits, octDigit)
-
- default: // assume decimal digits (0-255)
- // we know that we don't have a leading 0 (would have been caught by octal digit)
- decDigit, err := strconv.ParseUint(part, 10, 8)
- if err != nil {
- return false
- }
-
- digits = append(digits, decDigit)
- }
- }
-
- // now check the digits: the last digit may encompass several parts of the address
- lastDigit := digits[len(digits)-1]
- if lastDigit > uint64(1)<<(8*(maxPartsInIPv4-len(digits)+1))-1 {
- return false
- }
-
- if len(digits) > 1 {
- const maxUint8 = uint64(^uint8(0))
-
- for i := 0; i < len(digits)-2; i++ {
- if digits[i] > maxUint8 {
- return false
- }
- }
- }
-
- return true
-}
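
To see why this helper cannot simply delegate to the standard library, compare the exported IsHostname (which relies on it) with netip (a sketch; the address is one of the examples from the comment above):

    package main

    import (
        "fmt"
        "net/netip"

        "github.com/go-openapi/strfmt"
    )

    func main() {
        const host = "192.0x00A80001" // mixed decimal and hex parts

        _, err := netip.ParseAddr(host)
        fmt.Println(err != nil) // true: netip only accepts plain dotted-decimal IPv4

        fmt.Println(strfmt.IsHostname(host)) // true: the WHATWG-style check treats it as an IPv4
    }
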
-
-func isHexDigit(c byte) bool {
- switch {
- case '0' <= c && c <= '9':
- return true
- case 'a' <= c && c <= 'f': // assume the input string to be lower case
- return true
- }
- return false
-}
-
-func isASCIIDigit(c byte) bool {
- return c >= '0' && c <= '9'
-}
-
-// IsUUID returns true if the string matches a UUID (in any version, including v6 and v7), upper case is allowed
-func IsUUID(str string) bool {
- _, err := uuid.Parse(str)
- return err == nil
-}
-
-const (
- uuidV3 = 3
- uuidV4 = 4
- uuidV5 = 5
-)
-
-// IsUUID3 returns true if the string matches a UUID v3, upper case is allowed
-func IsUUID3(str string) bool {
- id, err := uuid.Parse(str)
- return err == nil && id.Version() == uuid.Version(uuidV3)
-}
-
-// IsUUID4 returns true if the string matches a UUID v4, upper case is allowed
-func IsUUID4(str string) bool {
- id, err := uuid.Parse(str)
- return err == nil && id.Version() == uuid.Version(uuidV4)
-}
-
-// IsUUID5 returns true if the string matches a UUID v5, upper case is allowed
-func IsUUID5(str string) bool {
- id, err := uuid.Parse(str)
- return err == nil && id.Version() == uuid.Version(uuidV5)
-}
-
-// IsEmail validates an email address.
-func IsEmail(str string) bool {
- addr, e := mail.ParseAddress(str)
- return e == nil && addr.Address != ""
-}
-
-func init() {
- // register formats in the default registry:
- // - byte
- // - creditcard
- // - email
- // - hexcolor
- // - hostname
- // - ipv4
- // - ipv6
- // - cidr
- // - isbn
- // - isbn10
- // - isbn13
- // - mac
- // - password
- // - rgbcolor
- // - ssn
- // - uri
- // - uuid
- // - uuid3
- // - uuid4
- // - uuid5
- u := URI("")
- Default.Add("uri", &u, govalidator.IsRequestURI)
-
- eml := Email("")
- Default.Add("email", &eml, IsEmail)
-
- hn := Hostname("")
- Default.Add("hostname", &hn, IsHostname)
-
- ip4 := IPv4("")
- Default.Add("ipv4", &ip4, govalidator.IsIPv4)
-
- ip6 := IPv6("")
- Default.Add("ipv6", &ip6, govalidator.IsIPv6)
-
- cidr := CIDR("")
- Default.Add("cidr", &cidr, govalidator.IsCIDR)
-
- mac := MAC("")
- Default.Add("mac", &mac, govalidator.IsMAC)
-
- uid := UUID("")
- Default.Add("uuid", &uid, IsUUID)
-
- uid3 := UUID3("")
- Default.Add("uuid3", &uid3, IsUUID3)
-
- uid4 := UUID4("")
- Default.Add("uuid4", &uid4, IsUUID4)
-
- uid5 := UUID5("")
- Default.Add("uuid5", &uid5, IsUUID5)
-
- isbn := ISBN("")
- Default.Add("isbn", &isbn, func(str string) bool { return govalidator.IsISBN10(str) || govalidator.IsISBN13(str) })
-
- isbn10 := ISBN10("")
- Default.Add("isbn10", &isbn10, govalidator.IsISBN10)
-
- isbn13 := ISBN13("")
- Default.Add("isbn13", &isbn13, govalidator.IsISBN13)
-
- cc := CreditCard("")
- Default.Add("creditcard", &cc, govalidator.IsCreditCard)
-
- ssn := SSN("")
- Default.Add("ssn", &ssn, govalidator.IsSSN)
-
- hc := HexColor("")
- Default.Add("hexcolor", &hc, govalidator.IsHexcolor)
-
- rc := RGBColor("")
- Default.Add("rgbcolor", &rc, govalidator.IsRGBcolor)
-
- b64 := Base64([]byte(nil))
- Default.Add("byte", &b64, govalidator.IsBase64)
-
- pw := Password("")
- Default.Add("password", &pw, func(_ string) bool { return true })
-}
-
-// Base64 represents a base64 encoded string, using URLEncoding alphabet
-//
-// swagger:strfmt byte
-type Base64 []byte
-
-// MarshalText turns this instance into text
-func (b Base64) MarshalText() ([]byte, error) {
- enc := base64.URLEncoding
- src := []byte(b)
- buf := make([]byte, enc.EncodedLen(len(src)))
- enc.Encode(buf, src)
- return buf, nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (b *Base64) UnmarshalText(data []byte) error { // validation is performed later on
- enc := base64.URLEncoding
- dbuf := make([]byte, enc.DecodedLen(len(data)))
-
- n, err := enc.Decode(dbuf, data)
- if err != nil {
- return err
- }
-
- *b = dbuf[:n]
- return nil
-}
-
-// Scan read a value from a database driver
-func (b *Base64) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- dbuf := make([]byte, base64.StdEncoding.DecodedLen(len(v)))
- n, err := base64.StdEncoding.Decode(dbuf, v)
- if err != nil {
- return err
- }
- *b = dbuf[:n]
- case string:
- vv, err := base64.StdEncoding.DecodeString(v)
- if err != nil {
- return err
- }
- *b = Base64(vv)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Base64 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (b Base64) Value() (driver.Value, error) {
- return driver.Value(b.String()), nil
-}
-
-func (b Base64) String() string {
- return base64.StdEncoding.EncodeToString([]byte(b))
-}
-
-// MarshalJSON returns the Base64 as JSON
-func (b Base64) MarshalJSON() ([]byte, error) {
- return json.Marshal(b.String())
-}
-
-// UnmarshalJSON sets the Base64 from JSON
-func (b *Base64) UnmarshalJSON(data []byte) error {
- var b64str string
- if err := json.Unmarshal(data, &b64str); err != nil {
- return err
- }
- vb, err := base64.StdEncoding.DecodeString(b64str)
- if err != nil {
- return err
- }
- *b = Base64(vb)
- return nil
-}
-
-// MarshalBSON document from this value
-func (b Base64) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": b.String()})
-}
-
-// UnmarshalBSON document into this value
-func (b *Base64) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if bd, ok := m["data"].(string); ok {
- vb, err := base64.StdEncoding.DecodeString(bd)
- if err != nil {
- return err
- }
- *b = Base64(vb)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as base64: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (b *Base64) DeepCopyInto(out *Base64) {
- *out = *b
-}
-
-// DeepCopy copies the receiver into a new Base64.
-func (b *Base64) DeepCopy() *Base64 {
- if b == nil {
- return nil
- }
- out := new(Base64)
- b.DeepCopyInto(out)
- return out
-}
-
-// URI represents the uri string format as specified by the json schema spec
-//
-// swagger:strfmt uri
-type URI string
-
-// MarshalText turns this instance into text
-func (u URI) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *URI) UnmarshalText(data []byte) error { // validation is performed later on
- *u = URI(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *URI) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = URI(string(v))
- case string:
- *u = URI(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.URI from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u URI) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u URI) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the URI as JSON
-func (u URI) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the URI from JSON
-func (u *URI) UnmarshalJSON(data []byte) error {
- var uristr string
- if err := json.Unmarshal(data, &uristr); err != nil {
- return err
- }
- *u = URI(uristr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u URI) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *URI) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = URI(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as uri: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *URI) DeepCopyInto(out *URI) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new URI.
-func (u *URI) DeepCopy() *URI {
- if u == nil {
- return nil
- }
- out := new(URI)
- u.DeepCopyInto(out)
- return out
-}
-
-// Email represents the email string format as specified by the json schema spec
-//
-// swagger:strfmt email
-type Email string
-
-// MarshalText turns this instance into text
-func (e Email) MarshalText() ([]byte, error) {
- return []byte(string(e)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (e *Email) UnmarshalText(data []byte) error { // validation is performed later on
- *e = Email(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (e *Email) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *e = Email(string(v))
- case string:
- *e = Email(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Email from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (e Email) Value() (driver.Value, error) {
- return driver.Value(string(e)), nil
-}
-
-func (e Email) String() string {
- return string(e)
-}
-
-// MarshalJSON returns the Email as JSON
-func (e Email) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(e))
-}
-
-// UnmarshalJSON sets the Email from JSON
-func (e *Email) UnmarshalJSON(data []byte) error {
- var estr string
- if err := json.Unmarshal(data, &estr); err != nil {
- return err
- }
- *e = Email(estr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (e Email) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": e.String()})
-}
-
-// UnmarshalBSON document into this value
-func (e *Email) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *e = Email(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as email: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (e *Email) DeepCopyInto(out *Email) {
- *out = *e
-}
-
-// DeepCopy copies the receiver into a new Email.
-func (e *Email) DeepCopy() *Email {
- if e == nil {
- return nil
- }
- out := new(Email)
- e.DeepCopyInto(out)
- return out
-}
-
-// Hostname represents the hostname string format as specified by the json schema spec
-//
-// swagger:strfmt hostname
-type Hostname string
-
-// MarshalText turns this instance into text
-func (h Hostname) MarshalText() ([]byte, error) {
- return []byte(string(h)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (h *Hostname) UnmarshalText(data []byte) error { // validation is performed later on
- *h = Hostname(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (h *Hostname) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *h = Hostname(string(v))
- case string:
- *h = Hostname(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Hostname from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (h Hostname) Value() (driver.Value, error) {
- return driver.Value(string(h)), nil
-}
-
-func (h Hostname) String() string {
- return string(h)
-}
-
-// MarshalJSON returns the Hostname as JSON
-func (h Hostname) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(h))
-}
-
-// UnmarshalJSON sets the Hostname from JSON
-func (h *Hostname) UnmarshalJSON(data []byte) error {
- var hstr string
- if err := json.Unmarshal(data, &hstr); err != nil {
- return err
- }
- *h = Hostname(hstr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (h Hostname) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": h.String()})
-}
-
-// UnmarshalBSON document into this value
-func (h *Hostname) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *h = Hostname(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as hostname: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (h *Hostname) DeepCopyInto(out *Hostname) {
- *out = *h
-}
-
-// DeepCopy copies the receiver into a new Hostname.
-func (h *Hostname) DeepCopy() *Hostname {
- if h == nil {
- return nil
- }
- out := new(Hostname)
- h.DeepCopyInto(out)
- return out
-}
-
-// IPv4 represents an IP v4 address
-//
-// swagger:strfmt ipv4
-type IPv4 string
-
-// MarshalText turns this instance into text
-func (u IPv4) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *IPv4) UnmarshalText(data []byte) error { // validation is performed later on
- *u = IPv4(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *IPv4) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = IPv4(string(v))
- case string:
- *u = IPv4(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.IPv4 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u IPv4) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u IPv4) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the IPv4 as JSON
-func (u IPv4) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the IPv4 from JSON
-func (u *IPv4) UnmarshalJSON(data []byte) error {
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = IPv4(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u IPv4) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *IPv4) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = IPv4(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as ipv4: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *IPv4) DeepCopyInto(out *IPv4) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new IPv4.
-func (u *IPv4) DeepCopy() *IPv4 {
- if u == nil {
- return nil
- }
- out := new(IPv4)
- u.DeepCopyInto(out)
- return out
-}
-
-// IPv6 represents an IP v6 address
-//
-// swagger:strfmt ipv6
-type IPv6 string
-
-// MarshalText turns this instance into text
-func (u IPv6) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *IPv6) UnmarshalText(data []byte) error { // validation is performed later on
- *u = IPv6(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *IPv6) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = IPv6(string(v))
- case string:
- *u = IPv6(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.IPv6 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u IPv6) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u IPv6) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the IPv6 as JSON
-func (u IPv6) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the IPv6 from JSON
-func (u *IPv6) UnmarshalJSON(data []byte) error {
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = IPv6(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u IPv6) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *IPv6) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = IPv6(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as ipv6: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *IPv6) DeepCopyInto(out *IPv6) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new IPv6.
-func (u *IPv6) DeepCopy() *IPv6 {
- if u == nil {
- return nil
- }
- out := new(IPv6)
- u.DeepCopyInto(out)
- return out
-}
-
-// CIDR represents a Classless Inter-Domain Routing notation
-//
-// swagger:strfmt cidr
-type CIDR string
-
-// MarshalText turns this instance into text
-func (u CIDR) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *CIDR) UnmarshalText(data []byte) error { // validation is performed later on
- *u = CIDR(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *CIDR) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = CIDR(string(v))
- case string:
- *u = CIDR(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.CIDR from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u CIDR) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u CIDR) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the CIDR as JSON
-func (u CIDR) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the CIDR from JSON
-func (u *CIDR) UnmarshalJSON(data []byte) error {
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = CIDR(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u CIDR) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *CIDR) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = CIDR(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as CIDR: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *CIDR) DeepCopyInto(out *CIDR) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new CIDR.
-func (u *CIDR) DeepCopy() *CIDR {
- if u == nil {
- return nil
- }
- out := new(CIDR)
- u.DeepCopyInto(out)
- return out
-}
-
-// MAC represents a 48 bit MAC address
-//
-// swagger:strfmt mac
-type MAC string
-
-// MarshalText turns this instance into text
-func (u MAC) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *MAC) UnmarshalText(data []byte) error { // validation is performed later on
- *u = MAC(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *MAC) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = MAC(string(v))
- case string:
- *u = MAC(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.IPv4 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u MAC) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u MAC) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the MAC as JSON
-func (u MAC) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the MAC from JSON
-func (u *MAC) UnmarshalJSON(data []byte) error {
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = MAC(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u MAC) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *MAC) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = MAC(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as MAC: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *MAC) DeepCopyInto(out *MAC) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new MAC.
-func (u *MAC) DeepCopy() *MAC {
- if u == nil {
- return nil
- }
- out := new(MAC)
- u.DeepCopyInto(out)
- return out
-}
-
-// UUID represents a uuid string format
-//
-// swagger:strfmt uuid
-type UUID string
-
-// MarshalText turns this instance into text
-func (u UUID) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *UUID) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID(string(v))
- case string:
- *u = UUID(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the UUID as JSON
-func (u UUID) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the UUID from JSON
-func (u *UUID) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = UUID(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u UUID) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *UUID) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = UUID(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as UUID: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *UUID) DeepCopyInto(out *UUID) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new UUID.
-func (u *UUID) DeepCopy() *UUID {
- if u == nil {
- return nil
- }
- out := new(UUID)
- u.DeepCopyInto(out)
- return out
-}
-
-// UUID3 represents a uuid3 string format
-//
-// swagger:strfmt uuid3
-type UUID3 string
-
-// MarshalText turns this instance into text
-func (u UUID3) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID3) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID3(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *UUID3) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID3(string(v))
- case string:
- *u = UUID3(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID3 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID3) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID3) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the UUID as JSON
-func (u UUID3) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the UUID from JSON
-func (u *UUID3) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = UUID3(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u UUID3) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *UUID3) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = UUID3(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as UUID3: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *UUID3) DeepCopyInto(out *UUID3) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new UUID3.
-func (u *UUID3) DeepCopy() *UUID3 {
- if u == nil {
- return nil
- }
- out := new(UUID3)
- u.DeepCopyInto(out)
- return out
-}
-
-// UUID4 represents a uuid4 string format
-//
-// swagger:strfmt uuid4
-type UUID4 string
-
-// MarshalText turns this instance into text
-func (u UUID4) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID4) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID4(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *UUID4) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID4(string(v))
- case string:
- *u = UUID4(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID4 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID4) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID4) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the UUID as JSON
-func (u UUID4) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the UUID from JSON
-func (u *UUID4) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = UUID4(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u UUID4) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *UUID4) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = UUID4(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as UUID4: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *UUID4) DeepCopyInto(out *UUID4) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new UUID4.
-func (u *UUID4) DeepCopy() *UUID4 {
- if u == nil {
- return nil
- }
- out := new(UUID4)
- u.DeepCopyInto(out)
- return out
-}
-
-// UUID5 represents a uuid5 string format
-//
-// swagger:strfmt uuid5
-type UUID5 string
-
-// MarshalText turns this instance into text
-func (u UUID5) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID5) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID5(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *UUID5) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID5(string(v))
- case string:
- *u = UUID5(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID5 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID5) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID5) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the UUID as JSON
-func (u UUID5) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the UUID from JSON
-func (u *UUID5) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = UUID5(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u UUID5) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *UUID5) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = UUID5(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as UUID5: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *UUID5) DeepCopyInto(out *UUID5) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new UUID5.
-func (u *UUID5) DeepCopy() *UUID5 {
- if u == nil {
- return nil
- }
- out := new(UUID5)
- u.DeepCopyInto(out)
- return out
-}
-
-// ISBN represents an isbn string format
-//
-// swagger:strfmt isbn
-type ISBN string
-
-// MarshalText turns this instance into text
-func (u ISBN) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ISBN) UnmarshalText(data []byte) error { // validation is performed later on
- *u = ISBN(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *ISBN) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = ISBN(string(v))
- case string:
- *u = ISBN(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ISBN from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u ISBN) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u ISBN) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the ISBN as JSON
-func (u ISBN) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the ISBN from JSON
-func (u *ISBN) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = ISBN(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u ISBN) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *ISBN) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = ISBN(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as ISBN: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *ISBN) DeepCopyInto(out *ISBN) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new ISBN.
-func (u *ISBN) DeepCopy() *ISBN {
- if u == nil {
- return nil
- }
- out := new(ISBN)
- u.DeepCopyInto(out)
- return out
-}
-
-// ISBN10 represents an isbn 10 string format
-//
-// swagger:strfmt isbn10
-type ISBN10 string
-
-// MarshalText turns this instance into text
-func (u ISBN10) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ISBN10) UnmarshalText(data []byte) error { // validation is performed later on
- *u = ISBN10(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *ISBN10) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = ISBN10(string(v))
- case string:
- *u = ISBN10(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ISBN10 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u ISBN10) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u ISBN10) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the ISBN10 as JSON
-func (u ISBN10) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the ISBN10 from JSON
-func (u *ISBN10) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = ISBN10(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u ISBN10) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *ISBN10) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = ISBN10(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as ISBN10: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *ISBN10) DeepCopyInto(out *ISBN10) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new ISBN10.
-func (u *ISBN10) DeepCopy() *ISBN10 {
- if u == nil {
- return nil
- }
- out := new(ISBN10)
- u.DeepCopyInto(out)
- return out
-}
-
-// ISBN13 represents an isbn 13 string format
-//
-// swagger:strfmt isbn13
-type ISBN13 string
-
-// MarshalText turns this instance into text
-func (u ISBN13) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ISBN13) UnmarshalText(data []byte) error { // validation is performed later on
- *u = ISBN13(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *ISBN13) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = ISBN13(string(v))
- case string:
- *u = ISBN13(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ISBN13 from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u ISBN13) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u ISBN13) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the ISBN13 as JSON
-func (u ISBN13) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the ISBN13 from JSON
-func (u *ISBN13) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = ISBN13(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u ISBN13) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *ISBN13) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = ISBN13(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as ISBN13: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *ISBN13) DeepCopyInto(out *ISBN13) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new ISBN13.
-func (u *ISBN13) DeepCopy() *ISBN13 {
- if u == nil {
- return nil
- }
- out := new(ISBN13)
- u.DeepCopyInto(out)
- return out
-}
-
-// CreditCard represents a credit card string format
-//
-// swagger:strfmt creditcard
-type CreditCard string
-
-// MarshalText turns this instance into text
-func (u CreditCard) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *CreditCard) UnmarshalText(data []byte) error { // validation is performed later on
- *u = CreditCard(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *CreditCard) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = CreditCard(string(v))
- case string:
- *u = CreditCard(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.CreditCard from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u CreditCard) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u CreditCard) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the CreditCard as JSON
-func (u CreditCard) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the CreditCard from JSON
-func (u *CreditCard) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = CreditCard(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u CreditCard) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *CreditCard) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = CreditCard(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as CreditCard: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *CreditCard) DeepCopyInto(out *CreditCard) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new CreditCard.
-func (u *CreditCard) DeepCopy() *CreditCard {
- if u == nil {
- return nil
- }
- out := new(CreditCard)
- u.DeepCopyInto(out)
- return out
-}
-
-// SSN represents a social security string format
-//
-// swagger:strfmt ssn
-type SSN string
-
-// MarshalText turns this instance into text
-func (u SSN) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *SSN) UnmarshalText(data []byte) error { // validation is performed later on
- *u = SSN(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (u *SSN) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = SSN(string(v))
- case string:
- *u = SSN(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.SSN from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u SSN) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u SSN) String() string {
- return string(u)
-}
-
-// MarshalJSON returns the SSN as JSON
-func (u SSN) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(u))
-}
-
-// UnmarshalJSON sets the SSN from JSON
-func (u *SSN) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *u = SSN(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (u SSN) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *SSN) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *u = SSN(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as SSN: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *SSN) DeepCopyInto(out *SSN) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new SSN.
-func (u *SSN) DeepCopy() *SSN {
- if u == nil {
- return nil
- }
- out := new(SSN)
- u.DeepCopyInto(out)
- return out
-}
-
-// HexColor represents a hex color string format
-//
-// swagger:strfmt hexcolor
-type HexColor string
-
-// MarshalText turns this instance into text
-func (h HexColor) MarshalText() ([]byte, error) {
- return []byte(string(h)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (h *HexColor) UnmarshalText(data []byte) error { // validation is performed later on
- *h = HexColor(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (h *HexColor) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *h = HexColor(string(v))
- case string:
- *h = HexColor(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.HexColor from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (h HexColor) Value() (driver.Value, error) {
- return driver.Value(string(h)), nil
-}
-
-func (h HexColor) String() string {
- return string(h)
-}
-
-// MarshalJSON returns the HexColor as JSON
-func (h HexColor) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(h))
-}
-
-// UnmarshalJSON sets the HexColor from JSON
-func (h *HexColor) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *h = HexColor(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (h HexColor) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": h.String()})
-}
-
-// UnmarshalBSON document into this value
-func (h *HexColor) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *h = HexColor(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as HexColor: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (h *HexColor) DeepCopyInto(out *HexColor) {
- *out = *h
-}
-
-// DeepCopy copies the receiver into a new HexColor.
-func (h *HexColor) DeepCopy() *HexColor {
- if h == nil {
- return nil
- }
- out := new(HexColor)
- h.DeepCopyInto(out)
- return out
-}
-
-// RGBColor represents a RGB color string format
-//
-// swagger:strfmt rgbcolor
-type RGBColor string
-
-// MarshalText turns this instance into text
-func (r RGBColor) MarshalText() ([]byte, error) {
- return []byte(string(r)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (r *RGBColor) UnmarshalText(data []byte) error { // validation is performed later on
- *r = RGBColor(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (r *RGBColor) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *r = RGBColor(string(v))
- case string:
- *r = RGBColor(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.RGBColor from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (r RGBColor) Value() (driver.Value, error) {
- return driver.Value(string(r)), nil
-}
-
-func (r RGBColor) String() string {
- return string(r)
-}
-
-// MarshalJSON returns the RGBColor as JSON
-func (r RGBColor) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(r))
-}
-
-// UnmarshalJSON sets the RGBColor from JSON
-func (r *RGBColor) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *r = RGBColor(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (r RGBColor) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": r.String()})
-}
-
-// UnmarshalBSON document into this value
-func (r *RGBColor) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *r = RGBColor(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as RGBColor: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (r *RGBColor) DeepCopyInto(out *RGBColor) {
- *out = *r
-}
-
-// DeepCopy copies the receiver into a new RGBColor.
-func (r *RGBColor) DeepCopy() *RGBColor {
- if r == nil {
- return nil
- }
- out := new(RGBColor)
- r.DeepCopyInto(out)
- return out
-}
-
-// Password represents a password.
-// This has no validations and is mainly used as a marker for UI components.
-//
-// swagger:strfmt password
-type Password string
-
-// MarshalText turns this instance into text
-func (r Password) MarshalText() ([]byte, error) {
- return []byte(string(r)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (r *Password) UnmarshalText(data []byte) error { // validation is performed later on
- *r = Password(string(data))
- return nil
-}
-
-// Scan read a value from a database driver
-func (r *Password) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *r = Password(string(v))
- case string:
- *r = Password(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Password from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (r Password) Value() (driver.Value, error) {
- return driver.Value(string(r)), nil
-}
-
-func (r Password) String() string {
- return string(r)
-}
-
-// MarshalJSON returns the Password as JSON
-func (r Password) MarshalJSON() ([]byte, error) {
- return json.Marshal(string(r))
-}
-
-// UnmarshalJSON sets the Password from JSON
-func (r *Password) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- *r = Password(ustr)
- return nil
-}
-
-// MarshalBSON document from this value
-func (r Password) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": r.String()})
-}
-
-// UnmarshalBSON document into this value
-func (r *Password) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- *r = Password(ud)
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as Password: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (r *Password) DeepCopyInto(out *Password) {
- *out = *r
-}
-
-// DeepCopy copies the receiver into a new Password.
-func (r *Password) DeepCopy() *Password {
- if r == nil {
- return nil
- }
- out := new(Password)
- r.DeepCopyInto(out)
- return out
-}
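
For reference, a minimal sketch (not part of this patch) of how the string-backed formats deleted above round-trip through encoding/json and database/sql/driver. It is based only on the removed sources and assumes the pre-removal github.com/go-openapi/strfmt package is still resolvable.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/strfmt" // resolvable only before this vendor removal
)

func main() {
	isbn := strfmt.ISBN("978-3-16-148410-0")

	// JSON: the value marshals as a plain string; unmarshaling "null" is a no-op.
	b, _ := json.Marshal(isbn)
	fmt.Println(string(b)) // "978-3-16-148410-0"

	var decoded strfmt.ISBN
	_ = json.Unmarshal([]byte(`null`), &decoded) // decoded stays ""
	_ = json.Unmarshal(b, &decoded)

	// database/sql: Value yields the underlying string; Scan accepts string or []byte.
	v, _ := isbn.Value()
	fmt.Println(v)

	var scanned strfmt.ISBN
	if err := scanned.Scan([]byte("978-0-306-40615-7")); err != nil {
		panic(err)
	}
	fmt.Println(scanned.String())
}
```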
diff --git a/vendor/github.com/go-openapi/strfmt/doc.go b/vendor/github.com/go-openapi/strfmt/doc.go
deleted file mode 100644
index 41aebe6d51..0000000000
--- a/vendor/github.com/go-openapi/strfmt/doc.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package strfmt contains custom string formats
-//
-// TODO: add info on how to define and register a custom format
-package strfmt
diff --git a/vendor/github.com/go-openapi/strfmt/duration.go b/vendor/github.com/go-openapi/strfmt/duration.go
deleted file mode 100644
index 7b14a6909b..0000000000
--- a/vendor/github.com/go-openapi/strfmt/duration.go
+++ /dev/null
@@ -1,224 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "encoding/json"
- "fmt"
- "regexp"
- "strconv"
- "strings"
- "time"
-
- "go.mongodb.org/mongo-driver/bson"
-)
-
-func init() {
- d := Duration(0)
- // register this format in the default registry
- Default.Add("duration", &d, IsDuration)
-}
-
-const (
- hoursInDay = 24
- daysInWeek = 7
-)
-
-var (
- timeUnits = [][]string{
- {"ns", "nano"},
- {"us", "µs", "micro"},
- {"ms", "milli"},
- {"s", "sec"},
- {"m", "min"},
- {"h", "hr", "hour"},
- {"d", "day"},
- {"w", "wk", "week"},
- }
-
- timeMultiplier = map[string]time.Duration{
- "ns": time.Nanosecond,
- "us": time.Microsecond,
- "ms": time.Millisecond,
- "s": time.Second,
- "m": time.Minute,
- "h": time.Hour,
- "d": hoursInDay * time.Hour,
- "w": hoursInDay * daysInWeek * time.Hour,
- }
-
- durationMatcher = regexp.MustCompile(`(((?:-\s?)?\d+)\s*([A-Za-zµ]+))`)
-)
-
-// IsDuration returns true if the provided string is a valid duration
-func IsDuration(str string) bool {
- _, err := ParseDuration(str)
- return err == nil
-}
-
-// Duration represents a duration
-//
-// Duration stores a period of time as a nanosecond count, with the largest
-// representable duration being approximately 290 years.
-//
-// swagger:strfmt duration
-type Duration time.Duration
-
-// MarshalText turns this instance into text
-func (d Duration) MarshalText() ([]byte, error) {
- return []byte(time.Duration(d).String()), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (d *Duration) UnmarshalText(data []byte) error { // validation is performed later on
- dd, err := ParseDuration(string(data))
- if err != nil {
- return err
- }
- *d = Duration(dd)
- return nil
-}
-
-// ParseDuration parses a duration from a string, compatible with Scala duration syntax
-func ParseDuration(cand string) (time.Duration, error) {
- if dur, err := time.ParseDuration(cand); err == nil {
- return dur, nil
- }
-
- var dur time.Duration
- ok := false
- for _, match := range durationMatcher.FindAllStringSubmatch(cand, -1) {
-
- // remove possible leading - and spaces
- value, negative := strings.CutPrefix(match[2], "-")
-
- // if the string is a valid duration, parse it
- factor, err := strconv.Atoi(strings.TrimSpace(value)) // converts string to int
- if err != nil {
- return 0, err
- }
-
- if negative {
- factor = -factor
- }
-
- unit := strings.ToLower(strings.TrimSpace(match[3]))
-
- for _, variants := range timeUnits {
- last := len(variants) - 1
- multiplier := timeMultiplier[variants[0]]
-
- for i, variant := range variants {
- if (last == i && strings.HasPrefix(unit, variant)) || strings.EqualFold(variant, unit) {
- ok = true
- dur += (time.Duration(factor) * multiplier)
- }
- }
- }
- }
-
- if ok {
- return dur, nil
- }
- return 0, fmt.Errorf("unable to parse %s as duration: %w", cand, ErrFormat)
-}
-
-// Scan reads a Duration value from database driver type.
-func (d *Duration) Scan(raw interface{}) error {
- switch v := raw.(type) {
- // TODO: case []byte: // ?
- case int64:
- *d = Duration(v)
- case float64:
- *d = Duration(int64(v))
- case nil:
- *d = Duration(0)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Duration from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts Duration to a primitive value ready to be written to a database.
-func (d Duration) Value() (driver.Value, error) {
- return driver.Value(int64(d)), nil
-}
-
-// String converts this duration to a string
-func (d Duration) String() string {
- return time.Duration(d).String()
-}
-
-// MarshalJSON returns the Duration as JSON
-func (d Duration) MarshalJSON() ([]byte, error) {
- return json.Marshal(time.Duration(d).String())
-}
-
-// UnmarshalJSON sets the Duration from JSON
-func (d *Duration) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
-
- var dstr string
- if err := json.Unmarshal(data, &dstr); err != nil {
- return err
- }
- tt, err := ParseDuration(dstr)
- if err != nil {
- return err
- }
- *d = Duration(tt)
- return nil
-}
-
-func (d Duration) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": d.String()})
-}
-
-func (d *Duration) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- rd, err := ParseDuration(data)
- if err != nil {
- return err
- }
- *d = Duration(rd)
- return nil
- }
-
- return fmt.Errorf("couldn't unmarshal bson bytes value as Date: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (d *Duration) DeepCopyInto(out *Duration) {
- *out = *d
-}
-
-// DeepCopy copies the receiver into a new Duration.
-func (d *Duration) DeepCopy() *Duration {
- if d == nil {
- return nil
- }
- out := new(Duration)
- d.DeepCopyInto(out)
- return out
-}
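
A minimal sketch (not part of this patch) of the duration parsing removed above: ParseDuration first defers to time.ParseDuration and only then falls back to the unit table ("day", "wk", ...). It assumes the pre-removal github.com/go-openapi/strfmt package.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt" // pre-removal vendored package
)

func main() {
	for _, s := range []string{"90s", "3 hours", "2 days", "1 wk"} {
		d, err := strfmt.ParseDuration(s)
		if err != nil {
			fmt.Println(s, "->", err)
			continue
		}
		fmt.Println(s, "->", d) // e.g. "2 days -> 48h0m0s"
	}
}
```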
diff --git a/vendor/github.com/go-openapi/strfmt/errors.go b/vendor/github.com/go-openapi/strfmt/errors.go
deleted file mode 100644
index 9a9240363d..0000000000
--- a/vendor/github.com/go-openapi/strfmt/errors.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package strfmt
-
-type strfmtError string
-
-// ErrFormat is an error raised by the strfmt package
-const ErrFormat strfmtError = "format error"
-
-func (e strfmtError) Error() string {
- return string(e)
-}
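
A short sketch (not part of this patch) of how the sentinel defined in the file removed above was consumed: format failures in the package wrap ErrFormat, so callers can match them with errors.Is. Assumes the pre-removal strfmt package.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/go-openapi/strfmt" // pre-removal vendored package
)

func main() {
	var d strfmt.Duration
	err := d.Scan(struct{}{})                     // unsupported sql.Scan source type
	fmt.Println(errors.Is(err, strfmt.ErrFormat)) // true
}
```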
diff --git a/vendor/github.com/go-openapi/strfmt/format.go b/vendor/github.com/go-openapi/strfmt/format.go
deleted file mode 100644
index 75d4b6065c..0000000000
--- a/vendor/github.com/go-openapi/strfmt/format.go
+++ /dev/null
@@ -1,326 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "encoding"
- "fmt"
- "reflect"
- "strings"
- "sync"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-viper/mapstructure/v2"
-)
-
-// Default is the default formats registry
-var Default = NewSeededFormats(nil, nil)
-
-// Validator represents a validator for a string format.
-type Validator func(string) bool
-
-// Format represents a string format.
-//
-// All implementations of Format provide a string representation and text
-// marshaling/unmarshaling interface to be used by encoders (e.g. encoding/json).
-type Format interface {
- String() string
- encoding.TextMarshaler
- encoding.TextUnmarshaler
-}
-
-// Registry is a registry of string formats, with a validation method.
-type Registry interface {
- Add(string, Format, Validator) bool
- DelByName(string) bool
- GetType(string) (reflect.Type, bool)
- ContainsName(string) bool
- Validates(string, string) bool
- Parse(string, string) (interface{}, error)
- MapStructureHookFunc() mapstructure.DecodeHookFunc
-}
-
-type knownFormat struct {
- Name string
- OrigName string
- Type reflect.Type
- Validator Validator
-}
-
-// NameNormalizer is a function that normalizes a format name.
-type NameNormalizer func(string) string
-
-// DefaultNameNormalizer removes all dashes
-func DefaultNameNormalizer(name string) string {
- return strings.ReplaceAll(name, "-", "")
-}
-
-type defaultFormats struct {
- sync.Mutex
- data []knownFormat
- normalizeName NameNormalizer
-}
-
-// NewFormats creates a new formats registry seeded with the values from the default registry.
-func NewFormats() Registry {
- //nolint:forcetypeassert
- return NewSeededFormats(Default.(*defaultFormats).data, nil)
-}
-
-// NewSeededFormats creates a new formats registry
-func NewSeededFormats(seeds []knownFormat, normalizer NameNormalizer) Registry {
- if normalizer == nil {
- normalizer = DefaultNameNormalizer
- }
- // copy here, don't modify original
- d := append([]knownFormat(nil), seeds...)
- return &defaultFormats{
- data: d,
- normalizeName: normalizer,
- }
-}
-
-// MapStructureHookFunc is a decode hook function for mapstructure
-func (f *defaultFormats) MapStructureHookFunc() mapstructure.DecodeHookFunc {
- return func(from reflect.Type, to reflect.Type, obj interface{}) (interface{}, error) {
- if from.Kind() != reflect.String {
- return obj, nil
- }
- data, ok := obj.(string)
- if !ok {
- return nil, fmt.Errorf("failed to cast %+v to string: %w", obj, ErrFormat)
- }
-
- for _, v := range f.data {
- tpe, _ := f.GetType(v.Name)
- if to == tpe {
- switch v.Name {
- case "date":
- d, err := time.ParseInLocation(RFC3339FullDate, data, DefaultTimeLocation)
- if err != nil {
- return nil, err
- }
- return Date(d), nil
- case "datetime":
- input := data
- if len(input) == 0 {
- return nil, fmt.Errorf("empty string is an invalid datetime format: %w", ErrFormat)
- }
- return ParseDateTime(input)
- case "duration":
- dur, err := ParseDuration(data)
- if err != nil {
- return nil, err
- }
- return Duration(dur), nil
- case "uri":
- return URI(data), nil
- case "email":
- return Email(data), nil
- case "uuid":
- return UUID(data), nil
- case "uuid3":
- return UUID3(data), nil
- case "uuid4":
- return UUID4(data), nil
- case "uuid5":
- return UUID5(data), nil
- case "hostname":
- return Hostname(data), nil
- case "ipv4":
- return IPv4(data), nil
- case "ipv6":
- return IPv6(data), nil
- case "cidr":
- return CIDR(data), nil
- case "mac":
- return MAC(data), nil
- case "isbn":
- return ISBN(data), nil
- case "isbn10":
- return ISBN10(data), nil
- case "isbn13":
- return ISBN13(data), nil
- case "creditcard":
- return CreditCard(data), nil
- case "ssn":
- return SSN(data), nil
- case "hexcolor":
- return HexColor(data), nil
- case "rgbcolor":
- return RGBColor(data), nil
- case "byte":
- return Base64(data), nil
- case "password":
- return Password(data), nil
- case "ulid":
- ulid, err := ParseULID(data)
- if err != nil {
- return nil, err
- }
- return ulid, nil
- default:
- return nil, errors.InvalidTypeName(v.Name)
- }
- }
- }
- return data, nil
- }
-}
-
-// Add adds a new format, returning true if this was a new item instead of a replacement
-func (f *defaultFormats) Add(name string, strfmt Format, validator Validator) bool {
- f.Lock()
- defer f.Unlock()
-
- nme := f.normalizeName(name)
-
- tpe := reflect.TypeOf(strfmt)
- if tpe.Kind() == reflect.Ptr {
- tpe = tpe.Elem()
- }
-
- for i := range f.data {
- v := &f.data[i]
- if v.Name == nme {
- v.Type = tpe
- v.Validator = validator
- return false
- }
- }
-
- // turns out it's new after all
- f.data = append(f.data, knownFormat{Name: nme, OrigName: name, Type: tpe, Validator: validator})
- return true
-}
-
-// GetType gets the type for the specified name
-func (f *defaultFormats) GetType(name string) (reflect.Type, bool) {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- return v.Type, true
- }
- }
- return nil, false
-}
-
-// DelByName removes the format by the specified name, returns true when an item was actually removed
-func (f *defaultFormats) DelByName(name string) bool {
- f.Lock()
- defer f.Unlock()
-
- nme := f.normalizeName(name)
-
- for i, v := range f.data {
- if v.Name == nme {
- f.data[i] = knownFormat{} // release
- f.data = append(f.data[:i], f.data[i+1:]...)
- return true
- }
- }
- return false
-}
-
-// DelByFormat removes the specified format, returns true when an item was actually removed
-func (f *defaultFormats) DelByFormat(strfmt Format) bool {
- f.Lock()
- defer f.Unlock()
-
- tpe := reflect.TypeOf(strfmt)
- if tpe.Kind() == reflect.Ptr {
- tpe = tpe.Elem()
- }
-
- for i, v := range f.data {
- if v.Type == tpe {
- f.data[i] = knownFormat{} // release
- f.data = append(f.data[:i], f.data[i+1:]...)
- return true
- }
- }
- return false
-}
-
-// ContainsName returns true if this registry contains the specified name
-func (f *defaultFormats) ContainsName(name string) bool {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- return true
- }
- }
- return false
-}
-
-// ContainsFormat returns true if this registry contains the specified format
-func (f *defaultFormats) ContainsFormat(strfmt Format) bool {
- f.Lock()
- defer f.Unlock()
- tpe := reflect.TypeOf(strfmt)
- if tpe.Kind() == reflect.Ptr {
- tpe = tpe.Elem()
- }
-
- for _, v := range f.data {
- if v.Type == tpe {
- return true
- }
- }
- return false
-}
-
-// Validates passed data against format.
-//
-// Note that the format name is automatically normalized, e.g. one may
-// use "date-time" to use the "datetime" format validator.
-func (f *defaultFormats) Validates(name, data string) bool {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- return v.Validator(data)
- }
- }
- return false
-}
-
-// Parse a string into the appropriate format representation type.
-//
-// E.g. parsing a string a "date" will return a Date type.
-func (f *defaultFormats) Parse(name, data string) (interface{}, error) {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- nw := reflect.New(v.Type).Interface()
- if dec, ok := nw.(encoding.TextUnmarshaler); ok {
- if err := dec.UnmarshalText([]byte(data)); err != nil {
- return nil, err
- }
- return nw, nil
- }
- return nil, errors.InvalidTypeName(name)
- }
- }
- return nil, errors.InvalidTypeName(name)
-}
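
A sketch (not part of this patch) of typical usage of the registry removed above: format names are normalized (dashes stripped), Validates runs the registered validator, and Parse instantiates the registered type through its TextUnmarshaler. Assumes the pre-removal strfmt package.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt" // pre-removal vendored package
)

func main() {
	// "date-time" normalizes to the registered "datetime" format.
	fmt.Println(strfmt.Default.Validates("date-time", "2012-04-23T18:25:43.511Z")) // true

	v, err := strfmt.Default.Parse("duration", "2 days")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", v, v) // *strfmt.Duration 48h0m0s
}
```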
diff --git a/vendor/github.com/go-openapi/strfmt/time.go b/vendor/github.com/go-openapi/strfmt/time.go
deleted file mode 100644
index 7952abacc8..0000000000
--- a/vendor/github.com/go-openapi/strfmt/time.go
+++ /dev/null
@@ -1,337 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "encoding/binary"
- "encoding/json"
- "fmt"
- "regexp"
- "strings"
- "time"
-
- "go.mongodb.org/mongo-driver/bson"
-
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-var (
- // UnixZero is the zero Unix timestamp, kept in UTC for comparisons.
- //
- // Note that Unix 0 expressed in a non-UTC location (e.g. EST) does not compare equal to Unix 0 in UTC.
- UnixZero = time.Unix(0, 0).UTC()
-)
-
-func init() {
- dt := DateTime{}
- Default.Add("datetime", &dt, IsDateTime)
-}
-
-// IsDateTime returns true when the string is a valid date-time.
-//
-// The JSON datetime format consists of a date and a time separated by a "T", e.g. 2012-04-23T18:25:43.511Z.
-func IsDateTime(str string) bool {
- const (
- minDateTimeLength = 4
- minParts = 2
- )
- if len(str) < minDateTimeLength {
- return false
- }
- s := strings.Split(strings.ToLower(str), "t")
- if len(s) < minParts || !IsDate(s[0]) {
- return false
- }
-
- matches := rxDateTime.FindAllStringSubmatch(s[1], -1)
- if len(matches) == 0 || len(matches[0]) == 0 {
- return false
- }
- m := matches[0]
- res := m[1] <= "23" && m[2] <= "59" && m[3] <= "59"
- return res
-}
-
-const (
- // RFC3339Millis represents an ISO8601 format to millis instead of to nanos
- RFC3339Millis = "2006-01-02T15:04:05.000Z07:00"
- // RFC3339MillisNoColon represents an ISO8601 format to millis instead of to nanos
- RFC3339MillisNoColon = "2006-01-02T15:04:05.000Z0700"
- // RFC3339Micro represents an ISO8601 format to micro instead of to nano
- RFC3339Micro = "2006-01-02T15:04:05.000000Z07:00"
- // RFC3339MicroNoColon represents an ISO8601 format to micro instead of to nano
- RFC3339MicroNoColon = "2006-01-02T15:04:05.000000Z0700"
- // ISO8601LocalTime represents an ISO8601 format in local time (no timezone)
- ISO8601LocalTime = "2006-01-02T15:04:05"
- // ISO8601TimeWithReducedPrecision represents an ISO8601 format with reduced precision (dropped seconds)
- ISO8601TimeWithReducedPrecision = "2006-01-02T15:04Z"
- // ISO8601TimeWithReducedPrecisionLocaltime represents an ISO8601 format with reduced precision (dropped seconds) and no timezone
- ISO8601TimeWithReducedPrecisionLocaltime = "2006-01-02T15:04"
- // ISO8601TimeUniversalSortableDateTimePattern represents an ISO8601 universal sortable date time pattern.
- ISO8601TimeUniversalSortableDateTimePattern = "2006-01-02 15:04:05"
- // short form of ISO8601TimeUniversalSortableDateTimePattern
- ISO8601TimeUniversalSortableDateTimePatternShortForm = "2006-01-02"
- // DateTimePattern pattern to match for the date-time format from http://tools.ietf.org/html/rfc3339#section-5.6
- DateTimePattern = `^([0-9]{2}):([0-9]{2}):([0-9]{2})(.[0-9]+)?(z|([+-][0-9]{2}:[0-9]{2}))$`
-)
-
-var (
- rxDateTime = regexp.MustCompile(DateTimePattern)
-
- // DateTimeFormats is the collection of formats used by ParseDateTime()
- DateTimeFormats = []string{RFC3339Micro, RFC3339MicroNoColon, RFC3339Millis, RFC3339MillisNoColon, time.RFC3339, time.RFC3339Nano, ISO8601LocalTime, ISO8601TimeWithReducedPrecision, ISO8601TimeWithReducedPrecisionLocaltime, ISO8601TimeUniversalSortableDateTimePattern, ISO8601TimeUniversalSortableDateTimePatternShortForm}
-
- // MarshalFormat sets the time resolution format used for marshaling time (set to milliseconds)
- MarshalFormat = RFC3339Millis
-
- // NormalizeTimeForMarshal provides a normalization function on time before marshalling (e.g. time.UTC).
- // By default, the time value is not changed.
- NormalizeTimeForMarshal = func(t time.Time) time.Time { return t }
-
- // DefaultTimeLocation provides a location for a time when the time zone is not encoded in the string (ex: ISO8601 Local variants).
- DefaultTimeLocation = time.UTC
-)
-
-// ParseDateTime parses a string that represents an ISO8601 time or a unix epoch
-func ParseDateTime(data string) (DateTime, error) {
- if data == "" {
- return NewDateTime(), nil
- }
- var lastError error
- for _, layout := range DateTimeFormats {
- dd, err := time.ParseInLocation(layout, data, DefaultTimeLocation)
- if err != nil {
- lastError = err
- continue
- }
- return DateTime(dd), nil
- }
- return DateTime{}, lastError
-}
-
-// DateTime is a time but it serializes to ISO8601 format with millis.
-//
-// It knows how to read several variations of an RFC3339/ISO8601 date time.
-// Most APIs we encounter want either millisecond or second precision times.
-// This just tries to make it worry-free.
-//
-// swagger:strfmt date-time
-type DateTime time.Time
-
-// NewDateTime returns a [DateTime] set to the UNIX epoch (January 1, 1970 00:00:00 UTC).
-//
-// Notice that this is not the zero value of the [DateTime] type.
-//
-// You may use [DateTime.IsUnixZero] to check against this value.
-func NewDateTime() DateTime {
- return DateTime(time.Unix(0, 0).UTC())
-}
-
-// MakeDateTime returns the zero value of the [DateTime] type (January 1, year 1, 00:00:00 UTC).
-//
-// You may use [DateTime.IsZero] to check against this value.
-func MakeDateTime() DateTime {
- return DateTime(time.Time{})
-}
-
-// String converts this time to a string
-func (t DateTime) String() string {
- return NormalizeTimeForMarshal(time.Time(t)).Format(MarshalFormat)
-}
-
-// IsZero returns whether the date time is a zero value
-func (t DateTime) IsZero() bool {
- return time.Time(t).IsZero()
-}
-
-// IsUnixZero returns whether the date time is equivalent to time.Unix(0, 0).UTC().
-func (t DateTime) IsUnixZero() bool {
- return time.Time(t) == UnixZero
-}
-
-// MarshalText implements the text marshaller interface
-func (t DateTime) MarshalText() ([]byte, error) {
- return []byte(t.String()), nil
-}
-
-// UnmarshalText implements the text unmarshaller interface
-func (t *DateTime) UnmarshalText(text []byte) error {
- tt, err := ParseDateTime(string(text))
- if err != nil {
- return err
- }
- *t = tt
- return nil
-}
-
-// Scan scans a DateTime value from database driver type.
-func (t *DateTime) Scan(raw interface{}) error {
- // TODO: case int64: and case float64: ?
- switch v := raw.(type) {
- case []byte:
- return t.UnmarshalText(v)
- case string:
- return t.UnmarshalText([]byte(v))
- case time.Time:
- *t = DateTime(v)
- case nil:
- *t = DateTime{}
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.DateTime from: %#v: %w", v, ErrFormat)
- }
-
- return nil
-}
-
-// Value converts DateTime to a primitive value ready to be written to a database.
-func (t DateTime) Value() (driver.Value, error) {
- return driver.Value(t.String()), nil
-}
-
-// MarshalJSON returns the DateTime as JSON
-func (t DateTime) MarshalJSON() ([]byte, error) {
- return json.Marshal(NormalizeTimeForMarshal(time.Time(t)).Format(MarshalFormat))
-}
-
-// UnmarshalJSON sets the DateTime from JSON
-func (t *DateTime) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
-
- var tstr string
- if err := json.Unmarshal(data, &tstr); err != nil {
- return err
- }
- tt, err := ParseDateTime(tstr)
- if err != nil {
- return err
- }
- *t = tt
- return nil
-}
-
-// MarshalBSON renders the DateTime as a BSON document
-func (t DateTime) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": t})
-}
-
-// UnmarshalBSON reads the DateTime from a BSON document
-func (t *DateTime) UnmarshalBSON(data []byte) error {
- var obj struct {
- Data DateTime
- }
-
- if err := bson.Unmarshal(data, &obj); err != nil {
- return err
- }
-
- *t = obj.Data
-
- return nil
-}
-
-const bsonDateLength = 8
-
-// MarshalBSONValue is an interface implemented by types that can marshal themselves
-// into a BSON document represented as bytes. The bytes returned must be a valid
-// BSON document if the error is nil.
-//
-// Marshals a DateTime as a bsontype.DateTime, an int64 representing
-// milliseconds since epoch.
-func (t DateTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
- // UnixNano cannot be used directly: its result is undefined for the zero
- // Time, so the value is marshaled as milliseconds since epoch via UnixMilli.
- tNorm := NormalizeTimeForMarshal(time.Time(t))
- i64 := tNorm.UnixMilli()
-
- buf := make([]byte, bsonDateLength)
- // int64 -> uint64 conversion is safe here
- binary.LittleEndian.PutUint64(buf, uint64(i64)) //nolint:gosec
-
- return bson.TypeDateTime, buf, nil
-}
-
-// UnmarshalBSONValue is an interface implemented by types that can unmarshal a
-// BSON value representation of themselves. The BSON bytes and type can be
-// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
-// wishes to retain the data after returning.
-func (t *DateTime) UnmarshalBSONValue(tpe bsontype.Type, data []byte) error {
- if tpe == bson.TypeNull {
- *t = DateTime{}
- return nil
- }
-
- if len(data) != bsonDateLength {
- return fmt.Errorf("bson date field length not exactly 8 bytes: %w", ErrFormat)
- }
-
- // it's ok to get negative values after conversion
- i64 := int64(binary.LittleEndian.Uint64(data)) //nolint:gosec
- // TODO: Use bsonprim.DateTime.Time() method
- *t = DateTime(time.UnixMilli(i64))
-
- return nil
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (t *DateTime) DeepCopyInto(out *DateTime) {
- *out = *t
-}
-
-// DeepCopy copies the receiver into a new DateTime.
-func (t *DateTime) DeepCopy() *DateTime {
- if t == nil {
- return nil
- }
- out := new(DateTime)
- t.DeepCopyInto(out)
- return out
-}
-
-// GobEncode implements the gob.GobEncoder interface.
-func (t DateTime) GobEncode() ([]byte, error) {
- return t.MarshalBinary()
-}
-
-// GobDecode implements the gob.GobDecoder interface.
-func (t *DateTime) GobDecode(data []byte) error {
- return t.UnmarshalBinary(data)
-}
-
-// MarshalBinary implements the encoding.BinaryMarshaler interface.
-func (t DateTime) MarshalBinary() ([]byte, error) {
- return NormalizeTimeForMarshal(time.Time(t)).MarshalBinary()
-}
-
-// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
-func (t *DateTime) UnmarshalBinary(data []byte) error {
- var original time.Time
-
- err := original.UnmarshalBinary(data)
- if err != nil {
- return err
- }
-
- *t = DateTime(original)
-
- return nil
-}
-
-// Equal checks if two DateTime instances are equal using time.Time's Equal method
-func (t DateTime) Equal(t2 DateTime) bool {
- return time.Time(t).Equal(time.Time(t2))
-}
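
A sketch (not part of this patch) of the DateTime behavior removed above: ParseDateTime accepts several RFC3339/ISO8601 variants, and marshaling uses MarshalFormat (RFC3339Millis by default). Assumes the pre-removal strfmt package.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/strfmt" // pre-removal vendored package
)

func main() {
	inputs := []string{
		"2012-04-23T18:25:43.511Z",
		"2012-04-23T18:25:43", // local-time variant, read in DefaultTimeLocation (UTC)
		"2012-04-23",          // short universal sortable form
	}
	for _, s := range inputs {
		dt, err := strfmt.ParseDateTime(s)
		if err != nil {
			panic(err)
		}
		b, _ := json.Marshal(dt)
		fmt.Println(s, "->", string(b)) // always millisecond precision on output
	}
}
```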
diff --git a/vendor/github.com/go-openapi/strfmt/ulid.go b/vendor/github.com/go-openapi/strfmt/ulid.go
deleted file mode 100644
index 434eb01baa..0000000000
--- a/vendor/github.com/go-openapi/strfmt/ulid.go
+++ /dev/null
@@ -1,229 +0,0 @@
-package strfmt
-
-import (
- cryptorand "crypto/rand"
- "database/sql/driver"
- "encoding/json"
- "fmt"
- "io"
- "sync"
-
- "github.com/oklog/ulid"
- "go.mongodb.org/mongo-driver/bson"
-)
-
-// ULID represents a ulid string format
-// ref:
-//
-// https://github.com/ulid/spec
-//
-// impl:
-//
-// https://github.com/oklog/ulid
-//
-// swagger:strfmt ulid
-type ULID struct {
- ulid.ULID
-}
-
-var (
- ulidEntropyPool = sync.Pool{
- New: func() interface{} {
- return cryptorand.Reader
- },
- }
-
- ULIDScanDefaultFunc = func(raw interface{}) (ULID, error) {
- u := NewULIDZero()
- switch x := raw.(type) {
- case nil:
- // zero ulid
- return u, nil
- case string:
- if x == "" {
- // zero ulid
- return u, nil
- }
- return u, u.UnmarshalText([]byte(x))
- case []byte:
- return u, u.UnmarshalText(x)
- }
-
- return u, fmt.Errorf("cannot sql.Scan() strfmt.ULID from: %#v: %w", raw, ulid.ErrScanValue)
- }
-
- // ULIDScanOverrideFunc allows you to override the Scan method of the ULID type
- ULIDScanOverrideFunc = ULIDScanDefaultFunc
-
- ULIDValueDefaultFunc = func(u ULID) (driver.Value, error) {
- return driver.Value(u.String()), nil
- }
-
- // ULIDValueOverrideFunc allows you to override the Value method of the ULID type
- ULIDValueOverrideFunc = ULIDValueDefaultFunc
-)
-
-func init() {
- // register formats in the default registry:
- // - ulid
- ulid := ULID{}
- Default.Add("ulid", &ulid, IsULID)
-}
-
-// IsULID checks whether the provided string is in ULID format.
-// Note that this function considers an overflowed ULID as invalid.
-// For more details see https://github.com/ulid/spec
-func IsULID(str string) bool {
- _, err := ulid.ParseStrict(str)
- return err == nil
-}
-
-// ParseULID parses a string that represents a valid ULID
-func ParseULID(str string) (ULID, error) {
- var u ULID
-
- return u, u.UnmarshalText([]byte(str))
-}
-
-// NewULIDZero returns a zero valued ULID type
-func NewULIDZero() ULID {
- return ULID{}
-}
-
-// NewULID generates a new unique ULID value and an error, if any
-func NewULID() (ULID, error) {
- var u ULID
-
- obj := ulidEntropyPool.Get()
- entropy, ok := obj.(io.Reader)
- if !ok {
- return u, fmt.Errorf("failed to cast %+v to io.Reader: %w", obj, ErrFormat)
- }
-
- id, err := ulid.New(ulid.Now(), entropy)
- if err != nil {
- return u, err
- }
- ulidEntropyPool.Put(entropy)
-
- u.ULID = id
- return u, nil
-}
-
-// GetULID returns underlying instance of ULID
-func (u *ULID) GetULID() interface{} {
- return u.ULID
-}
-
-// MarshalText turns this instance into text
-func (u ULID) MarshalText() ([]byte, error) {
- return u.ULID.MarshalText()
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ULID) UnmarshalText(data []byte) error { // validation is performed later on
- return u.ULID.UnmarshalText(data)
-}
-
-// Scan reads a value from a database driver
-func (u *ULID) Scan(raw interface{}) error {
- ul, err := ULIDScanOverrideFunc(raw)
- if err == nil {
- *u = ul
- }
- return err
-}
-
-// Value converts a value to a database driver value
-func (u ULID) Value() (driver.Value, error) {
- return ULIDValueOverrideFunc(u)
-}
-
-func (u ULID) String() string {
- return u.ULID.String()
-}
-
-// MarshalJSON returns the ULID as JSON
-func (u ULID) MarshalJSON() ([]byte, error) {
- return json.Marshal(u.String())
-}
-
-// UnmarshalJSON sets the ULID from JSON
-func (u *ULID) UnmarshalJSON(data []byte) error {
- if string(data) == jsonNull {
- return nil
- }
- var ustr string
- if err := json.Unmarshal(data, &ustr); err != nil {
- return err
- }
- id, err := ulid.ParseStrict(ustr)
- if err != nil {
- return fmt.Errorf("couldn't parse JSON value as ULID: %w", err)
- }
- u.ULID = id
- return nil
-}
-
-// MarshalBSON document from this value
-func (u ULID) MarshalBSON() ([]byte, error) {
- return bson.Marshal(bson.M{"data": u.String()})
-}
-
-// UnmarshalBSON document into this value
-func (u *ULID) UnmarshalBSON(data []byte) error {
- var m bson.M
- if err := bson.Unmarshal(data, &m); err != nil {
- return err
- }
-
- if ud, ok := m["data"].(string); ok {
- id, err := ulid.ParseStrict(ud)
- if err != nil {
- return fmt.Errorf("couldn't parse bson bytes as ULID: %w: %w", err, ErrFormat)
- }
- u.ULID = id
- return nil
- }
- return fmt.Errorf("couldn't unmarshal bson bytes as ULID: %w", ErrFormat)
-}
-
-// DeepCopyInto copies the receiver and writes its value into out.
-func (u *ULID) DeepCopyInto(out *ULID) {
- *out = *u
-}
-
-// DeepCopy copies the receiver into a new ULID.
-func (u *ULID) DeepCopy() *ULID {
- if u == nil {
- return nil
- }
- out := new(ULID)
- u.DeepCopyInto(out)
- return out
-}
-
-// GobEncode implements the gob.GobEncoder interface.
-func (u ULID) GobEncode() ([]byte, error) {
- return u.ULID.MarshalBinary()
-}
-
-// GobDecode implements the gob.GobDecoder interface.
-func (u *ULID) GobDecode(data []byte) error {
- return u.ULID.UnmarshalBinary(data)
-}
-
-// MarshalBinary implements the encoding.BinaryMarshaler interface.
-func (u ULID) MarshalBinary() ([]byte, error) {
- return u.ULID.MarshalBinary()
-}
-
-// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
-func (u *ULID) UnmarshalBinary(data []byte) error {
- return u.ULID.UnmarshalBinary(data)
-}
-
-// Equal checks if two ULID instances are equal by their underlying type
-func (u ULID) Equal(other ULID) bool {
- return u.ULID == other.ULID
-}
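
A sketch (not part of this patch) of the ULID helpers removed above: NewULID draws entropy from a crypto/rand-backed pool, and IsULID validates with strict parsing. Assumes the pre-removal strfmt package.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt" // pre-removal vendored package
)

func main() {
	u, err := strfmt.NewULID()
	if err != nil {
		panic(err)
	}
	fmt.Println(u.String(), strfmt.IsULID(u.String())) // <26-char ULID> true

	if _, err := strfmt.ParseULID("not-a-ulid"); err != nil {
		fmt.Println("rejected:", err) // malformed ULIDs are rejected
	}
}
```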
diff --git a/vendor/github.com/go-openapi/swag/.editorconfig b/vendor/github.com/go-openapi/swag/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/swag/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/swag/.gitattributes b/vendor/github.com/go-openapi/swag/.gitattributes
deleted file mode 100644
index 49ad52766a..0000000000
--- a/vendor/github.com/go-openapi/swag/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-# gofmt always uses LF, whereas Git uses CRLF on Windows.
-*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/swag/.gitignore b/vendor/github.com/go-openapi/swag/.gitignore
deleted file mode 100644
index c4b1b64f04..0000000000
--- a/vendor/github.com/go-openapi/swag/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-secrets.yml
-vendor
-Godeps
-.idea
-*.out
diff --git a/vendor/github.com/go-openapi/swag/.golangci.yml b/vendor/github.com/go-openapi/swag/.golangci.yml
deleted file mode 100644
index d2fafb8a2b..0000000000
--- a/vendor/github.com/go-openapi/swag/.golangci.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-linters-settings:
- gocyclo:
- min-complexity: 45
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - recvcheck
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- #- deadcode
- #- interfacer
- #- scopelint
- #- varcheck
- #- structcheck
- #- golint
- #- nosnakecase
- #- maligned
- #- goerr113
- #- ifshort
- #- gomnd
- #- exhaustivestruct
diff --git a/vendor/github.com/go-openapi/swag/BENCHMARK.md b/vendor/github.com/go-openapi/swag/BENCHMARK.md
deleted file mode 100644
index e7f28ed6b7..0000000000
--- a/vendor/github.com/go-openapi/swag/BENCHMARK.md
+++ /dev/null
@@ -1,52 +0,0 @@
-# Benchmarks
-
-## Name mangling utilities
-
-```bash
-go test -bench XXX -run XXX -benchtime 30s
-```
-
-### Benchmarks at b3e7a5386f996177e4808f11acb2aa93a0f660df
-
-```
-goos: linux
-goarch: amd64
-pkg: github.com/go-openapi/swag
-cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz
-BenchmarkToXXXName/ToGoName-4 862623 44101 ns/op 10450 B/op 732 allocs/op
-BenchmarkToXXXName/ToVarName-4 853656 40728 ns/op 10468 B/op 734 allocs/op
-BenchmarkToXXXName/ToFileName-4 1268312 27813 ns/op 9785 B/op 617 allocs/op
-BenchmarkToXXXName/ToCommandName-4 1276322 27903 ns/op 9785 B/op 617 allocs/op
-BenchmarkToXXXName/ToHumanNameLower-4 895334 40354 ns/op 10472 B/op 731 allocs/op
-BenchmarkToXXXName/ToHumanNameTitle-4 882441 40678 ns/op 10566 B/op 749 allocs/op
-```
-
-### Benchmarks after PR #79
-
-~10x performance improvement and ~100x fewer memory allocations.
-
-```
-goos: linux
-goarch: amd64
-pkg: github.com/go-openapi/swag
-cpu: Intel(R) Core(TM) i5-6200U CPU @ 2.30GHz
-BenchmarkToXXXName/ToGoName-4 9595830 3991 ns/op 42 B/op 5 allocs/op
-BenchmarkToXXXName/ToVarName-4 9194276 3984 ns/op 62 B/op 7 allocs/op
-BenchmarkToXXXName/ToFileName-4 17002711 2123 ns/op 147 B/op 7 allocs/op
-BenchmarkToXXXName/ToCommandName-4 16772926 2111 ns/op 147 B/op 7 allocs/op
-BenchmarkToXXXName/ToHumanNameLower-4 9788331 3749 ns/op 92 B/op 6 allocs/op
-BenchmarkToXXXName/ToHumanNameTitle-4 9188260 3941 ns/op 104 B/op 6 allocs/op
-```
-
-```
-goos: linux
-goarch: amd64
-pkg: github.com/go-openapi/swag
-cpu: AMD Ryzen 7 5800X 8-Core Processor
-BenchmarkToXXXName/ToGoName-16 18527378 1972 ns/op 42 B/op 5 allocs/op
-BenchmarkToXXXName/ToVarName-16 15552692 2093 ns/op 62 B/op 7 allocs/op
-BenchmarkToXXXName/ToFileName-16 32161176 1117 ns/op 147 B/op 7 allocs/op
-BenchmarkToXXXName/ToCommandName-16 32256634 1137 ns/op 147 B/op 7 allocs/op
-BenchmarkToXXXName/ToHumanNameLower-16 18599661 1946 ns/op 92 B/op 6 allocs/op
-BenchmarkToXXXName/ToHumanNameTitle-16 17581353 2054 ns/op 105 B/op 6 allocs/op
-```
diff --git a/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/swag/LICENSE b/vendor/github.com/go-openapi/swag/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/swag/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/swag/README.md b/vendor/github.com/go-openapi/swag/README.md
deleted file mode 100644
index a729222998..0000000000
--- a/vendor/github.com/go-openapi/swag/README.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# Swag [](https://github.com/go-openapi/swag/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/swag)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/swag)
-[](https://goreportcard.com/report/github.com/go-openapi/swag)
-
-Contains a bunch of helper functions for go-openapi and go-swagger projects.
-
-You may also use it standalone for your projects.
-
-* convert between value and pointers for builtin types
-* convert from string to builtin types (wraps strconv)
-* fast json concatenation
-* search in path
-* load from file or http
-* name mangling
-
-
-This repo has only few dependencies outside of the standard library:
-
-* YAML utilities depend on `gopkg.in/yaml.v3`
-* `github.com/mailru/easyjson v0.7.7`
diff --git a/vendor/github.com/go-openapi/swag/convert.go b/vendor/github.com/go-openapi/swag/convert.go
deleted file mode 100644
index fc085aeb8e..0000000000
--- a/vendor/github.com/go-openapi/swag/convert.go
+++ /dev/null
@@ -1,208 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "math"
- "strconv"
- "strings"
-)
-
-// same as ECMA Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER
-const (
- maxJSONFloat = float64(1<<53 - 1) // 9007199254740991.0 2^53 - 1
- minJSONFloat = -float64(1<<53 - 1) //-9007199254740991.0 -2^53 - 1
- epsilon float64 = 1e-9
-)
-
-// IsFloat64AJSONInteger allow for integers [-2^53, 2^53-1] inclusive
-func IsFloat64AJSONInteger(f float64) bool {
- if math.IsNaN(f) || math.IsInf(f, 0) || f < minJSONFloat || f > maxJSONFloat {
- return false
- }
- fa := math.Abs(f)
- g := float64(uint64(f))
- ga := math.Abs(g)
-
- diff := math.Abs(f - g)
-
- // more info: https://floating-point-gui.de/errors/comparison/#look-out-for-edge-cases
- switch {
- case f == g: // best case
- return true
- case f == float64(int64(f)) || f == float64(uint64(f)): // optimistic case
- return true
- case f == 0 || g == 0 || diff < math.SmallestNonzeroFloat64: // very close to 0 values
- return diff < (epsilon * math.SmallestNonzeroFloat64)
- }
- // check the relative error
- return diff/math.Min(fa+ga, math.MaxFloat64) < epsilon
-}
-
-var evaluatesAsTrue map[string]struct{}
-
-func init() {
- evaluatesAsTrue = map[string]struct{}{
- "true": {},
- "1": {},
- "yes": {},
- "ok": {},
- "y": {},
- "on": {},
- "selected": {},
- "checked": {},
- "t": {},
- "enabled": {},
- }
-}
-
-// ConvertBool turn a string into a boolean
-func ConvertBool(str string) (bool, error) {
- _, ok := evaluatesAsTrue[strings.ToLower(str)]
- return ok, nil
-}
-
-// ConvertFloat32 turn a string into a float32
-func ConvertFloat32(str string) (float32, error) {
- f, err := strconv.ParseFloat(str, 32)
- if err != nil {
- return 0, err
- }
- return float32(f), nil
-}
-
-// ConvertFloat64 turn a string into a float64
-func ConvertFloat64(str string) (float64, error) {
- return strconv.ParseFloat(str, 64)
-}
-
-// ConvertInt8 turn a string into an int8
-func ConvertInt8(str string) (int8, error) {
- i, err := strconv.ParseInt(str, 10, 8)
- if err != nil {
- return 0, err
- }
- return int8(i), nil
-}
-
-// ConvertInt16 turn a string into an int16
-func ConvertInt16(str string) (int16, error) {
- i, err := strconv.ParseInt(str, 10, 16)
- if err != nil {
- return 0, err
- }
- return int16(i), nil
-}
-
-// ConvertInt32 turn a string into an int32
-func ConvertInt32(str string) (int32, error) {
- i, err := strconv.ParseInt(str, 10, 32)
- if err != nil {
- return 0, err
- }
- return int32(i), nil
-}
-
-// ConvertInt64 turn a string into an int64
-func ConvertInt64(str string) (int64, error) {
- return strconv.ParseInt(str, 10, 64)
-}
-
-// ConvertUint8 turn a string into an uint8
-func ConvertUint8(str string) (uint8, error) {
- i, err := strconv.ParseUint(str, 10, 8)
- if err != nil {
- return 0, err
- }
- return uint8(i), nil
-}
-
-// ConvertUint16 turn a string into an uint16
-func ConvertUint16(str string) (uint16, error) {
- i, err := strconv.ParseUint(str, 10, 16)
- if err != nil {
- return 0, err
- }
- return uint16(i), nil
-}
-
-// ConvertUint32 turn a string into an uint32
-func ConvertUint32(str string) (uint32, error) {
- i, err := strconv.ParseUint(str, 10, 32)
- if err != nil {
- return 0, err
- }
- return uint32(i), nil
-}
-
-// ConvertUint64 turn a string into an uint64
-func ConvertUint64(str string) (uint64, error) {
- return strconv.ParseUint(str, 10, 64)
-}
-
-// FormatBool turns a boolean into a string
-func FormatBool(value bool) string {
- return strconv.FormatBool(value)
-}
-
-// FormatFloat32 turns a float32 into a string
-func FormatFloat32(value float32) string {
- return strconv.FormatFloat(float64(value), 'f', -1, 32)
-}
-
-// FormatFloat64 turns a float64 into a string
-func FormatFloat64(value float64) string {
- return strconv.FormatFloat(value, 'f', -1, 64)
-}
-
-// FormatInt8 turns an int8 into a string
-func FormatInt8(value int8) string {
- return strconv.FormatInt(int64(value), 10)
-}
-
-// FormatInt16 turns an int16 into a string
-func FormatInt16(value int16) string {
- return strconv.FormatInt(int64(value), 10)
-}
-
-// FormatInt32 turns an int32 into a string
-func FormatInt32(value int32) string {
- return strconv.Itoa(int(value))
-}
-
-// FormatInt64 turns an int64 into a string
-func FormatInt64(value int64) string {
- return strconv.FormatInt(value, 10)
-}
-
-// FormatUint8 turns an uint8 into a string
-func FormatUint8(value uint8) string {
- return strconv.FormatUint(uint64(value), 10)
-}
-
-// FormatUint16 turns an uint16 into a string
-func FormatUint16(value uint16) string {
- return strconv.FormatUint(uint64(value), 10)
-}
-
-// FormatUint32 turns an uint32 into a string
-func FormatUint32(value uint32) string {
- return strconv.FormatUint(uint64(value), 10)
-}
-
-// FormatUint64 turns an uint64 into a string
-func FormatUint64(value uint64) string {
- return strconv.FormatUint(value, 10)
-}
diff --git a/vendor/github.com/go-openapi/swag/convert_types.go b/vendor/github.com/go-openapi/swag/convert_types.go
deleted file mode 100644
index c49cc473a8..0000000000
--- a/vendor/github.com/go-openapi/swag/convert_types.go
+++ /dev/null
@@ -1,730 +0,0 @@
-package swag
-
-import "time"
-
-// This file was taken from the aws go sdk
-
-// String returns a pointer to of the string value passed in.
-func String(v string) *string {
- return &v
-}
-
-// StringValue returns the value of the string pointer passed in or
-// "" if the pointer is nil.
-func StringValue(v *string) string {
- if v != nil {
- return *v
- }
- return ""
-}
-
-// StringSlice converts a slice of string values into a slice of
-// string pointers
-func StringSlice(src []string) []*string {
- dst := make([]*string, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// StringValueSlice converts a slice of string pointers into a slice of
-// string values
-func StringValueSlice(src []*string) []string {
- dst := make([]string, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// StringMap converts a string map of string values into a string
-// map of string pointers
-func StringMap(src map[string]string) map[string]*string {
- dst := make(map[string]*string)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// StringValueMap converts a string map of string pointers into a string
-// map of string values
-func StringValueMap(src map[string]*string) map[string]string {
- dst := make(map[string]string)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Bool returns a pointer to of the bool value passed in.
-func Bool(v bool) *bool {
- return &v
-}
-
-// BoolValue returns the value of the bool pointer passed in or
-// false if the pointer is nil.
-func BoolValue(v *bool) bool {
- if v != nil {
- return *v
- }
- return false
-}
-
-// BoolSlice converts a slice of bool values into a slice of
-// bool pointers
-func BoolSlice(src []bool) []*bool {
- dst := make([]*bool, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// BoolValueSlice converts a slice of bool pointers into a slice of
-// bool values
-func BoolValueSlice(src []*bool) []bool {
- dst := make([]bool, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// BoolMap converts a string map of bool values into a string
-// map of bool pointers
-func BoolMap(src map[string]bool) map[string]*bool {
- dst := make(map[string]*bool)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// BoolValueMap converts a string map of bool pointers into a string
-// map of bool values
-func BoolValueMap(src map[string]*bool) map[string]bool {
- dst := make(map[string]bool)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Int returns a pointer to of the int value passed in.
-func Int(v int) *int {
- return &v
-}
-
-// IntValue returns the value of the int pointer passed in or
-// 0 if the pointer is nil.
-func IntValue(v *int) int {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// IntSlice converts a slice of int values into a slice of
-// int pointers
-func IntSlice(src []int) []*int {
- dst := make([]*int, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// IntValueSlice converts a slice of int pointers into a slice of
-// int values
-func IntValueSlice(src []*int) []int {
- dst := make([]int, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// IntMap converts a string map of int values into a string
-// map of int pointers
-func IntMap(src map[string]int) map[string]*int {
- dst := make(map[string]*int)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// IntValueMap converts a string map of int pointers into a string
-// map of int values
-func IntValueMap(src map[string]*int) map[string]int {
- dst := make(map[string]int)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Int32 returns a pointer to of the int32 value passed in.
-func Int32(v int32) *int32 {
- return &v
-}
-
-// Int32Value returns the value of the int32 pointer passed in or
-// 0 if the pointer is nil.
-func Int32Value(v *int32) int32 {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// Int32Slice converts a slice of int32 values into a slice of
-// int32 pointers
-func Int32Slice(src []int32) []*int32 {
- dst := make([]*int32, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// Int32ValueSlice converts a slice of int32 pointers into a slice of
-// int32 values
-func Int32ValueSlice(src []*int32) []int32 {
- dst := make([]int32, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// Int32Map converts a string map of int32 values into a string
-// map of int32 pointers
-func Int32Map(src map[string]int32) map[string]*int32 {
- dst := make(map[string]*int32)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// Int32ValueMap converts a string map of int32 pointers into a string
-// map of int32 values
-func Int32ValueMap(src map[string]*int32) map[string]int32 {
- dst := make(map[string]int32)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Int64 returns a pointer to of the int64 value passed in.
-func Int64(v int64) *int64 {
- return &v
-}
-
-// Int64Value returns the value of the int64 pointer passed in or
-// 0 if the pointer is nil.
-func Int64Value(v *int64) int64 {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// Int64Slice converts a slice of int64 values into a slice of
-// int64 pointers
-func Int64Slice(src []int64) []*int64 {
- dst := make([]*int64, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// Int64ValueSlice converts a slice of int64 pointers into a slice of
-// int64 values
-func Int64ValueSlice(src []*int64) []int64 {
- dst := make([]int64, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// Int64Map converts a string map of int64 values into a string
-// map of int64 pointers
-func Int64Map(src map[string]int64) map[string]*int64 {
- dst := make(map[string]*int64)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// Int64ValueMap converts a string map of int64 pointers into a string
-// map of int64 values
-func Int64ValueMap(src map[string]*int64) map[string]int64 {
- dst := make(map[string]int64)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Uint16 returns a pointer to of the uint16 value passed in.
-func Uint16(v uint16) *uint16 {
- return &v
-}
-
-// Uint16Value returns the value of the uint16 pointer passed in or
-// 0 if the pointer is nil.
-func Uint16Value(v *uint16) uint16 {
- if v != nil {
- return *v
- }
-
- return 0
-}
-
-// Uint16Slice converts a slice of uint16 values into a slice of
-// uint16 pointers
-func Uint16Slice(src []uint16) []*uint16 {
- dst := make([]*uint16, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
-
- return dst
-}
-
-// Uint16ValueSlice converts a slice of uint16 pointers into a slice of
-// uint16 values
-func Uint16ValueSlice(src []*uint16) []uint16 {
- dst := make([]uint16, len(src))
-
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
-
- return dst
-}
-
-// Uint16Map converts a string map of uint16 values into a string
-// map of uint16 pointers
-func Uint16Map(src map[string]uint16) map[string]*uint16 {
- dst := make(map[string]*uint16)
-
- for k, val := range src {
- v := val
- dst[k] = &v
- }
-
- return dst
-}
-
-// Uint16ValueMap converts a string map of uint16 pointers into a string
-// map of uint16 values
-func Uint16ValueMap(src map[string]*uint16) map[string]uint16 {
- dst := make(map[string]uint16)
-
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
-
- return dst
-}
-
-// Uint returns a pointer to of the uint value passed in.
-func Uint(v uint) *uint {
- return &v
-}
-
-// UintValue returns the value of the uint pointer passed in or
-// 0 if the pointer is nil.
-func UintValue(v *uint) uint {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// UintSlice converts a slice of uint values into a slice of
-// uint pointers
-func UintSlice(src []uint) []*uint {
- dst := make([]*uint, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// UintValueSlice converts a slice of uint pointers into a slice of
-// uint values
-func UintValueSlice(src []*uint) []uint {
- dst := make([]uint, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// UintMap converts a string map of uint values into a string
-// map of uint pointers
-func UintMap(src map[string]uint) map[string]*uint {
- dst := make(map[string]*uint)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// UintValueMap converts a string map of uint pointers into a string
-// map of uint values
-func UintValueMap(src map[string]*uint) map[string]uint {
- dst := make(map[string]uint)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Uint32 returns a pointer to of the uint32 value passed in.
-func Uint32(v uint32) *uint32 {
- return &v
-}
-
-// Uint32Value returns the value of the uint32 pointer passed in or
-// 0 if the pointer is nil.
-func Uint32Value(v *uint32) uint32 {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// Uint32Slice converts a slice of uint32 values into a slice of
-// uint32 pointers
-func Uint32Slice(src []uint32) []*uint32 {
- dst := make([]*uint32, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// Uint32ValueSlice converts a slice of uint32 pointers into a slice of
-// uint32 values
-func Uint32ValueSlice(src []*uint32) []uint32 {
- dst := make([]uint32, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// Uint32Map converts a string map of uint32 values into a string
-// map of uint32 pointers
-func Uint32Map(src map[string]uint32) map[string]*uint32 {
- dst := make(map[string]*uint32)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// Uint32ValueMap converts a string map of uint32 pointers into a string
-// map of uint32 values
-func Uint32ValueMap(src map[string]*uint32) map[string]uint32 {
- dst := make(map[string]uint32)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Uint64 returns a pointer to of the uint64 value passed in.
-func Uint64(v uint64) *uint64 {
- return &v
-}
-
-// Uint64Value returns the value of the uint64 pointer passed in or
-// 0 if the pointer is nil.
-func Uint64Value(v *uint64) uint64 {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// Uint64Slice converts a slice of uint64 values into a slice of
-// uint64 pointers
-func Uint64Slice(src []uint64) []*uint64 {
- dst := make([]*uint64, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// Uint64ValueSlice converts a slice of uint64 pointers into a slice of
-// uint64 values
-func Uint64ValueSlice(src []*uint64) []uint64 {
- dst := make([]uint64, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// Uint64Map converts a string map of uint64 values into a string
-// map of uint64 pointers
-func Uint64Map(src map[string]uint64) map[string]*uint64 {
- dst := make(map[string]*uint64)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// Uint64ValueMap converts a string map of uint64 pointers into a string
-// map of uint64 values
-func Uint64ValueMap(src map[string]*uint64) map[string]uint64 {
- dst := make(map[string]uint64)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Float32 returns a pointer to of the float32 value passed in.
-func Float32(v float32) *float32 {
- return &v
-}
-
-// Float32Value returns the value of the float32 pointer passed in or
-// 0 if the pointer is nil.
-func Float32Value(v *float32) float32 {
- if v != nil {
- return *v
- }
-
- return 0
-}
-
-// Float32Slice converts a slice of float32 values into a slice of
-// float32 pointers
-func Float32Slice(src []float32) []*float32 {
- dst := make([]*float32, len(src))
-
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
-
- return dst
-}
-
-// Float32ValueSlice converts a slice of float32 pointers into a slice of
-// float32 values
-func Float32ValueSlice(src []*float32) []float32 {
- dst := make([]float32, len(src))
-
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
-
- return dst
-}
-
-// Float32Map converts a string map of float32 values into a string
-// map of float32 pointers
-func Float32Map(src map[string]float32) map[string]*float32 {
- dst := make(map[string]*float32)
-
- for k, val := range src {
- v := val
- dst[k] = &v
- }
-
- return dst
-}
-
-// Float32ValueMap converts a string map of float32 pointers into a string
-// map of float32 values
-func Float32ValueMap(src map[string]*float32) map[string]float32 {
- dst := make(map[string]float32)
-
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
-
- return dst
-}
-
-// Float64 returns a pointer to of the float64 value passed in.
-func Float64(v float64) *float64 {
- return &v
-}
-
-// Float64Value returns the value of the float64 pointer passed in or
-// 0 if the pointer is nil.
-func Float64Value(v *float64) float64 {
- if v != nil {
- return *v
- }
- return 0
-}
-
-// Float64Slice converts a slice of float64 values into a slice of
-// float64 pointers
-func Float64Slice(src []float64) []*float64 {
- dst := make([]*float64, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// Float64ValueSlice converts a slice of float64 pointers into a slice of
-// float64 values
-func Float64ValueSlice(src []*float64) []float64 {
- dst := make([]float64, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// Float64Map converts a string map of float64 values into a string
-// map of float64 pointers
-func Float64Map(src map[string]float64) map[string]*float64 {
- dst := make(map[string]*float64)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// Float64ValueMap converts a string map of float64 pointers into a string
-// map of float64 values
-func Float64ValueMap(src map[string]*float64) map[string]float64 {
- dst := make(map[string]float64)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
-
-// Time returns a pointer to of the time.Time value passed in.
-func Time(v time.Time) *time.Time {
- return &v
-}
-
-// TimeValue returns the value of the time.Time pointer passed in or
-// time.Time{} if the pointer is nil.
-func TimeValue(v *time.Time) time.Time {
- if v != nil {
- return *v
- }
- return time.Time{}
-}
-
-// TimeSlice converts a slice of time.Time values into a slice of
-// time.Time pointers
-func TimeSlice(src []time.Time) []*time.Time {
- dst := make([]*time.Time, len(src))
- for i := 0; i < len(src); i++ {
- dst[i] = &(src[i])
- }
- return dst
-}
-
-// TimeValueSlice converts a slice of time.Time pointers into a slice of
-// time.Time values
-func TimeValueSlice(src []*time.Time) []time.Time {
- dst := make([]time.Time, len(src))
- for i := 0; i < len(src); i++ {
- if src[i] != nil {
- dst[i] = *(src[i])
- }
- }
- return dst
-}
-
-// TimeMap converts a string map of time.Time values into a string
-// map of time.Time pointers
-func TimeMap(src map[string]time.Time) map[string]*time.Time {
- dst := make(map[string]*time.Time)
- for k, val := range src {
- v := val
- dst[k] = &v
- }
- return dst
-}
-
-// TimeValueMap converts a string map of time.Time pointers into a string
-// map of time.Time values
-func TimeValueMap(src map[string]*time.Time) map[string]time.Time {
- dst := make(map[string]time.Time)
- for k, val := range src {
- if val != nil {
- dst[k] = *val
- }
- }
- return dst
-}
diff --git a/vendor/github.com/go-openapi/swag/doc.go b/vendor/github.com/go-openapi/swag/doc.go
deleted file mode 100644
index 55094cb74c..0000000000
--- a/vendor/github.com/go-openapi/swag/doc.go
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/*
-Package swag contains a bunch of helper functions for go-openapi and go-swagger projects.
-
-You may also use it standalone for your projects.
-
- - convert between value and pointers for builtin types
- - convert from string to builtin types (wraps strconv)
- - fast json concatenation
- - search in path
- - load from file or http
- - name mangling
-
-This repo has only few dependencies outside of the standard library:
-
- - YAML utilities depend on gopkg.in/yaml.v2
-*/
-package swag
diff --git a/vendor/github.com/go-openapi/swag/errors.go b/vendor/github.com/go-openapi/swag/errors.go
deleted file mode 100644
index 6c67fbf92e..0000000000
--- a/vendor/github.com/go-openapi/swag/errors.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package swag
-
-type swagError string
-
-const (
- // ErrYAML is an error raised by YAML utilities
- ErrYAML swagError = "yaml error"
-
- // ErrLoader is an error raised by the file loader utility
- ErrLoader swagError = "loader error"
-)
-
-func (e swagError) Error() string {
- return string(e)
-}
diff --git a/vendor/github.com/go-openapi/swag/file.go b/vendor/github.com/go-openapi/swag/file.go
deleted file mode 100644
index 16accc55f8..0000000000
--- a/vendor/github.com/go-openapi/swag/file.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import "mime/multipart"
-
-// File represents an uploaded file.
-type File struct {
- Data multipart.File
- Header *multipart.FileHeader
-}
-
-// Read bytes from the file
-func (f *File) Read(p []byte) (n int, err error) {
- return f.Data.Read(p)
-}
-
-// Close the file
-func (f *File) Close() error {
- return f.Data.Close()
-}
diff --git a/vendor/github.com/go-openapi/swag/initialism_index.go b/vendor/github.com/go-openapi/swag/initialism_index.go
deleted file mode 100644
index 20a359bb60..0000000000
--- a/vendor/github.com/go-openapi/swag/initialism_index.go
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "sort"
- "strings"
- "sync"
-)
-
-var (
- // commonInitialisms are common acronyms that are kept as whole uppercased words.
- commonInitialisms *indexOfInitialisms
-
- // initialisms is a slice of sorted initialisms
- initialisms []string
-
- // a copy of initialisms pre-baked as []rune
- initialismsRunes [][]rune
- initialismsUpperCased [][]rune
-
- isInitialism func(string) bool
-
- maxAllocMatches int
-)
-
-func init() {
- // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769
- configuredInitialisms := map[string]bool{
- "ACL": true,
- "API": true,
- "ASCII": true,
- "CPU": true,
- "CSS": true,
- "DNS": true,
- "EOF": true,
- "GUID": true,
- "HTML": true,
- "HTTPS": true,
- "HTTP": true,
- "ID": true,
- "IP": true,
- "IPv4": true,
- "IPv6": true,
- "JSON": true,
- "LHS": true,
- "OAI": true,
- "QPS": true,
- "RAM": true,
- "RHS": true,
- "RPC": true,
- "SLA": true,
- "SMTP": true,
- "SQL": true,
- "SSH": true,
- "TCP": true,
- "TLS": true,
- "TTL": true,
- "UDP": true,
- "UI": true,
- "UID": true,
- "UUID": true,
- "URI": true,
- "URL": true,
- "UTF8": true,
- "VM": true,
- "XML": true,
- "XMPP": true,
- "XSRF": true,
- "XSS": true,
- }
-
- // a thread-safe index of initialisms
- commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms)
- initialisms = commonInitialisms.sorted()
- initialismsRunes = asRunes(initialisms)
- initialismsUpperCased = asUpperCased(initialisms)
- maxAllocMatches = maxAllocHeuristic(initialismsRunes)
-
- // a test function
- isInitialism = commonInitialisms.isInitialism
-}
-
-func asRunes(in []string) [][]rune {
- out := make([][]rune, len(in))
- for i, initialism := range in {
- out[i] = []rune(initialism)
- }
-
- return out
-}
-
-func asUpperCased(in []string) [][]rune {
- out := make([][]rune, len(in))
-
- for i, initialism := range in {
- out[i] = []rune(upper(trim(initialism)))
- }
-
- return out
-}
-
-func maxAllocHeuristic(in [][]rune) int {
- heuristic := make(map[rune]int)
- for _, initialism := range in {
- heuristic[initialism[0]]++
- }
-
- var maxAlloc int
- for _, val := range heuristic {
- if val > maxAlloc {
- maxAlloc = val
- }
- }
-
- return maxAlloc
-}
-
-// AddInitialisms add additional initialisms
-func AddInitialisms(words ...string) {
- for _, word := range words {
- // commonInitialisms[upper(word)] = true
- commonInitialisms.add(upper(word))
- }
- // sort again
- initialisms = commonInitialisms.sorted()
- initialismsRunes = asRunes(initialisms)
- initialismsUpperCased = asUpperCased(initialisms)
-}
-
-// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
-// Since go1.9, this may be implemented with sync.Map.
-type indexOfInitialisms struct {
- sortMutex *sync.Mutex
- index *sync.Map
-}
-
-func newIndexOfInitialisms() *indexOfInitialisms {
- return &indexOfInitialisms{
- sortMutex: new(sync.Mutex),
- index: new(sync.Map),
- }
-}
-
-func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
- m.sortMutex.Lock()
- defer m.sortMutex.Unlock()
- for k, v := range initial {
- m.index.Store(k, v)
- }
- return m
-}
-
-func (m *indexOfInitialisms) isInitialism(key string) bool {
- _, ok := m.index.Load(key)
- return ok
-}
-
-func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
- m.index.Store(key, true)
- return m
-}
-
-func (m *indexOfInitialisms) sorted() (result []string) {
- m.sortMutex.Lock()
- defer m.sortMutex.Unlock()
- m.index.Range(func(key, _ interface{}) bool {
- k := key.(string)
- result = append(result, k)
- return true
- })
- sort.Sort(sort.Reverse(byInitialism(result)))
- return
-}
-
-type byInitialism []string
-
-func (s byInitialism) Len() int {
- return len(s)
-}
-func (s byInitialism) Swap(i, j int) {
- s[i], s[j] = s[j], s[i]
-}
-func (s byInitialism) Less(i, j int) bool {
- if len(s[i]) != len(s[j]) {
- return len(s[i]) < len(s[j])
- }
-
- return strings.Compare(s[i], s[j]) > 0
-}
diff --git a/vendor/github.com/go-openapi/swag/json.go b/vendor/github.com/go-openapi/swag/json.go
deleted file mode 100644
index c7caa9908f..0000000000
--- a/vendor/github.com/go-openapi/swag/json.go
+++ /dev/null
@@ -1,313 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "bytes"
- "encoding/json"
- "log"
- "reflect"
- "strings"
- "sync"
-
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
-)
-
-// nullJSON represents a JSON object with null type
-var nullJSON = []byte("null")
-
-// DefaultJSONNameProvider the default cache for types
-var DefaultJSONNameProvider = NewNameProvider()
-
-const comma = byte(',')
-
-var closers map[byte]byte
-
-func init() {
- closers = map[byte]byte{
- '{': '}',
- '[': ']',
- }
-}
-
-type ejMarshaler interface {
- MarshalEasyJSON(w *jwriter.Writer)
-}
-
-type ejUnmarshaler interface {
- UnmarshalEasyJSON(w *jlexer.Lexer)
-}
-
-// WriteJSON writes json data, prefers finding an appropriate interface to short-circuit the marshaler
-// so it takes the fastest option available.
-func WriteJSON(data interface{}) ([]byte, error) {
- if d, ok := data.(ejMarshaler); ok {
- jw := new(jwriter.Writer)
- d.MarshalEasyJSON(jw)
- return jw.BuildBytes()
- }
- if d, ok := data.(json.Marshaler); ok {
- return d.MarshalJSON()
- }
- return json.Marshal(data)
-}
-
-// ReadJSON reads json data, prefers finding an appropriate interface to short-circuit the unmarshaler
-// so it takes the fastest option available
-func ReadJSON(data []byte, value interface{}) error {
- trimmedData := bytes.Trim(data, "\x00")
- if d, ok := value.(ejUnmarshaler); ok {
- jl := &jlexer.Lexer{Data: trimmedData}
- d.UnmarshalEasyJSON(jl)
- return jl.Error()
- }
- if d, ok := value.(json.Unmarshaler); ok {
- return d.UnmarshalJSON(trimmedData)
- }
- return json.Unmarshal(trimmedData, value)
-}
-
-// DynamicJSONToStruct converts an untyped json structure into a struct
-func DynamicJSONToStruct(data interface{}, target interface{}) error {
- // TODO: convert straight to a json typed map (mergo + iterate?)
- b, err := WriteJSON(data)
- if err != nil {
- return err
- }
- return ReadJSON(b, target)
-}
-
-// ConcatJSON concatenates multiple json objects efficiently
-func ConcatJSON(blobs ...[]byte) []byte {
- if len(blobs) == 0 {
- return nil
- }
-
- last := len(blobs) - 1
- for blobs[last] == nil || bytes.Equal(blobs[last], nullJSON) {
- // strips trailing null objects
- last--
- if last < 0 {
- // there was nothing but "null"s or nil...
- return nil
- }
- }
- if last == 0 {
- return blobs[0]
- }
-
- var opening, closing byte
- var idx, a int
- buf := bytes.NewBuffer(nil)
-
- for i, b := range blobs[:last+1] {
- if b == nil || bytes.Equal(b, nullJSON) {
- // a null object is in the list: skip it
- continue
- }
- if len(b) > 0 && opening == 0 { // is this an array or an object?
- opening, closing = b[0], closers[b[0]]
- }
-
- if opening != '{' && opening != '[' {
- continue // don't know how to concatenate non container objects
- }
-
- const minLengthIfNotEmpty = 3
- if len(b) < minLengthIfNotEmpty { // yep empty but also the last one, so closing this thing
- if i == last && a > 0 {
- if err := buf.WriteByte(closing); err != nil {
- log.Println(err)
- }
- }
- continue
- }
-
- idx = 0
- if a > 0 { // we need to join with a comma for everything beyond the first non-empty item
- if err := buf.WriteByte(comma); err != nil {
- log.Println(err)
- }
- idx = 1 // this is not the first or the last so we want to drop the leading bracket
- }
-
- if i != last { // not the last one, strip brackets
- if _, err := buf.Write(b[idx : len(b)-1]); err != nil {
- log.Println(err)
- }
- } else { // last one, strip only the leading bracket
- if _, err := buf.Write(b[idx:]); err != nil {
- log.Println(err)
- }
- }
- a++
- }
- // somehow it ended up being empty, so provide a default value
- if buf.Len() == 0 {
- if err := buf.WriteByte(opening); err != nil {
- log.Println(err)
- }
- if err := buf.WriteByte(closing); err != nil {
- log.Println(err)
- }
- }
- return buf.Bytes()
-}
-
-// ToDynamicJSON turns an object into a properly JSON typed structure
-func ToDynamicJSON(data interface{}) interface{} {
- // TODO: convert straight to a json typed map (mergo + iterate?)
- b, err := json.Marshal(data)
- if err != nil {
- log.Println(err)
- }
- var res interface{}
- if err := json.Unmarshal(b, &res); err != nil {
- log.Println(err)
- }
- return res
-}
-
-// FromDynamicJSON turns an object into a properly JSON typed structure
-func FromDynamicJSON(data, target interface{}) error {
- b, err := json.Marshal(data)
- if err != nil {
- log.Println(err)
- }
- return json.Unmarshal(b, target)
-}
-
-// NameProvider represents an object capable of translating from go property names
-// to json property names
-// This type is thread-safe.
-type NameProvider struct {
- lock *sync.Mutex
- index map[reflect.Type]nameIndex
-}
-
-type nameIndex struct {
- jsonNames map[string]string
- goNames map[string]string
-}
-
-// NewNameProvider creates a new name provider
-func NewNameProvider() *NameProvider {
- return &NameProvider{
- lock: &sync.Mutex{},
- index: make(map[reflect.Type]nameIndex),
- }
-}
-
-func buildnameIndex(tpe reflect.Type, idx, reverseIdx map[string]string) {
- for i := 0; i < tpe.NumField(); i++ {
- targetDes := tpe.Field(i)
-
- if targetDes.PkgPath != "" { // unexported
- continue
- }
-
- if targetDes.Anonymous { // walk embedded structures tree down first
- buildnameIndex(targetDes.Type, idx, reverseIdx)
- continue
- }
-
- if tag := targetDes.Tag.Get("json"); tag != "" {
-
- parts := strings.Split(tag, ",")
- if len(parts) == 0 {
- continue
- }
-
- nm := parts[0]
- if nm == "-" {
- continue
- }
- if nm == "" { // empty string means we want to use the Go name
- nm = targetDes.Name
- }
-
- idx[nm] = targetDes.Name
- reverseIdx[targetDes.Name] = nm
- }
- }
-}
-
-func newNameIndex(tpe reflect.Type) nameIndex {
- var idx = make(map[string]string, tpe.NumField())
- var reverseIdx = make(map[string]string, tpe.NumField())
-
- buildnameIndex(tpe, idx, reverseIdx)
- return nameIndex{jsonNames: idx, goNames: reverseIdx}
-}
-
-// GetJSONNames gets all the json property names for a type
-func (n *NameProvider) GetJSONNames(subject interface{}) []string {
- n.lock.Lock()
- defer n.lock.Unlock()
- tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
- names, ok := n.index[tpe]
- if !ok {
- names = n.makeNameIndex(tpe)
- }
-
- res := make([]string, 0, len(names.jsonNames))
- for k := range names.jsonNames {
- res = append(res, k)
- }
- return res
-}
-
-// GetJSONName gets the json name for a go property name
-func (n *NameProvider) GetJSONName(subject interface{}, name string) (string, bool) {
- tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
- return n.GetJSONNameForType(tpe, name)
-}
-
-// GetJSONNameForType gets the json name for a go property name on a given type
-func (n *NameProvider) GetJSONNameForType(tpe reflect.Type, name string) (string, bool) {
- n.lock.Lock()
- defer n.lock.Unlock()
- names, ok := n.index[tpe]
- if !ok {
- names = n.makeNameIndex(tpe)
- }
- nme, ok := names.goNames[name]
- return nme, ok
-}
-
-func (n *NameProvider) makeNameIndex(tpe reflect.Type) nameIndex {
- names := newNameIndex(tpe)
- n.index[tpe] = names
- return names
-}
-
-// GetGoName gets the go name for a json property name
-func (n *NameProvider) GetGoName(subject interface{}, name string) (string, bool) {
- tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
- return n.GetGoNameForType(tpe, name)
-}
-
-// GetGoNameForType gets the go name for a given type for a json property name
-func (n *NameProvider) GetGoNameForType(tpe reflect.Type, name string) (string, bool) {
- n.lock.Lock()
- defer n.lock.Unlock()
- names, ok := n.index[tpe]
- if !ok {
- names = n.makeNameIndex(tpe)
- }
- nme, ok := names.jsonNames[name]
- return nme, ok
-}
diff --git a/vendor/github.com/go-openapi/swag/loading.go b/vendor/github.com/go-openapi/swag/loading.go
deleted file mode 100644
index 658a24b789..0000000000
--- a/vendor/github.com/go-openapi/swag/loading.go
+++ /dev/null
@@ -1,176 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "fmt"
- "io"
- "log"
- "net/http"
- "net/url"
- "os"
- "path"
- "path/filepath"
- "runtime"
- "strings"
- "time"
-)
-
-// LoadHTTPTimeout the default timeout for load requests
-var LoadHTTPTimeout = 30 * time.Second
-
-// LoadHTTPBasicAuthUsername the username to use when load requests require basic auth
-var LoadHTTPBasicAuthUsername = ""
-
-// LoadHTTPBasicAuthPassword the password to use when load requests require basic auth
-var LoadHTTPBasicAuthPassword = ""
-
-// LoadHTTPCustomHeaders an optional collection of custom HTTP headers for load requests
-var LoadHTTPCustomHeaders = map[string]string{}
-
-// LoadFromFileOrHTTP loads the bytes from a file or a remote http server based on the path passed in
-func LoadFromFileOrHTTP(pth string) ([]byte, error) {
- return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(pth)
-}
-
-// LoadFromFileOrHTTPWithTimeout loads the bytes from a file or a remote http server based on the path passed in
-// timeout arg allows for per request overriding of the request timeout
-func LoadFromFileOrHTTPWithTimeout(pth string, timeout time.Duration) ([]byte, error) {
- return LoadStrategy(pth, os.ReadFile, loadHTTPBytes(timeout))(pth)
-}
-
-// LoadStrategy returns a loader function for a given path or URI.
-//
-// The load strategy returns the remote load for any path starting with `http`.
-// So this works for any URI with a scheme `http` or `https`.
-//
-// The fallback strategy is to call the local loader.
-//
-// The local loader takes a local file system path (absolute or relative) as argument,
-// or alternatively a `file://...` URI, **without host** (see also below for windows).
-//
-// There are a few liberalities, initially intended to be tolerant regarding the URI syntax,
-// especially on windows.
-//
-// Before the local loader is called, the given path is transformed:
-// - percent-encoded characters are unescaped
-// - simple paths (e.g. `./folder/file`) are passed as-is
-// - on windows, occurrences of `/` are replaced by `\`, so providing a relative path such a `folder/file` works too.
-//
-// For paths provided as URIs with the "file" scheme, please note that:
-// - `file://` is simply stripped.
-// This means that the host part of the URI is not parsed at all.
-// For example, `file:///folder/file" becomes "/folder/file`,
-// but `file://localhost/folder/file` becomes `localhost/folder/file` on unix systems.
-// Similarly, `file://./folder/file` yields `./folder/file`.
-// - on windows, `file://...` can take a host so as to specify an UNC share location.
-//
-// Reminder about windows-specifics:
-// - `file://host/folder/file` becomes an UNC path like `\\host\folder\file` (no port specification is supported)
-// - `file:///c:/folder/file` becomes `C:\folder\file`
-// - `file://c:/folder/file` is tolerated (without leading `/`) and becomes `c:\folder\file`
-func LoadStrategy(pth string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) {
- if strings.HasPrefix(pth, "http") {
- return remote
- }
-
- return func(p string) ([]byte, error) {
- upth, err := url.PathUnescape(p)
- if err != nil {
- return nil, err
- }
-
- if !strings.HasPrefix(p, `file://`) {
- // regular file path provided: just normalize slashes
- return local(filepath.FromSlash(upth))
- }
-
- if runtime.GOOS != "windows" {
- // crude processing: this leaves full URIs with a host with a (mostly) unexpected result
- upth = strings.TrimPrefix(upth, `file://`)
-
- return local(filepath.FromSlash(upth))
- }
-
- // windows-only pre-processing of file://... URIs
-
- // support for canonical file URIs on windows.
- u, err := url.Parse(filepath.ToSlash(upth))
- if err != nil {
- return nil, err
- }
-
- if u.Host != "" {
- // assume UNC name (volume share)
- // NOTE: UNC port not yet supported
-
- // when the "host" segment is a drive letter:
- // file://C:/folder/... => C:\folder
- upth = path.Clean(strings.Join([]string{u.Host, u.Path}, `/`))
- if !strings.HasSuffix(u.Host, ":") && u.Host[0] != '.' {
- // tolerance: if we have a leading dot, this can't be a host
- // file://host/share/folder\... ==> \\host\share\path\folder
- upth = "//" + upth
- }
- } else {
- // no host, let's figure out if this is a drive letter
- upth = strings.TrimPrefix(upth, `file://`)
- first, _, _ := strings.Cut(strings.TrimPrefix(u.Path, "/"), "/")
- if strings.HasSuffix(first, ":") {
- // drive letter in the first segment:
- // file:///c:/folder/... ==> strip the leading slash
- upth = strings.TrimPrefix(upth, `/`)
- }
- }
-
- return local(filepath.FromSlash(upth))
- }
-}
-
-func loadHTTPBytes(timeout time.Duration) func(path string) ([]byte, error) {
- return func(path string) ([]byte, error) {
- client := &http.Client{Timeout: timeout}
- req, err := http.NewRequest(http.MethodGet, path, nil) //nolint:noctx
- if err != nil {
- return nil, err
- }
-
- if LoadHTTPBasicAuthUsername != "" && LoadHTTPBasicAuthPassword != "" {
- req.SetBasicAuth(LoadHTTPBasicAuthUsername, LoadHTTPBasicAuthPassword)
- }
-
- for key, val := range LoadHTTPCustomHeaders {
- req.Header.Set(key, val)
- }
-
- resp, err := client.Do(req)
- defer func() {
- if resp != nil {
- if e := resp.Body.Close(); e != nil {
- log.Println(e)
- }
- }
- }()
- if err != nil {
- return nil, err
- }
-
- if resp.StatusCode != http.StatusOK {
- return nil, fmt.Errorf("could not access document at %q [%s]: %w", path, resp.Status, ErrLoader)
- }
-
- return io.ReadAll(resp.Body)
- }
-}
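
A minimal sketch of how a caller outside this package might wire the exported LoadStrategy: a plain file reader serves as the local loader, the remote branch is stubbed out, and the spec path is only a hypothetical example. Paths starting with "http" would select the remote loader instead.

```go
package main

import (
	"fmt"
	"os"

	"github.com/go-openapi/swag"
)

func main() {
	local := os.ReadFile // func(string) ([]byte, error), matches the expected loader signature
	remote := func(p string) ([]byte, error) {
		// stub: swag's own HTTP loader adds basic auth and custom headers
		// before issuing the GET request
		return nil, fmt.Errorf("remote loading not wired in this sketch: %s", p)
	}

	// "./spec/swagger.yaml" is a hypothetical path; an "http(s)://..." path
	// would be routed to the remote loader instead of the local one.
	load := swag.LoadStrategy("./spec/swagger.yaml", local, remote)

	doc, err := load("./spec/swagger.yaml")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Printf("loaded %d bytes\n", len(doc))
}
```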
diff --git a/vendor/github.com/go-openapi/swag/name_lexem.go b/vendor/github.com/go-openapi/swag/name_lexem.go
deleted file mode 100644
index 8bb64ac32f..0000000000
--- a/vendor/github.com/go-openapi/swag/name_lexem.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "unicode"
- "unicode/utf8"
-)
-
-type (
- lexemKind uint8
-
- nameLexem struct {
- original string
- matchedInitialism string
- kind lexemKind
- }
-)
-
-const (
- lexemKindCasualName lexemKind = iota
- lexemKindInitialismName
-)
-
-func newInitialismNameLexem(original, matchedInitialism string) nameLexem {
- return nameLexem{
- kind: lexemKindInitialismName,
- original: original,
- matchedInitialism: matchedInitialism,
- }
-}
-
-func newCasualNameLexem(original string) nameLexem {
- return nameLexem{
- kind: lexemKindCasualName,
- original: original,
- }
-}
-
-func (l nameLexem) GetUnsafeGoName() string {
- if l.kind == lexemKindInitialismName {
- return l.matchedInitialism
- }
-
- var (
- first rune
- rest string
- )
-
- for i, orig := range l.original {
- if i == 0 {
- first = orig
- continue
- }
-
- if i > 0 {
- rest = l.original[i:]
- break
- }
- }
-
- if len(l.original) > 1 {
- b := poolOfBuffers.BorrowBuffer(utf8.UTFMax + len(rest))
- defer func() {
- poolOfBuffers.RedeemBuffer(b)
- }()
- b.WriteRune(unicode.ToUpper(first))
- b.WriteString(lower(rest))
- return b.String()
- }
-
- return l.original
-}
-
-func (l nameLexem) GetOriginal() string {
- return l.original
-}
-
-func (l nameLexem) IsInitialism() bool {
- return l.kind == lexemKindInitialismName
-}
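
For illustration, a package-internal sketch (these types are unexported, so this only compiles inside package swag) of how the two lexem kinds render into Go identifier segments. The helper below is hypothetical and the outputs are indicative only.

```go
package swag

import "fmt"

// sketchLexems is a hypothetical helper, not part of the package.
func sketchLexems() {
	casual := newCasualNameLexem("http")
	initialism := newInitialismNameLexem("http", "HTTP")

	fmt.Println(casual.GetUnsafeGoName())     // "Http": first rune upper-cased, remainder lowered
	fmt.Println(initialism.GetUnsafeGoName()) // "HTTP": the matched initialism is used verbatim
}
```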
diff --git a/vendor/github.com/go-openapi/swag/net.go b/vendor/github.com/go-openapi/swag/net.go
deleted file mode 100644
index 821235f84d..0000000000
--- a/vendor/github.com/go-openapi/swag/net.go
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "net"
- "strconv"
-)
-
-// SplitHostPort splits a network address into a host and a port.
-// The port is -1 when there is no port to be found
-func SplitHostPort(addr string) (host string, port int, err error) {
- h, p, err := net.SplitHostPort(addr)
- if err != nil {
- return "", -1, err
- }
- if p == "" {
- return "", -1, &net.AddrError{Err: "missing port in address", Addr: addr}
- }
-
- pi, err := strconv.Atoi(p)
- if err != nil {
- return "", -1, err
- }
- return h, pi, nil
-}
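
A small usage sketch of SplitHostPort from a caller's side: unlike net.SplitHostPort it returns the port as an int and treats a missing port as an error rather than a zero value.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/swag"
)

func main() {
	host, port, err := swag.SplitHostPort("localhost:8080")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(host, port) // localhost 8080

	// a missing port is reported as an error
	if _, _, err := swag.SplitHostPort("localhost"); err != nil {
		fmt.Println("no port:", err)
	}
}
```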
diff --git a/vendor/github.com/go-openapi/swag/path.go b/vendor/github.com/go-openapi/swag/path.go
deleted file mode 100644
index 941bd0176b..0000000000
--- a/vendor/github.com/go-openapi/swag/path.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "os"
- "path/filepath"
- "runtime"
- "strings"
-)
-
-const (
- // GOPATHKey represents the env key for gopath
- GOPATHKey = "GOPATH"
-)
-
-// FindInSearchPath finds a package in a provided lists of paths
-func FindInSearchPath(searchPath, pkg string) string {
- pathsList := filepath.SplitList(searchPath)
- for _, path := range pathsList {
- if evaluatedPath, err := filepath.EvalSymlinks(filepath.Join(path, "src", pkg)); err == nil {
- if _, err := os.Stat(evaluatedPath); err == nil {
- return evaluatedPath
- }
- }
- }
- return ""
-}
-
-// FindInGoSearchPath finds a package in the $GOPATH:$GOROOT
-func FindInGoSearchPath(pkg string) string {
- return FindInSearchPath(FullGoSearchPath(), pkg)
-}
-
-// FullGoSearchPath gets the search paths for finding packages
-func FullGoSearchPath() string {
- allPaths := os.Getenv(GOPATHKey)
- if allPaths == "" {
- allPaths = filepath.Join(os.Getenv("HOME"), "go")
- }
- if allPaths != "" {
- allPaths = strings.Join([]string{allPaths, runtime.GOROOT()}, ":")
- } else {
- allPaths = runtime.GOROOT()
- }
- return allPaths
-}
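
A sketch of the GOPATH/GOROOT lookup from a caller's perspective. The import path used here is only an example; the result is empty unless the package actually sits under $GOPATH/src or GOROOT, which is rarely the case in module-based builds.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	// "github.com/go-openapi/swag" is just an example import path.
	if dir := swag.FindInGoSearchPath("github.com/go-openapi/swag"); dir != "" {
		fmt.Println("found at:", dir)
	} else {
		fmt.Println("not found in GOPATH/GOROOT")
	}
}
```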
diff --git a/vendor/github.com/go-openapi/swag/split.go b/vendor/github.com/go-openapi/swag/split.go
deleted file mode 100644
index 274727a866..0000000000
--- a/vendor/github.com/go-openapi/swag/split.go
+++ /dev/null
@@ -1,508 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "bytes"
- "sync"
- "unicode"
- "unicode/utf8"
-)
-
-type (
- splitter struct {
- initialisms []string
- initialismsRunes [][]rune
- initialismsUpperCased [][]rune // initialisms cached in their trimmed, upper-cased version
- postSplitInitialismCheck bool
- }
-
- splitterOption func(*splitter)
-
- initialismMatch struct {
- body []rune
- start, end int
- complete bool
- }
- initialismMatches []initialismMatch
-)
-
-type (
- // memory pools of temporary objects.
- //
- // These are used to recycle temporarily allocated objects
- // and relieve the GC from undue pressure.
-
- matchesPool struct {
- *sync.Pool
- }
-
- buffersPool struct {
- *sync.Pool
- }
-
- lexemsPool struct {
- *sync.Pool
- }
-
- splittersPool struct {
- *sync.Pool
- }
-)
-
-var (
- // poolOfMatches holds temporary slices for recycling during the initialism match process
- poolOfMatches = matchesPool{
- Pool: &sync.Pool{
- New: func() any {
- s := make(initialismMatches, 0, maxAllocMatches)
-
- return &s
- },
- },
- }
-
- poolOfBuffers = buffersPool{
- Pool: &sync.Pool{
- New: func() any {
- return new(bytes.Buffer)
- },
- },
- }
-
- poolOfLexems = lexemsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := make([]nameLexem, 0, maxAllocMatches)
-
- return &s
- },
- },
- }
-
- poolOfSplitters = splittersPool{
- Pool: &sync.Pool{
- New: func() any {
- s := newSplitter()
-
- return &s
- },
- },
- }
-)
-
-// nameReplaceTable finds a word representation for special characters.
-func nameReplaceTable(r rune) (string, bool) {
- switch r {
- case '@':
- return "At ", true
- case '&':
- return "And ", true
- case '|':
- return "Pipe ", true
- case '$':
- return "Dollar ", true
- case '!':
- return "Bang ", true
- case '-':
- return "", true
- case '_':
- return "", true
- default:
- return "", false
- }
-}
-
-// split calls the splitter.
-//
-// Use newSplitter for more control and options
-func split(str string) []string {
- s := poolOfSplitters.BorrowSplitter()
- lexems := s.split(str)
- result := make([]string, 0, len(*lexems))
-
- for _, lexem := range *lexems {
- result = append(result, lexem.GetOriginal())
- }
- poolOfLexems.RedeemLexems(lexems)
- poolOfSplitters.RedeemSplitter(s)
-
- return result
-
-}
-
-func newSplitter(options ...splitterOption) splitter {
- s := splitter{
- postSplitInitialismCheck: false,
- initialisms: initialisms,
- initialismsRunes: initialismsRunes,
- initialismsUpperCased: initialismsUpperCased,
- }
-
- for _, option := range options {
- option(&s)
- }
-
- return s
-}
-
-// withPostSplitInitialismCheck allows catching initialisms after the main split process
-func withPostSplitInitialismCheck(s *splitter) {
- s.postSplitInitialismCheck = true
-}
-
-func (p matchesPool) BorrowMatches() *initialismMatches {
- s := p.Get().(*initialismMatches)
- *s = (*s)[:0] // reset slice, keep allocated capacity
-
- return s
-}
-
-func (p buffersPool) BorrowBuffer(size int) *bytes.Buffer {
- s := p.Get().(*bytes.Buffer)
- s.Reset()
-
- if s.Cap() < size {
- s.Grow(size)
- }
-
- return s
-}
-
-func (p lexemsPool) BorrowLexems() *[]nameLexem {
- s := p.Get().(*[]nameLexem)
- *s = (*s)[:0] // reset slice, keep allocated capacity
-
- return s
-}
-
-func (p splittersPool) BorrowSplitter(options ...splitterOption) *splitter {
- s := p.Get().(*splitter)
- s.postSplitInitialismCheck = false // reset options
- for _, apply := range options {
- apply(s)
- }
-
- return s
-}
-
-func (p matchesPool) RedeemMatches(s *initialismMatches) {
- p.Put(s)
-}
-
-func (p buffersPool) RedeemBuffer(s *bytes.Buffer) {
- p.Put(s)
-}
-
-func (p lexemsPool) RedeemLexems(s *[]nameLexem) {
- p.Put(s)
-}
-
-func (p splittersPool) RedeemSplitter(s *splitter) {
- p.Put(s)
-}
-
-func (m initialismMatch) isZero() bool {
- return m.start == 0 && m.end == 0
-}
-
-func (s splitter) split(name string) *[]nameLexem {
- nameRunes := []rune(name)
- matches := s.gatherInitialismMatches(nameRunes)
- if matches == nil {
- return poolOfLexems.BorrowLexems()
- }
-
- return s.mapMatchesToNameLexems(nameRunes, matches)
-}
-
-func (s splitter) gatherInitialismMatches(nameRunes []rune) *initialismMatches {
- var matches *initialismMatches
-
- for currentRunePosition, currentRune := range nameRunes {
- // recycle these allocations as we loop over runes
- // with such recycling, only 2 slices should be allocated per call
- // instead of o(n).
- newMatches := poolOfMatches.BorrowMatches()
-
- // check current initialism matches
- if matches != nil { // skip first iteration
- for _, match := range *matches {
- if keepCompleteMatch := match.complete; keepCompleteMatch {
- *newMatches = append(*newMatches, match)
- continue
- }
-
- // drop failed match
- currentMatchRune := match.body[currentRunePosition-match.start]
- if currentMatchRune != currentRune {
- continue
- }
-
- // try to complete ongoing match
- if currentRunePosition-match.start == len(match.body)-1 {
- // we are close; the next step is to check the symbol ahead
- // if it is a small letter, then it is not the end of match
- // but beginning of the next word
-
- if currentRunePosition < len(nameRunes)-1 {
- nextRune := nameRunes[currentRunePosition+1]
- if newWord := unicode.IsLower(nextRune); newWord {
- // oh ok, it was the start of a new word
- continue
- }
- }
-
- match.complete = true
- match.end = currentRunePosition
- }
-
- *newMatches = append(*newMatches, match)
- }
- }
-
- // check for new initialism matches
- for i := range s.initialisms {
- initialismRunes := s.initialismsRunes[i]
- if initialismRunes[0] == currentRune {
- *newMatches = append(*newMatches, initialismMatch{
- start: currentRunePosition,
- body: initialismRunes,
- complete: false,
- })
- }
- }
-
- if matches != nil {
- poolOfMatches.RedeemMatches(matches)
- }
- matches = newMatches
- }
-
- // up to the caller to redeem this last slice
- return matches
-}
-
-func (s splitter) mapMatchesToNameLexems(nameRunes []rune, matches *initialismMatches) *[]nameLexem {
- nameLexems := poolOfLexems.BorrowLexems()
-
- var lastAcceptedMatch initialismMatch
- for _, match := range *matches {
- if !match.complete {
- continue
- }
-
- if firstMatch := lastAcceptedMatch.isZero(); firstMatch {
- s.appendBrokenDownCasualString(nameLexems, nameRunes[:match.start])
- *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body)))
-
- lastAcceptedMatch = match
-
- continue
- }
-
- if overlappedMatch := match.start <= lastAcceptedMatch.end; overlappedMatch {
- continue
- }
-
- middle := nameRunes[lastAcceptedMatch.end+1 : match.start]
- s.appendBrokenDownCasualString(nameLexems, middle)
- *nameLexems = append(*nameLexems, s.breakInitialism(string(match.body)))
-
- lastAcceptedMatch = match
- }
-
- // we have not found any accepted matches
- if lastAcceptedMatch.isZero() {
- *nameLexems = (*nameLexems)[:0]
- s.appendBrokenDownCasualString(nameLexems, nameRunes)
- } else if lastAcceptedMatch.end+1 != len(nameRunes) {
- rest := nameRunes[lastAcceptedMatch.end+1:]
- s.appendBrokenDownCasualString(nameLexems, rest)
- }
-
- poolOfMatches.RedeemMatches(matches)
-
- return nameLexems
-}
-
-func (s splitter) breakInitialism(original string) nameLexem {
- return newInitialismNameLexem(original, original)
-}
-
-func (s splitter) appendBrokenDownCasualString(segments *[]nameLexem, str []rune) {
-	currentSegment := poolOfBuffers.BorrowBuffer(len(str)) // unlike strings.Builder, bytes.Buffer's initial storage can be reused
- defer func() {
- poolOfBuffers.RedeemBuffer(currentSegment)
- }()
-
- addCasualNameLexem := func(original string) {
- *segments = append(*segments, newCasualNameLexem(original))
- }
-
- addInitialismNameLexem := func(original, match string) {
- *segments = append(*segments, newInitialismNameLexem(original, match))
- }
-
- var addNameLexem func(string)
- if s.postSplitInitialismCheck {
- addNameLexem = func(original string) {
- for i := range s.initialisms {
- if isEqualFoldIgnoreSpace(s.initialismsUpperCased[i], original) {
- addInitialismNameLexem(original, s.initialisms[i])
-
- return
- }
- }
-
- addCasualNameLexem(original)
- }
- } else {
- addNameLexem = addCasualNameLexem
- }
-
- for _, rn := range str {
- if replace, found := nameReplaceTable(rn); found {
- if currentSegment.Len() > 0 {
- addNameLexem(currentSegment.String())
- currentSegment.Reset()
- }
-
- if replace != "" {
- addNameLexem(replace)
- }
-
- continue
- }
-
- if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) {
- if currentSegment.Len() > 0 {
- addNameLexem(currentSegment.String())
- currentSegment.Reset()
- }
-
- continue
- }
-
- if unicode.IsUpper(rn) {
- if currentSegment.Len() > 0 {
- addNameLexem(currentSegment.String())
- }
- currentSegment.Reset()
- }
-
- currentSegment.WriteRune(rn)
- }
-
- if currentSegment.Len() > 0 {
- addNameLexem(currentSegment.String())
- }
-}
-
-// isEqualFoldIgnoreSpace is the same as strings.EqualFold, but
-// it ignores leading and trailing blank spaces in the compared
-// string.
-//
-// base is assumed to be composed of upper-cased runes, and be already
-// trimmed.
-//
-// This code is heavily inspired from strings.EqualFold.
-func isEqualFoldIgnoreSpace(base []rune, str string) bool {
- var i, baseIndex int
- // equivalent to b := []byte(str), but without data copy
- b := hackStringBytes(str)
-
- for i < len(b) {
- if c := b[i]; c < utf8.RuneSelf {
- // fast path for ASCII
- if c != ' ' && c != '\t' {
- break
- }
- i++
-
- continue
- }
-
- // unicode case
- r, size := utf8.DecodeRune(b[i:])
- if !unicode.IsSpace(r) {
- break
- }
- i += size
- }
-
- if i >= len(b) {
- return len(base) == 0
- }
-
- for _, baseRune := range base {
- if i >= len(b) {
- break
- }
-
- if c := b[i]; c < utf8.RuneSelf {
- // single byte rune case (ASCII)
- if baseRune >= utf8.RuneSelf {
- return false
- }
-
- baseChar := byte(baseRune)
- if c != baseChar &&
- !('a' <= c && c <= 'z' && c-'a'+'A' == baseChar) {
- return false
- }
-
- baseIndex++
- i++
-
- continue
- }
-
- // unicode case
- r, size := utf8.DecodeRune(b[i:])
- if unicode.ToUpper(r) != baseRune {
- return false
- }
- baseIndex++
- i += size
- }
-
- if baseIndex != len(base) {
- return false
- }
-
- // all passed: now we should only have blanks
- for i < len(b) {
- if c := b[i]; c < utf8.RuneSelf {
- // fast path for ASCII
- if c != ' ' && c != '\t' {
- return false
- }
- i++
-
- continue
- }
-
- // unicode case
- r, size := utf8.DecodeRune(b[i:])
- if !unicode.IsSpace(r) {
- return false
- }
-
- i += size
- }
-
- return true
-}
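
Since split and the splitter type are unexported, a usage sketch only compiles inside package swag. A throwaway test like the hypothetical one below would show the lexem boundaries the matcher produces; the expected segments are indicative and depend on the configured initialism table.

```go
package swag

import "testing"

// TestSplitSketch is a hypothetical test, not part of the package.
func TestSplitSketch(t *testing.T) {
	got := split("findThingByID")
	// With the default initialism table, "ID" is matched as a single lexem,
	// so the segments would be roughly: [find Thing By ID].
	t.Logf("segments: %v", got)
}
```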
diff --git a/vendor/github.com/go-openapi/swag/string_bytes.go b/vendor/github.com/go-openapi/swag/string_bytes.go
deleted file mode 100644
index 90745d5ca9..0000000000
--- a/vendor/github.com/go-openapi/swag/string_bytes.go
+++ /dev/null
@@ -1,8 +0,0 @@
-package swag
-
-import "unsafe"
-
-// hackStringBytes returns the (unsafe) underlying bytes slice of a string.
-func hackStringBytes(str string) []byte {
- return unsafe.Slice(unsafe.StringData(str), len(str))
-}
diff --git a/vendor/github.com/go-openapi/swag/util.go b/vendor/github.com/go-openapi/swag/util.go
deleted file mode 100644
index 5051401c49..0000000000
--- a/vendor/github.com/go-openapi/swag/util.go
+++ /dev/null
@@ -1,364 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "reflect"
- "strings"
- "unicode"
- "unicode/utf8"
-)
-
-// GoNamePrefixFunc sets an optional rule to prefix go names
-// which do not start with a letter.
-//
-// The prefix function is assumed to return a string that starts with an upper case letter.
-//
-// e.g. to help convert "123" into "{prefix}123"
-//
-// The default is to prefix with "X"
-var GoNamePrefixFunc func(string) string
-
-func prefixFunc(name, in string) string {
- if GoNamePrefixFunc == nil {
- return "X" + in
- }
-
- return GoNamePrefixFunc(name) + in
-}
-
-const (
- // collectionFormatComma = "csv"
- collectionFormatSpace = "ssv"
- collectionFormatTab = "tsv"
- collectionFormatPipe = "pipes"
- collectionFormatMulti = "multi"
-)
-
-// JoinByFormat joins a string array by a known format (e.g. swagger's collectionFormat attribute):
-//
-// ssv: space separated value
-// tsv: tab separated value
-// pipes: pipe (|) separated value
-// csv: comma separated value (default)
-func JoinByFormat(data []string, format string) []string {
- if len(data) == 0 {
- return data
- }
- var sep string
- switch format {
- case collectionFormatSpace:
- sep = " "
- case collectionFormatTab:
- sep = "\t"
- case collectionFormatPipe:
- sep = "|"
- case collectionFormatMulti:
- return data
- default:
- sep = ","
- }
- return []string{strings.Join(data, sep)}
-}
-
-// SplitByFormat splits a string by a known format:
-//
-// ssv: space separated value
-// tsv: tab separated value
-// pipes: pipe (|) separated value
-// csv: comma separated value (default)
-func SplitByFormat(data, format string) []string {
- if data == "" {
- return nil
- }
- var sep string
- switch format {
- case collectionFormatSpace:
- sep = " "
- case collectionFormatTab:
- sep = "\t"
- case collectionFormatPipe:
- sep = "|"
- case collectionFormatMulti:
- return nil
- default:
- sep = ","
- }
- var result []string
- for _, s := range strings.Split(data, sep) {
- if ts := strings.TrimSpace(s); ts != "" {
- result = append(result, ts)
- }
- }
- return result
-}
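
A quick sketch of the two collectionFormat helpers round-tripping pipe-separated values; expected outputs are shown as comments.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	parts := swag.SplitByFormat("a| b |c", "pipes")
	fmt.Println(parts) // [a b c] (items are trimmed and empties dropped)

	joined := swag.JoinByFormat(parts, "pipes")
	fmt.Println(joined) // [a|b|c]
}
```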
-
-// trim removes leading and trailing whitespace
-func trim(str string) string {
- return strings.TrimSpace(str)
-}
-
-// Shortcut to strings.ToUpper()
-func upper(str string) string {
- return strings.ToUpper(trim(str))
-}
-
-// Shortcut to strings.ToLower()
-func lower(str string) string {
- return strings.ToLower(trim(str))
-}
-
-// Camelize an uppercased word
-func Camelize(word string) string {
- camelized := poolOfBuffers.BorrowBuffer(len(word))
- defer func() {
- poolOfBuffers.RedeemBuffer(camelized)
- }()
-
- for pos, ru := range []rune(word) {
- if pos > 0 {
- camelized.WriteRune(unicode.ToLower(ru))
- } else {
- camelized.WriteRune(unicode.ToUpper(ru))
- }
- }
- return camelized.String()
-}
-
-// ToFileName lowercases and underscores a go type name
-func ToFileName(name string) string {
- in := split(name)
- out := make([]string, 0, len(in))
-
- for _, w := range in {
- out = append(out, lower(w))
- }
-
- return strings.Join(out, "_")
-}
-
-// ToCommandName lowercases and underscores a go type name
-func ToCommandName(name string) string {
- in := split(name)
- out := make([]string, 0, len(in))
-
- for _, w := range in {
- out = append(out, lower(w))
- }
- return strings.Join(out, "-")
-}
-
-// ToHumanNameLower represents a code name as a human series of words
-func ToHumanNameLower(name string) string {
- s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
- in := s.split(name)
- poolOfSplitters.RedeemSplitter(s)
- out := make([]string, 0, len(*in))
-
- for _, w := range *in {
- if !w.IsInitialism() {
- out = append(out, lower(w.GetOriginal()))
- } else {
- out = append(out, trim(w.GetOriginal()))
- }
- }
- poolOfLexems.RedeemLexems(in)
-
- return strings.Join(out, " ")
-}
-
-// ToHumanNameTitle represents a code name as a human series of words with the first letters title-cased
-func ToHumanNameTitle(name string) string {
- s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
- in := s.split(name)
- poolOfSplitters.RedeemSplitter(s)
-
- out := make([]string, 0, len(*in))
- for _, w := range *in {
- original := trim(w.GetOriginal())
- if !w.IsInitialism() {
- out = append(out, Camelize(original))
- } else {
- out = append(out, original)
- }
- }
- poolOfLexems.RedeemLexems(in)
-
- return strings.Join(out, " ")
-}
-
-// ToJSONName camelcases a name which can be underscored or pascal cased
-func ToJSONName(name string) string {
- in := split(name)
- out := make([]string, 0, len(in))
-
- for i, w := range in {
- if i == 0 {
- out = append(out, lower(w))
- continue
- }
- out = append(out, Camelize(trim(w)))
- }
- return strings.Join(out, "")
-}
-
-// ToVarName camelcases a name which can be underscored or pascal cased
-func ToVarName(name string) string {
- res := ToGoName(name)
- if isInitialism(res) {
- return lower(res)
- }
- if len(res) <= 1 {
- return lower(res)
- }
- return lower(res[:1]) + res[1:]
-}
-
-// ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes
-func ToGoName(name string) string {
- s := poolOfSplitters.BorrowSplitter(withPostSplitInitialismCheck)
- lexems := s.split(name)
- poolOfSplitters.RedeemSplitter(s)
- defer func() {
- poolOfLexems.RedeemLexems(lexems)
- }()
- lexemes := *lexems
-
- if len(lexemes) == 0 {
- return ""
- }
-
- result := poolOfBuffers.BorrowBuffer(len(name))
- defer func() {
- poolOfBuffers.RedeemBuffer(result)
- }()
-
- // check if not starting with a letter, upper case
- firstPart := lexemes[0].GetUnsafeGoName()
- if lexemes[0].IsInitialism() {
- firstPart = upper(firstPart)
- }
-
- if c := firstPart[0]; c < utf8.RuneSelf {
- // ASCII
- switch {
- case 'A' <= c && c <= 'Z':
- result.WriteString(firstPart)
- case 'a' <= c && c <= 'z':
- result.WriteByte(c - 'a' + 'A')
- result.WriteString(firstPart[1:])
- default:
- result.WriteString(prefixFunc(name, firstPart))
- // NOTE: no longer check if prefixFunc returns a string that starts with uppercase:
- // assume this is always the case
- }
- } else {
- // unicode
- firstRune, _ := utf8.DecodeRuneInString(firstPart)
- switch {
- case !unicode.IsLetter(firstRune):
- result.WriteString(prefixFunc(name, firstPart))
- case !unicode.IsUpper(firstRune):
- result.WriteString(prefixFunc(name, firstPart))
- /*
- result.WriteRune(unicode.ToUpper(firstRune))
- result.WriteString(firstPart[offset:])
- */
- default:
- result.WriteString(firstPart)
- }
- }
-
- for _, lexem := range lexemes[1:] {
- goName := lexem.GetUnsafeGoName()
-
- // to support old behavior
- if lexem.IsInitialism() {
- goName = upper(goName)
- }
- result.WriteString(goName)
- }
-
- return result.String()
-}
-
-// ContainsStrings searches a slice of strings for a case-sensitive match
-func ContainsStrings(coll []string, item string) bool {
- for _, a := range coll {
- if a == item {
- return true
- }
- }
- return false
-}
-
-// ContainsStringsCI searches a slice of strings for a case-insensitive match
-func ContainsStringsCI(coll []string, item string) bool {
- for _, a := range coll {
- if strings.EqualFold(a, item) {
- return true
- }
- }
- return false
-}
-
-type zeroable interface {
- IsZero() bool
-}
-
-// IsZero returns true when the value passed into the function is a zero value.
-// This allows for safer checking of interface values.
-func IsZero(data interface{}) bool {
- v := reflect.ValueOf(data)
- // check for nil data
- switch v.Kind() { //nolint:exhaustive
- case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
- if v.IsNil() {
- return true
- }
- }
-
- // check for things that have an IsZero method instead
- if vv, ok := data.(zeroable); ok {
- return vv.IsZero()
- }
-
- // continue with slightly more complex reflection
- switch v.Kind() { //nolint:exhaustive
- case reflect.String:
- return v.Len() == 0
- case reflect.Bool:
- return !v.Bool()
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return v.Int() == 0
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return v.Uint() == 0
- case reflect.Float32, reflect.Float64:
- return v.Float() == 0
- case reflect.Struct, reflect.Array:
- return reflect.DeepEqual(data, reflect.Zero(v.Type()).Interface())
- case reflect.Invalid:
- return true
- default:
- return false
- }
-}
-
-// CommandLineOptionsGroup represents a group of user-defined command line options
-type CommandLineOptionsGroup struct {
- ShortDescription string
- LongDescription string
- Options interface{}
-}
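
Putting the naming helpers together, a sketch of the typical conversions. The outputs shown in comments are the usual results with the default initialism table, hedged as "typically" because they are not asserted here.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	fmt.Println(swag.ToGoName("find thing by id"))      // typically FindThingByID
	fmt.Println(swag.ToVarName("find thing by id"))     // typically findThingByID
	fmt.Println(swag.ToJSONName("find thing by id"))    // typically findThingById
	fmt.Println(swag.ToFileName("FindThingByID"))       // typically find_thing_by_id
	fmt.Println(swag.ToCommandName("FindThingByID"))    // typically find-thing-by-id
	fmt.Println(swag.ToHumanNameLower("FindThingByID")) // typically "find thing by ID"
}
```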
diff --git a/vendor/github.com/go-openapi/swag/yaml.go b/vendor/github.com/go-openapi/swag/yaml.go
deleted file mode 100644
index 575346539a..0000000000
--- a/vendor/github.com/go-openapi/swag/yaml.go
+++ /dev/null
@@ -1,481 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package swag
-
-import (
- "encoding/json"
- "fmt"
- "path/filepath"
- "reflect"
- "sort"
- "strconv"
-
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
- yaml "gopkg.in/yaml.v3"
-)
-
-// YAMLMatcher matches yaml
-func YAMLMatcher(path string) bool {
- ext := filepath.Ext(path)
- return ext == ".yaml" || ext == ".yml"
-}
-
-// YAMLToJSON converts YAML unmarshaled data into json compatible data
-func YAMLToJSON(data interface{}) (json.RawMessage, error) {
- jm, err := transformData(data)
- if err != nil {
- return nil, err
- }
- b, err := WriteJSON(jm)
- return json.RawMessage(b), err
-}
-
-// BytesToYAMLDoc converts a byte slice into a YAML document
-func BytesToYAMLDoc(data []byte) (interface{}, error) {
- var document yaml.Node // preserve order that is present in the document
- if err := yaml.Unmarshal(data, &document); err != nil {
- return nil, err
- }
- if document.Kind != yaml.DocumentNode || len(document.Content) != 1 || document.Content[0].Kind != yaml.MappingNode {
- return nil, fmt.Errorf("only YAML documents that are objects are supported: %w", ErrYAML)
- }
- return &document, nil
-}
-
-func yamlNode(root *yaml.Node) (interface{}, error) {
- switch root.Kind {
- case yaml.DocumentNode:
- return yamlDocument(root)
- case yaml.SequenceNode:
- return yamlSequence(root)
- case yaml.MappingNode:
- return yamlMapping(root)
- case yaml.ScalarNode:
- return yamlScalar(root)
- case yaml.AliasNode:
- return yamlNode(root.Alias)
- default:
- return nil, fmt.Errorf("unsupported YAML node type: %v: %w", root.Kind, ErrYAML)
- }
-}
-
-func yamlDocument(node *yaml.Node) (interface{}, error) {
- if len(node.Content) != 1 {
- return nil, fmt.Errorf("unexpected YAML Document node content length: %d: %w", len(node.Content), ErrYAML)
- }
- return yamlNode(node.Content[0])
-}
-
-func yamlMapping(node *yaml.Node) (interface{}, error) {
- const sensibleAllocDivider = 2
- m := make(JSONMapSlice, len(node.Content)/sensibleAllocDivider)
-
- var j int
- for i := 0; i < len(node.Content); i += 2 {
- var nmi JSONMapItem
- k, err := yamlStringScalarC(node.Content[i])
- if err != nil {
- return nil, fmt.Errorf("unable to decode YAML map key: %w: %w", err, ErrYAML)
- }
- nmi.Key = k
- v, err := yamlNode(node.Content[i+1])
- if err != nil {
- return nil, fmt.Errorf("unable to process YAML map value for key %q: %w: %w", k, err, ErrYAML)
- }
- nmi.Value = v
- m[j] = nmi
- j++
- }
- return m, nil
-}
-
-func yamlSequence(node *yaml.Node) (interface{}, error) {
- s := make([]interface{}, 0)
-
- for i := 0; i < len(node.Content); i++ {
-
- v, err := yamlNode(node.Content[i])
- if err != nil {
- return nil, fmt.Errorf("unable to decode YAML sequence value: %w: %w", err, ErrYAML)
- }
- s = append(s, v)
- }
- return s, nil
-}
-
-const ( // See https://yaml.org/type/
- yamlStringScalar = "tag:yaml.org,2002:str"
- yamlIntScalar = "tag:yaml.org,2002:int"
- yamlBoolScalar = "tag:yaml.org,2002:bool"
- yamlFloatScalar = "tag:yaml.org,2002:float"
- yamlTimestamp = "tag:yaml.org,2002:timestamp"
- yamlNull = "tag:yaml.org,2002:null"
-)
-
-func yamlScalar(node *yaml.Node) (interface{}, error) {
- switch node.LongTag() {
- case yamlStringScalar:
- return node.Value, nil
- case yamlBoolScalar:
- b, err := strconv.ParseBool(node.Value)
- if err != nil {
- return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting bool content: %w: %w", node.Value, err, ErrYAML)
- }
- return b, nil
- case yamlIntScalar:
- i, err := strconv.ParseInt(node.Value, 10, 64)
- if err != nil {
- return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting integer content: %w: %w", node.Value, err, ErrYAML)
- }
- return i, nil
- case yamlFloatScalar:
- f, err := strconv.ParseFloat(node.Value, 64)
- if err != nil {
- return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting float content: %w: %w", node.Value, err, ErrYAML)
- }
- return f, nil
- case yamlTimestamp:
- return node.Value, nil
- case yamlNull:
- return nil, nil //nolint:nilnil
- default:
- return nil, fmt.Errorf("YAML tag %q is not supported: %w", node.LongTag(), ErrYAML)
- }
-}
-
-func yamlStringScalarC(node *yaml.Node) (string, error) {
- if node.Kind != yaml.ScalarNode {
- return "", fmt.Errorf("expecting a string scalar but got %q: %w", node.Kind, ErrYAML)
- }
- switch node.LongTag() {
- case yamlStringScalar, yamlIntScalar, yamlFloatScalar:
- return node.Value, nil
- default:
- return "", fmt.Errorf("YAML tag %q is not supported as map key: %w", node.LongTag(), ErrYAML)
- }
-}
-
-// JSONMapSlice represent a JSON object, with the order of keys maintained
-type JSONMapSlice []JSONMapItem
-
-// MarshalJSON renders a JSONMapSlice as JSON
-func (s JSONMapSlice) MarshalJSON() ([]byte, error) {
- w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty}
- s.MarshalEasyJSON(w)
- return w.BuildBytes()
-}
-
-// MarshalEasyJSON renders a JSONMapSlice as JSON, using easyJSON
-func (s JSONMapSlice) MarshalEasyJSON(w *jwriter.Writer) {
- w.RawByte('{')
-
- ln := len(s)
- last := ln - 1
- for i := 0; i < ln; i++ {
- s[i].MarshalEasyJSON(w)
- if i != last { // last item
- w.RawByte(',')
- }
- }
-
- w.RawByte('}')
-}
-
-// UnmarshalJSON makes a JSONMapSlice from JSON
-func (s *JSONMapSlice) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- s.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-// UnmarshalEasyJSON makes a JSONMapSlice from JSON, using easyJSON
-func (s *JSONMapSlice) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if in.IsNull() {
- in.Skip()
- return
- }
-
- var result JSONMapSlice
- in.Delim('{')
- for !in.IsDelim('}') {
- var mi JSONMapItem
- mi.UnmarshalEasyJSON(in)
- result = append(result, mi)
- }
- *s = result
-}
-
-func (s JSONMapSlice) MarshalYAML() (interface{}, error) {
- var n yaml.Node
- n.Kind = yaml.DocumentNode
- var nodes []*yaml.Node
- for _, item := range s {
- nn, err := json2yaml(item.Value)
- if err != nil {
- return nil, err
- }
- ns := []*yaml.Node{
- {
- Kind: yaml.ScalarNode,
- Tag: yamlStringScalar,
- Value: item.Key,
- },
- nn,
- }
- nodes = append(nodes, ns...)
- }
-
- n.Content = []*yaml.Node{
- {
- Kind: yaml.MappingNode,
- Content: nodes,
- },
- }
-
- return yaml.Marshal(&n)
-}
-
-func isNil(input interface{}) bool {
- if input == nil {
- return true
- }
- kind := reflect.TypeOf(input).Kind()
- switch kind { //nolint:exhaustive
- case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan:
- return reflect.ValueOf(input).IsNil()
- default:
- return false
- }
-}
-
-func json2yaml(item interface{}) (*yaml.Node, error) {
- if isNil(item) {
- return &yaml.Node{
- Kind: yaml.ScalarNode,
- Value: "null",
- }, nil
- }
-
- switch val := item.(type) {
- case JSONMapSlice:
- var n yaml.Node
- n.Kind = yaml.MappingNode
- for i := range val {
- childNode, err := json2yaml(&val[i].Value)
- if err != nil {
- return nil, err
- }
- n.Content = append(n.Content, &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlStringScalar,
- Value: val[i].Key,
- }, childNode)
- }
- return &n, nil
- case map[string]interface{}:
- var n yaml.Node
- n.Kind = yaml.MappingNode
- keys := make([]string, 0, len(val))
- for k := range val {
- keys = append(keys, k)
- }
- sort.Strings(keys)
-
- for _, k := range keys {
- v := val[k]
- childNode, err := json2yaml(v)
- if err != nil {
- return nil, err
- }
- n.Content = append(n.Content, &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlStringScalar,
- Value: k,
- }, childNode)
- }
- return &n, nil
- case []interface{}:
- var n yaml.Node
- n.Kind = yaml.SequenceNode
- for i := range val {
- childNode, err := json2yaml(val[i])
- if err != nil {
- return nil, err
- }
- n.Content = append(n.Content, childNode)
- }
- return &n, nil
- case string:
- return &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlStringScalar,
- Value: val,
- }, nil
- case float64:
- return &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlFloatScalar,
- Value: strconv.FormatFloat(val, 'f', -1, 64),
- }, nil
- case int64:
- return &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlIntScalar,
- Value: strconv.FormatInt(val, 10),
- }, nil
- case uint64:
- return &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlIntScalar,
- Value: strconv.FormatUint(val, 10),
- }, nil
- case bool:
- return &yaml.Node{
- Kind: yaml.ScalarNode,
- Tag: yamlBoolScalar,
- Value: strconv.FormatBool(val),
- }, nil
- default:
- return nil, fmt.Errorf("unhandled type: %T: %w", val, ErrYAML)
- }
-}
-
-// JSONMapItem represents the value of a key in a JSON object held by JSONMapSlice
-type JSONMapItem struct {
- Key string
- Value interface{}
-}
-
-// MarshalJSON renders a JSONMapItem as JSON
-func (s JSONMapItem) MarshalJSON() ([]byte, error) {
- w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty}
- s.MarshalEasyJSON(w)
- return w.BuildBytes()
-}
-
-// MarshalEasyJSON renders a JSONMapItem as JSON, using easyJSON
-func (s JSONMapItem) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(s.Key)
- w.RawByte(':')
- w.Raw(WriteJSON(s.Value))
-}
-
-// UnmarshalJSON makes a JSONMapItem from JSON
-func (s *JSONMapItem) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- s.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-// UnmarshalEasyJSON makes a JSONMapItem from JSON, using easyJSON
-func (s *JSONMapItem) UnmarshalEasyJSON(in *jlexer.Lexer) {
- key := in.UnsafeString()
- in.WantColon()
- value := in.Interface()
- in.WantComma()
- s.Key = key
- s.Value = value
-}
-
-func transformData(input interface{}) (out interface{}, err error) {
- format := func(t interface{}) (string, error) {
- switch k := t.(type) {
- case string:
- return k, nil
- case uint:
- return strconv.FormatUint(uint64(k), 10), nil
- case uint8:
- return strconv.FormatUint(uint64(k), 10), nil
- case uint16:
- return strconv.FormatUint(uint64(k), 10), nil
- case uint32:
- return strconv.FormatUint(uint64(k), 10), nil
- case uint64:
- return strconv.FormatUint(k, 10), nil
- case int:
- return strconv.Itoa(k), nil
- case int8:
- return strconv.FormatInt(int64(k), 10), nil
- case int16:
- return strconv.FormatInt(int64(k), 10), nil
- case int32:
- return strconv.FormatInt(int64(k), 10), nil
- case int64:
- return strconv.FormatInt(k, 10), nil
- default:
- return "", fmt.Errorf("unexpected map key type, got: %T: %w", k, ErrYAML)
- }
- }
-
- switch in := input.(type) {
- case yaml.Node:
- return yamlNode(&in)
- case *yaml.Node:
- return yamlNode(in)
- case map[interface{}]interface{}:
- o := make(JSONMapSlice, 0, len(in))
- for ke, va := range in {
- var nmi JSONMapItem
- if nmi.Key, err = format(ke); err != nil {
- return nil, err
- }
-
- v, ert := transformData(va)
- if ert != nil {
- return nil, ert
- }
- nmi.Value = v
- o = append(o, nmi)
- }
- return o, nil
- case []interface{}:
- len1 := len(in)
- o := make([]interface{}, len1)
- for i := 0; i < len1; i++ {
- o[i], err = transformData(in[i])
- if err != nil {
- return nil, err
- }
- }
- return o, nil
- }
- return input, nil
-}
-
-// YAMLDoc loads a yaml document from either http or a file and converts it to json
-func YAMLDoc(path string) (json.RawMessage, error) {
- yamlDoc, err := YAMLData(path)
- if err != nil {
- return nil, err
- }
-
- data, err := YAMLToJSON(yamlDoc)
- if err != nil {
- return nil, err
- }
-
- return data, nil
-}
-
-// YAMLData loads a yaml document from either http or a file
-func YAMLData(path string) (interface{}, error) {
- data, err := LoadFromFileOrHTTP(path)
- if err != nil {
- return nil, err
- }
-
- return BytesToYAMLDoc(data)
-}
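
Finally, a sketch of the YAML round-trip: BytesToYAMLDoc parses bytes into an order-preserving yaml.Node document, and YAMLToJSON renders it as JSON with key order intact. The inline YAML is just a toy document.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/swag"
)

func main() {
	src := []byte("swagger: \"2.0\"\ninfo:\n  title: demo\n") // toy YAML mapping at the root

	doc, err := swag.BytesToYAMLDoc(src)
	if err != nil {
		log.Fatal(err)
	}

	raw, err := swag.YAMLToJSON(doc)
	if err != nil {
		log.Fatal(err)
	}
	// key order from the YAML source is preserved in the JSON output
	fmt.Println(string(raw))
}
```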
diff --git a/vendor/github.com/go-openapi/validate/.editorconfig b/vendor/github.com/go-openapi/validate/.editorconfig
deleted file mode 100644
index 3152da69a5..0000000000
--- a/vendor/github.com/go-openapi/validate/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/validate/.gitattributes b/vendor/github.com/go-openapi/validate/.gitattributes
deleted file mode 100644
index 49ad52766a..0000000000
--- a/vendor/github.com/go-openapi/validate/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-# gofmt always uses LF, whereas Git uses CRLF on Windows.
-*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/validate/.gitignore b/vendor/github.com/go-openapi/validate/.gitignore
deleted file mode 100644
index fea8b84eca..0000000000
--- a/vendor/github.com/go-openapi/validate/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-secrets.yml
-coverage.out
-*.cov
-*.out
-playground
diff --git a/vendor/github.com/go-openapi/validate/.golangci.yml b/vendor/github.com/go-openapi/validate/.golangci.yml
deleted file mode 100644
index 22f8d21cca..0000000000
--- a/vendor/github.com/go-openapi/validate/.golangci.yml
+++ /dev/null
@@ -1,61 +0,0 @@
-linters-settings:
- govet:
- check-shadowing: true
- golint:
- min-confidence: 0
- gocyclo:
- min-complexity: 45
- maligned:
- suggest-new: true
- dupl:
- threshold: 200
- goconst:
- min-len: 2
- min-occurrences: 3
-
-linters:
- enable-all: true
- disable:
- - maligned
- - unparam
- - lll
- - gochecknoinits
- - gochecknoglobals
- - funlen
- - godox
- - gocognit
- - whitespace
- - wsl
- - wrapcheck
- - testpackage
- - nlreturn
- - gomnd
- - exhaustivestruct
- - goerr113
- - errorlint
- - nestif
- - godot
- - gofumpt
- - paralleltest
- - tparallel
- - thelper
- - ifshort
- - exhaustruct
- - varnamelen
- - gci
- - depguard
- - errchkjson
- - inamedparam
- - nonamedreturns
- - musttag
- - ireturn
- - forcetypeassert
- - cyclop
- # deprecated linters
- - deadcode
- - interfacer
- - scopelint
- - varcheck
- - structcheck
- - golint
- - nosnakecase
diff --git a/vendor/github.com/go-openapi/validate/BENCHMARK.md b/vendor/github.com/go-openapi/validate/BENCHMARK.md
deleted file mode 100644
index 79cf6a077b..0000000000
--- a/vendor/github.com/go-openapi/validate/BENCHMARK.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Benchmark
-
-Validating the Kubernetes Swagger API
-
-## v0.22.6: 60,000,000 allocs
-```
-goos: linux
-goarch: amd64
-pkg: github.com/go-openapi/validate
-cpu: AMD Ryzen 7 5800X 8-Core Processor
-Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 8549863982 ns/op 7067424936 B/op 59583275 allocs/op
-```
-
-## After refactoring PR: minor but noticeable improvements: 25,000,000 allocs
-```
-go test -bench Spec
-goos: linux
-goarch: amd64
-pkg: github.com/go-openapi/validate
-cpu: AMD Ryzen 7 5800X 8-Core Processor
-Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 4064535557 ns/op 3379715592 B/op 25320330 allocs/op
-```
-
-## After reduce GC pressure PR: 17,000,000 allocs
-```
-goos: linux
-goarch: amd64
-pkg: github.com/go-openapi/validate
-cpu: AMD Ryzen 7 5800X 8-Core Processor
-Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 3758414145 ns/op 2593881496 B/op 17111373 allocs/op
-```
diff --git a/vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e3..0000000000
--- a/vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/validate/LICENSE b/vendor/github.com/go-openapi/validate/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/go-openapi/validate/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/validate/README.md b/vendor/github.com/go-openapi/validate/README.md
deleted file mode 100644
index e8e1bb218d..0000000000
--- a/vendor/github.com/go-openapi/validate/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# Validation helpers [](https://github.com/go-openapi/validate/actions?query=workflow%3A"go+test") [](https://codecov.io/gh/go-openapi/validate)
-
-[](https://slackin.goswagger.io)
-[](https://raw.githubusercontent.com/go-openapi/validate/master/LICENSE)
-[](https://pkg.go.dev/github.com/go-openapi/validate)
-[](https://goreportcard.com/report/github.com/go-openapi/validate)
-
-This package provides helpers to validate Swagger 2.0. specification (aka OpenAPI 2.0).
-
-Reference can be found here: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md.
-
-## What's inside?
-
-* A validator for Swagger specifications
-* A validator for JSON schemas draft4
-* Helper functions to validate individual values (used by code generated by [go-swagger](https://github.com/go-swagger/go-swagger)).
- * Required, RequiredNumber, RequiredString
- * ReadOnly
- * UniqueItems, MaxItems, MinItems
- * Enum, EnumCase
- * Pattern, MinLength, MaxLength
- * Minimum, Maximum, MultipleOf
- * FormatOf
-
-[Documentation](https://pkg.go.dev/github.com/go-openapi/validate)
-
-## FAQ
-
-* Does this library support OpenAPI 3?
-
-> No.
-> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
-> There is no plan to make it evolve toward supporting OpenAPI 3.x.
-> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
->
-> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3
diff --git a/vendor/github.com/go-openapi/validate/context.go b/vendor/github.com/go-openapi/validate/context.go
deleted file mode 100644
index 89977173b9..0000000000
--- a/vendor/github.com/go-openapi/validate/context.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package validate
-
-import (
- "context"
-)
-
-// validateCtxKey is the key type of context key in this pkg
-type validateCtxKey string
-
-const (
- operationTypeKey validateCtxKey = "operationTypeKey"
-)
-
-type operationType string
-
-const (
- request operationType = "request"
- response operationType = "response"
- none operationType = "none" // not specified in ctx
-)
-
-var operationTypeEnum = []operationType{request, response, none}
-
-// WithOperationRequest returns a new context with operationType request
-// in context value
-func WithOperationRequest(ctx context.Context) context.Context {
- return withOperation(ctx, request)
-}
-
-// WithOperationRequest returns a new context with operationType response
-// in context value
-func WithOperationResponse(ctx context.Context) context.Context {
- return withOperation(ctx, response)
-}
-
-func withOperation(ctx context.Context, operation operationType) context.Context {
- return context.WithValue(ctx, operationTypeKey, operation)
-}
-
-// extractOperationType extracts the operation type from ctx
-// if not specified or of unknown value, return none operation type
-func extractOperationType(ctx context.Context) operationType {
- v := ctx.Value(operationTypeKey)
- if v == nil {
- return none
- }
- res, ok := v.(operationType)
- if !ok {
- return none
- }
- // validate the value is in operation enum
- if err := Enum("", "", res, operationTypeEnum); err != nil {
- return none
- }
- return res
-}
diff --git a/vendor/github.com/go-openapi/validate/debug.go b/vendor/github.com/go-openapi/validate/debug.go
deleted file mode 100644
index 8815fd9359..0000000000
--- a/vendor/github.com/go-openapi/validate/debug.go
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
- "log"
- "os"
- "path/filepath"
- "runtime"
-)
-
-var (
- // Debug is true when the SWAGGER_DEBUG env var is not empty.
- // It enables a more verbose logging of validators.
- Debug = os.Getenv("SWAGGER_DEBUG") != ""
- // validateLogger is a debug logger for this package
- validateLogger *log.Logger
-)
-
-func init() {
- debugOptions()
-}
-
-func debugOptions() {
- validateLogger = log.New(os.Stdout, "validate:", log.LstdFlags)
-}
-
-func debugLog(msg string, args ...interface{}) {
- // A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog()
- if Debug {
- _, file1, pos1, _ := runtime.Caller(1)
- validateLogger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
- }
-}
diff --git a/vendor/github.com/go-openapi/validate/default_validator.go b/vendor/github.com/go-openapi/validate/default_validator.go
deleted file mode 100644
index e0dd93839e..0000000000
--- a/vendor/github.com/go-openapi/validate/default_validator.go
+++ /dev/null
@@ -1,304 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
- "strings"
-
- "github.com/go-openapi/spec"
-)
-
-// defaultValidator validates default values in a spec.
-// According to Swagger spec, default values MUST validate their schema.
-type defaultValidator struct {
- SpecValidator *SpecValidator
- visitedSchemas map[string]struct{}
- schemaOptions *SchemaValidatorOptions
-}
-
-// resetVisited resets the internal state of visited schemas
-func (d *defaultValidator) resetVisited() {
- if d.visitedSchemas == nil {
- d.visitedSchemas = make(map[string]struct{})
-
- return
- }
-
- // TODO(go1.21): clear(ex.visitedSchemas)
- for k := range d.visitedSchemas {
- delete(d.visitedSchemas, k)
- }
-}
-
-func isVisited(path string, visitedSchemas map[string]struct{}) bool {
- _, found := visitedSchemas[path]
- if found {
- return true
- }
-
- // search for overlapping paths
- var (
- parent string
- suffix string
- )
- for i := len(path) - 2; i >= 0; i-- {
- r := path[i]
- if r != '.' {
- continue
- }
-
- parent = path[0:i]
- suffix = path[i+1:]
-
- if strings.HasSuffix(parent, suffix) {
- return true
- }
- }
-
- return false
-}
-
-// beingVisited asserts a schema is being visited
-func (d *defaultValidator) beingVisited(path string) {
- d.visitedSchemas[path] = struct{}{}
-}
-
-// isVisited tells if a path has already been visited
-func (d *defaultValidator) isVisited(path string) bool {
- return isVisited(path, d.visitedSchemas)
-}
-
-// Validate validates the default values declared in the swagger spec
-func (d *defaultValidator) Validate() *Result {
- errs := pools.poolOfResults.BorrowResult() // will redeem when merged
-
- if d == nil || d.SpecValidator == nil {
- return errs
- }
- d.resetVisited()
- errs.Merge(d.validateDefaultValueValidAgainstSchema()) // error -
- return errs
-}
-
-func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
- // every default value that is specified must validate against the schema for that property
- // headers, items, parameters, schema
-
- res := pools.poolOfResults.BorrowResult() // will redeem when merged
- s := d.SpecValidator
-
- for method, pathItem := range s.expandedAnalyzer().Operations() {
- for path, op := range pathItem {
- // parameters
- for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
- if param.Default != nil && param.Required {
- res.AddWarnings(requiredHasDefaultMsg(param.Name, param.In))
- }
-
- // reset explored schemas to get depth-first recursive-proof exploration
- d.resetVisited()
-
- // Check simple parameters first
- // default values provided must validate against their inline definition (no explicit schema)
- if param.Default != nil && param.Schema == nil {
- // check param default value is valid
- red := newParamValidator(¶m, s.KnownFormats, d.schemaOptions).Validate(param.Default) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- // Recursively follows Items and Schemas
- if param.Items != nil {
- red := d.validateDefaultValueItemsAgainstSchema(param.Name, param.In, ¶m, param.Items) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddErrors(defaultValueItemsDoesNotValidateMsg(param.Name, param.In))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- if param.Schema != nil {
- // Validate default value against schema
- red := d.validateDefaultValueSchemaAgainstSchema(param.Name, param.In, param.Schema)
- if red.HasErrorsOrWarnings() {
- res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
- }
-
- if op.Responses != nil {
- if op.Responses.Default != nil {
- // Same constraint on default Response
- res.Merge(d.validateDefaultInResponse(op.Responses.Default, jsonDefault, path, 0, op.ID))
- }
- // Same constraint on regular Responses
- if op.Responses.StatusCodeResponses != nil { // Safeguard
- for code, r := range op.Responses.StatusCodeResponses {
- res.Merge(d.validateDefaultInResponse(&r, "response", path, code, op.ID)) //#nosec
- }
- }
- } else if op.ID != "" {
- // Empty op.ID means there is no meaningful operation: no need to report a specific message
- res.AddErrors(noValidResponseMsg(op.ID))
- }
- }
- }
- if s.spec.Spec().Definitions != nil { // Safeguard
- // reset explored schemas to get depth-first recursive-proof exploration
- d.resetVisited()
- for nm, sch := range s.spec.Spec().Definitions {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema("definitions."+nm, "body", &sch)) //#nosec
- }
- }
- return res
-}
-
-func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, responseType, path string, responseCode int, operationID string) *Result {
- s := d.SpecValidator
-
- response, res := responseHelp.expandResponseRef(resp, path, s)
- if !res.IsValid() {
- return res
- }
-
- responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode)
-
- if response.Headers != nil { // Safeguard
- for nm, h := range response.Headers {
- // reset explored schemas to get depth-first recursive-proof exploration
- d.resetVisited()
-
- if h.Default != nil {
- red := newHeaderValidator(nm, &h, s.KnownFormats, d.schemaOptions).Validate(h.Default) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddErrors(defaultValueHeaderDoesNotValidateMsg(operationID, nm, responseName))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- // Headers have inline definition, like params
- if h.Items != nil {
- red := d.validateDefaultValueItemsAgainstSchema(nm, "header", &h, h.Items) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddErrors(defaultValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- if _, err := compileRegexp(h.Pattern); err != nil {
- res.AddErrors(invalidPatternInHeaderMsg(operationID, nm, responseName, h.Pattern, err))
- }
-
- // Headers don't have schema
- }
- }
- if response.Schema != nil {
- // reset explored schemas to get depth-first recursive-proof exploration
- d.resetVisited()
-
- red := d.validateDefaultValueSchemaAgainstSchema(responseCodeAsStr, "response", response.Schema)
- if red.HasErrorsOrWarnings() {
- // Additional message to make sure the context of the error is not lost
- res.AddErrors(defaultValueInDoesNotValidateMsg(operationID, responseName))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
- return res
-}
-
-func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in string, schema *spec.Schema) *Result {
- if schema == nil || d.isVisited(path) {
- // Avoids recursing if we are already done with that check
- return nil
- }
- d.beingVisited(path)
- res := pools.poolOfResults.BorrowResult()
- s := d.SpecValidator
-
- if schema.Default != nil {
- res.Merge(
- newSchemaValidator(schema, s.spec.Spec(), path+".default", s.KnownFormats, d.schemaOptions).Validate(schema.Default),
- )
- }
- if schema.Items != nil {
- if schema.Items.Schema != nil {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".items.default", in, schema.Items.Schema))
- }
- // Multiple schemas in items
- if schema.Items.Schemas != nil { // Safeguard
- for i, sch := range schema.Items.Schemas {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.items[%d].default", path, i), in, &sch)) //#nosec
- }
- }
- }
- if _, err := compileRegexp(schema.Pattern); err != nil {
- res.AddErrors(invalidPatternInMsg(path, in, schema.Pattern))
- }
- if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
- // NOTE: we keep validating values, even though additionalItems is not supported by Swagger 2.0 (and 3.0 as well)
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".additionalItems", in, schema.AdditionalItems.Schema))
- }
- for propName, prop := range schema.Properties {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
- }
- for propName, prop := range schema.PatternProperties {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
- }
- if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".additionalProperties", in, schema.AdditionalProperties.Schema))
- }
- if schema.AllOf != nil {
- for i, aoSch := range schema.AllOf {
- res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.allOf[%d]", path, i), in, &aoSch)) //#nosec
- }
- }
- return res
-}
-
-// TODO: Temporary duplicated code. Need to refactor with examples
-
-func (d *defaultValidator) validateDefaultValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result {
- res := pools.poolOfResults.BorrowResult()
- s := d.SpecValidator
- if items != nil {
- if items.Default != nil {
- res.Merge(
- newItemsValidator(path, in, items, root, s.KnownFormats, d.schemaOptions).Validate(0, items.Default),
- )
- }
- if items.Items != nil {
- res.Merge(d.validateDefaultValueItemsAgainstSchema(path+"[0].default", in, root, items.Items))
- }
- if _, err := compileRegexp(items.Pattern); err != nil {
- res.AddErrors(invalidPatternInMsg(path, in, items.Pattern))
- }
- }
- return res
-}
diff --git a/vendor/github.com/go-openapi/validate/doc.go b/vendor/github.com/go-openapi/validate/doc.go
deleted file mode 100644
index d2b901eab9..0000000000
--- a/vendor/github.com/go-openapi/validate/doc.go
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/*
-Package validate provides methods to validate a swagger specification,
-as well as tools to validate data against their schema.
-
-This package follows Swagger 2.0. specification (aka OpenAPI 2.0). Reference
-can be found here: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md.
-
-# Validating a specification
-
-Validates a spec document (from JSON or YAML) against the JSON schema for swagger,
-then checks a number of extra rules that can't be expressed in JSON schema.
-
-Entry points:
- - Spec()
- - NewSpecValidator()
- - SpecValidator.Validate()
-
-Reported as errors:
-
- [x] definition can't declare a property that's already defined by one of its ancestors
- [x] definition's ancestor can't be a descendant of the same model
- [x] path uniqueness: each api path should be non-verbatim (account for path param names) unique per method. Validation can be laxed by disabling StrictPathParamUniqueness.
- [x] each security reference should contain only unique scopes
- [x] each security scope in a security definition should be unique
- [x] parameters in path must be unique
- [x] each path parameter must correspond to a parameter placeholder and vice versa
- [x] each referenceable definition must have references
- [x] each definition property listed in the required array must be defined in the properties of the model
- [x] each parameter should have a unique `name` and `type` combination
- [x] each operation should have only 1 parameter of type body
- [x] each reference must point to a valid object
- [x] every default value that is specified must validate against the schema for that property
- [x] items property is required for all schemas/definitions of type `array`
- [x] path parameters must be declared a required
- [x] headers must not contain $ref
- [x] schema and property examples provided must validate against their respective object's schema
- [x] examples provided must validate their schema
-
-Reported as warnings:
-
- [x] path parameters should not contain any of [{,},\w]
- [x] empty path
- [x] unused definitions
- [x] unsupported validation of examples on non-JSON media types
- [x] examples in response without schema
- [x] readOnly properties should not be required
-
-# Validating a schema
-
-The schema validation toolkit validates data against JSON-schema-draft 04 schema.
-
-It is tested against the full json-schema-testing-suite (https://github.com/json-schema-org/JSON-Schema-Test-Suite),
-except for the optional part (bignum, ECMA regexp, ...).
-
-It supports the complete JSON-schema vocabulary, including keywords not supported by Swagger (e.g. additionalItems, ...)
-
-Entry points:
- - AgainstSchema()
- - ...
-
-# Known limitations
-
-With the current version of this package, the following aspects of swagger are not yet supported:
-
- [ ] errors and warnings are not reported with key/line number in spec
- [ ] default values and examples on responses only support application/json producer type
- [ ] invalid numeric constraints (such as Minimum, etc..) are not checked except for default and example values
- [ ] rules for collectionFormat are not implemented
- [ ] no validation rule for polymorphism support (discriminator) [not done here]
- [ ] valid js ECMA regexp not supported by Go regexp engine are considered invalid
- [ ] arbitrary large numbers are not supported: max is math.MaxFloat64
-*/
-package validate
diff --git a/vendor/github.com/go-openapi/validate/example_validator.go b/vendor/github.com/go-openapi/validate/example_validator.go
deleted file mode 100644
index d08956973c..0000000000
--- a/vendor/github.com/go-openapi/validate/example_validator.go
+++ /dev/null
@@ -1,299 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
-
- "github.com/go-openapi/spec"
-)
-
-// ExampleValidator validates example values defined in a spec
-type exampleValidator struct {
- SpecValidator *SpecValidator
- visitedSchemas map[string]struct{}
- schemaOptions *SchemaValidatorOptions
-}
-
-// resetVisited resets the internal state of visited schemas
-func (ex *exampleValidator) resetVisited() {
- if ex.visitedSchemas == nil {
- ex.visitedSchemas = make(map[string]struct{})
-
- return
- }
-
- // TODO(go1.21): clear(ex.visitedSchemas)
- for k := range ex.visitedSchemas {
- delete(ex.visitedSchemas, k)
- }
-}
-
-// beingVisited asserts a schema is being visited
-func (ex *exampleValidator) beingVisited(path string) {
- ex.visitedSchemas[path] = struct{}{}
-}
-
-// isVisited tells if a path has already been visited
-func (ex *exampleValidator) isVisited(path string) bool {
- return isVisited(path, ex.visitedSchemas)
-}
-
-// Validate validates the example values declared in the swagger spec
-// Example values MUST conform to their schema.
-//
-// With Swagger 2.0, examples are supported in:
-// - schemas
-// - individual property
-// - responses
-func (ex *exampleValidator) Validate() *Result {
- errs := pools.poolOfResults.BorrowResult()
-
- if ex == nil || ex.SpecValidator == nil {
- return errs
- }
- ex.resetVisited()
- errs.Merge(ex.validateExampleValueValidAgainstSchema()) // error -
-
- return errs
-}
-
-func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
- // every example value that is specified must validate against the schema for that property
- // in: schemas, properties, object, items
- // not in: headers, parameters without schema
-
- res := pools.poolOfResults.BorrowResult()
- s := ex.SpecValidator
-
- for method, pathItem := range s.expandedAnalyzer().Operations() {
- for path, op := range pathItem {
- // parameters
- for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
-
- // As of swagger 2.0, Examples are not supported in simple parameters
- // However, it looks like it is supported by go-openapi
-
- // reset explored schemas to get depth-first recursive-proof exploration
- ex.resetVisited()
-
- // Check simple parameters first
- // default values provided must validate against their inline definition (no explicit schema)
- if param.Example != nil && param.Schema == nil {
- // check param default value is valid
- red := newParamValidator(¶m, s.KnownFormats, ex.schemaOptions).Validate(param.Example) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In))
- res.MergeAsWarnings(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- // Recursively follows Items and Schemas
- if param.Items != nil {
- red := ex.validateExampleValueItemsAgainstSchema(param.Name, param.In, ¶m, param.Items) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddWarnings(exampleValueItemsDoesNotValidateMsg(param.Name, param.In))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- if param.Schema != nil {
- // Validate example value against schema
- red := ex.validateExampleValueSchemaAgainstSchema(param.Name, param.In, param.Schema)
- if red.HasErrorsOrWarnings() {
- res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
- }
-
- if op.Responses != nil {
- if op.Responses.Default != nil {
- // Same constraint on default Response
- res.Merge(ex.validateExampleInResponse(op.Responses.Default, jsonDefault, path, 0, op.ID))
- }
- // Same constraint on regular Responses
- if op.Responses.StatusCodeResponses != nil { // Safeguard
- for code, r := range op.Responses.StatusCodeResponses {
- res.Merge(ex.validateExampleInResponse(&r, "response", path, code, op.ID)) //#nosec
- }
- }
- } else if op.ID != "" {
- // Empty op.ID means there is no meaningful operation: no need to report a specific message
- res.AddErrors(noValidResponseMsg(op.ID))
- }
- }
- }
- if s.spec.Spec().Definitions != nil { // Safeguard
- // reset explored schemas to get depth-first recursive-proof exploration
- ex.resetVisited()
- for nm, sch := range s.spec.Spec().Definitions {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema("definitions."+nm, "body", &sch)) //#nosec
- }
- }
- return res
-}
-
-func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, responseType, path string, responseCode int, operationID string) *Result {
- s := ex.SpecValidator
-
- response, res := responseHelp.expandResponseRef(resp, path, s)
- if !res.IsValid() { // Safeguard
- return res
- }
-
- responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode)
-
- if response.Headers != nil { // Safeguard
- for nm, h := range response.Headers {
- // reset explored schemas to get depth-first recursive-proof exploration
- ex.resetVisited()
-
- if h.Example != nil {
- red := newHeaderValidator(nm, &h, s.KnownFormats, ex.schemaOptions).Validate(h.Example) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddWarnings(exampleValueHeaderDoesNotValidateMsg(operationID, nm, responseName))
- res.MergeAsWarnings(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- // Headers have inline definition, like params
- if h.Items != nil {
- red := ex.validateExampleValueItemsAgainstSchema(nm, "header", &h, h.Items) //#nosec
- if red.HasErrorsOrWarnings() {
- res.AddWarnings(exampleValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName))
- res.MergeAsWarnings(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- if _, err := compileRegexp(h.Pattern); err != nil {
- res.AddErrors(invalidPatternInHeaderMsg(operationID, nm, responseName, h.Pattern, err))
- }
-
- // Headers don't have schema
- }
- }
- if response.Schema != nil {
- // reset explored schemas to get depth-first recursive-proof exploration
- ex.resetVisited()
-
- red := ex.validateExampleValueSchemaAgainstSchema(responseCodeAsStr, "response", response.Schema)
- if red.HasErrorsOrWarnings() {
- // Additional message to make sure the context of the error is not lost
- res.AddWarnings(exampleValueInDoesNotValidateMsg(operationID, responseName))
- res.Merge(red)
- } else if red.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(red)
- }
- }
-
- if response.Examples != nil {
- if response.Schema != nil {
- if example, ok := response.Examples["application/json"]; ok {
- res.MergeAsWarnings(
- newSchemaValidator(response.Schema, s.spec.Spec(), path+".examples", s.KnownFormats, s.schemaOptions).Validate(example),
- )
- } else {
- // TODO: validate other media types too
- res.AddWarnings(examplesMimeNotSupportedMsg(operationID, responseName))
- }
- } else {
- res.AddWarnings(examplesWithoutSchemaMsg(operationID, responseName))
- }
- }
- return res
-}
-
-func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in string, schema *spec.Schema) *Result {
- if schema == nil || ex.isVisited(path) {
- // Avoids recursing if we are already done with that check
- return nil
- }
- ex.beingVisited(path)
- s := ex.SpecValidator
- res := pools.poolOfResults.BorrowResult()
-
- if schema.Example != nil {
- res.MergeAsWarnings(
- newSchemaValidator(schema, s.spec.Spec(), path+".example", s.KnownFormats, ex.schemaOptions).Validate(schema.Example),
- )
- }
- if schema.Items != nil {
- if schema.Items.Schema != nil {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".items.example", in, schema.Items.Schema))
- }
- // Multiple schemas in items
- if schema.Items.Schemas != nil { // Safeguard
- for i, sch := range schema.Items.Schemas {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.items[%d].example", path, i), in, &sch)) //#nosec
- }
- }
- }
- if _, err := compileRegexp(schema.Pattern); err != nil {
- res.AddErrors(invalidPatternInMsg(path, in, schema.Pattern))
- }
- if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
- // NOTE: we keep validating values, even though additionalItems is unsupported in Swagger 2.0 (and 3.0 as well)
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".additionalItems", in, schema.AdditionalItems.Schema))
- }
- for propName, prop := range schema.Properties {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
- }
- for propName, prop := range schema.PatternProperties {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
- }
- if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".additionalProperties", in, schema.AdditionalProperties.Schema))
- }
- if schema.AllOf != nil {
- for i, aoSch := range schema.AllOf {
- res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.allOf[%d]", path, i), in, &aoSch)) //#nosec
- }
- }
- return res
-}
-
-// TODO: Temporary duplicated code. Need to refactor with examples
-//
-
-func (ex *exampleValidator) validateExampleValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result {
- res := pools.poolOfResults.BorrowResult()
- s := ex.SpecValidator
- if items != nil {
- if items.Example != nil {
- res.MergeAsWarnings(
- newItemsValidator(path, in, items, root, s.KnownFormats, ex.schemaOptions).Validate(0, items.Example),
- )
- }
- if items.Items != nil {
- res.Merge(ex.validateExampleValueItemsAgainstSchema(path+"[0].example", in, root, items.Items))
- }
- if _, err := compileRegexp(items.Pattern); err != nil {
- res.AddErrors(invalidPatternInMsg(path, in, items.Pattern))
- }
- }
-
- return res
-}
diff --git a/vendor/github.com/go-openapi/validate/formats.go b/vendor/github.com/go-openapi/validate/formats.go
deleted file mode 100644
index f4e3552130..0000000000
--- a/vendor/github.com/go-openapi/validate/formats.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "reflect"
-
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-type formatValidator struct {
- Path string
- In string
- Format string
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-func newFormatValidator(path, in, format string, formats strfmt.Registry, opts *SchemaValidatorOptions) *formatValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var f *formatValidator
- if opts.recycleValidators {
- f = pools.poolOfFormatValidators.BorrowValidator()
- } else {
- f = new(formatValidator)
- }
-
- f.Path = path
- f.In = in
- f.Format = format
- f.KnownFormats = formats
- f.Options = opts
-
- return f
-}
-
-func (f *formatValidator) SetPath(path string) {
- f.Path = path
-}
-
-func (f *formatValidator) Applies(source interface{}, kind reflect.Kind) bool {
- if source == nil || f.KnownFormats == nil {
- return false
- }
-
- switch source := source.(type) {
- case *spec.Items:
- return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
- case *spec.Parameter:
- return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
- case *spec.Schema:
- return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
- case *spec.Header:
- return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
- default:
- return false
- }
-}
-
-func (f *formatValidator) Validate(val interface{}) *Result {
- if f.Options.recycleValidators {
- defer func() {
- f.redeem()
- }()
- }
-
- var result *Result
- if f.Options.recycleResult {
- result = pools.poolOfResults.BorrowResult()
- } else {
- result = new(Result)
- }
-
- if err := FormatOf(f.Path, f.In, f.Format, val.(string), f.KnownFormats); err != nil {
- result.AddErrors(err)
- }
-
- return result
-}
-
-func (f *formatValidator) redeem() {
- pools.poolOfFormatValidators.RedeemValidator(f)
-}
diff --git a/vendor/github.com/go-openapi/validate/helpers.go b/vendor/github.com/go-openapi/validate/helpers.go
deleted file mode 100644
index 757e403d91..0000000000
--- a/vendor/github.com/go-openapi/validate/helpers.go
+++ /dev/null
@@ -1,333 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-// TODO: define this as package validate/internal
-// This must be done while keeping CI intact with all tests and test coverage
-
-import (
- "reflect"
- "strconv"
- "strings"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
-)
-
-const (
- swaggerBody = "body"
- swaggerExample = "example"
- swaggerExamples = "examples"
-)
-
-const (
- objectType = "object"
- arrayType = "array"
- stringType = "string"
- integerType = "integer"
- numberType = "number"
- booleanType = "boolean"
- fileType = "file"
- nullType = "null"
-)
-
-const (
- jsonProperties = "properties"
- jsonItems = "items"
- jsonType = "type"
- // jsonSchema = "schema"
- jsonDefault = "default"
-)
-
-const (
- stringFormatDate = "date"
- stringFormatDateTime = "date-time"
- stringFormatPassword = "password"
- stringFormatByte = "byte"
- // stringFormatBinary = "binary"
- stringFormatCreditCard = "creditcard"
- stringFormatDuration = "duration"
- stringFormatEmail = "email"
- stringFormatHexColor = "hexcolor"
- stringFormatHostname = "hostname"
- stringFormatIPv4 = "ipv4"
- stringFormatIPv6 = "ipv6"
- stringFormatISBN = "isbn"
- stringFormatISBN10 = "isbn10"
- stringFormatISBN13 = "isbn13"
- stringFormatMAC = "mac"
- stringFormatBSONObjectID = "bsonobjectid"
- stringFormatRGBColor = "rgbcolor"
- stringFormatSSN = "ssn"
- stringFormatURI = "uri"
- stringFormatUUID = "uuid"
- stringFormatUUID3 = "uuid3"
- stringFormatUUID4 = "uuid4"
- stringFormatUUID5 = "uuid5"
-
- integerFormatInt32 = "int32"
- integerFormatInt64 = "int64"
- integerFormatUInt32 = "uint32"
- integerFormatUInt64 = "uint64"
-
- numberFormatFloat32 = "float32"
- numberFormatFloat64 = "float64"
- numberFormatFloat = "float"
- numberFormatDouble = "double"
-)
-
-// Helpers available at the package level
-var (
- pathHelp *pathHelper
- valueHelp *valueHelper
- errorHelp *errorHelper
- paramHelp *paramHelper
- responseHelp *responseHelper
-)
-
-type errorHelper struct {
- // A collection of unexported helpers for error construction
-}
-
-func (h *errorHelper) sErr(err errors.Error, recycle bool) *Result {
- // Builds a Result from standard errors.Error
- var result *Result
- if recycle {
- result = pools.poolOfResults.BorrowResult()
- } else {
- result = new(Result)
- }
- result.Errors = []error{err}
-
- return result
-}
-
-func (h *errorHelper) addPointerError(res *Result, err error, ref string, fromPath string) *Result {
- // Provides more context on error messages
- // reported by the jsoinpointer package by altering the passed Result
- if err != nil {
- res.AddErrors(cannotResolveRefMsg(fromPath, ref, err))
- }
- return res
-}
-
-type pathHelper struct {
- // A collection of unexported helpers for path validation
-}
-
-func (h *pathHelper) stripParametersInPath(path string) string {
- // Returns a path stripped from all path parameters, with multiple or trailing slashes removed.
- //
- // Stripping is performed on a slash-separated basis, e.g '/a{/b}' remains a{/b} and not /a.
- // - Trailing "/" make a difference, e.g. /a/ !~ /a (ex: canary/bitbucket.org/swagger.json)
- // - presence or absence of a parameter makes a difference, e.g. /a/{log} !~ /a/ (ex: canary/kubernetes/swagger.json)
-
- // Regexp to extract parameters from path, with surrounding {}.
- // NOTE: important non-greedy modifier
- rexParsePathParam := mustCompileRegexp(`{[^{}]+?}`)
- strippedSegments := []string{}
-
- for _, segment := range strings.Split(path, "/") {
- strippedSegments = append(strippedSegments, rexParsePathParam.ReplaceAllString(segment, "X"))
- }
- return strings.Join(strippedSegments, "/")
-}
-
-func (h *pathHelper) extractPathParams(path string) (params []string) {
- // Extracts all params from a path, with surrounding "{}"
- rexParsePathParam := mustCompileRegexp(`{[^{}]+?}`)
-
- for _, segment := range strings.Split(path, "/") {
- for _, v := range rexParsePathParam.FindAllStringSubmatch(segment, -1) {
- params = append(params, v...)
- }
- }
- return
-}
-
-type valueHelper struct {
- // A collection of unexported helpers for value validation
-}
-
-func (h *valueHelper) asInt64(val interface{}) int64 {
- // Number conversion function for int64, without error checking
- // (implements an implicit type upgrade).
- v := reflect.ValueOf(val)
- switch v.Kind() { //nolint:exhaustive
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return v.Int()
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- return int64(v.Uint())
- case reflect.Float32, reflect.Float64:
- return int64(v.Float())
- default:
- // panic("Non numeric value in asInt64()")
- return 0
- }
-}
-
-func (h *valueHelper) asUint64(val interface{}) uint64 {
- // Number conversion function for uint64, without error checking
- // (implements an implicit type upgrade).
- v := reflect.ValueOf(val)
- switch v.Kind() { //nolint:exhaustive
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return uint64(v.Int())
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- return v.Uint()
- case reflect.Float32, reflect.Float64:
- return uint64(v.Float())
- default:
- // panic("Non numeric value in asUint64()")
- return 0
- }
-}
-
-// Same for unsigned floats
-func (h *valueHelper) asFloat64(val interface{}) float64 {
- // Number conversion function for float64, without error checking
- // (implements an implicit type upgrade).
- v := reflect.ValueOf(val)
- switch v.Kind() { //nolint:exhaustive
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return float64(v.Int())
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- return float64(v.Uint())
- case reflect.Float32, reflect.Float64:
- return v.Float()
- default:
- // panic("Non numeric value in asFloat64()")
- return 0
- }
-}
-
-type paramHelper struct {
- // A collection of unexported helpers for parameters resolution
-}
-
-func (h *paramHelper) safeExpandedParamsFor(path, method, operationID string, res *Result, s *SpecValidator) (params []spec.Parameter) {
- operation, ok := s.expandedAnalyzer().OperationFor(method, path)
- if ok {
- // expand parameters first if necessary
- resolvedParams := []spec.Parameter{}
- for _, ppr := range operation.Parameters {
- resolvedParam, red := h.resolveParam(path, method, operationID, &ppr, s) //#nosec
- res.Merge(red)
- if resolvedParam != nil {
- resolvedParams = append(resolvedParams, *resolvedParam)
- }
- }
- // remove params with invalid expansion from Slice
- operation.Parameters = resolvedParams
-
- for _, ppr := range s.expandedAnalyzer().SafeParamsFor(method, path,
- func(_ spec.Parameter, err error) bool {
- // since params have already been expanded, there are few causes for error
- res.AddErrors(someParametersBrokenMsg(path, method, operationID))
- // original error from analyzer
- res.AddErrors(err)
- return true
- }) {
- params = append(params, ppr)
- }
- }
- return
-}
-
-func (h *paramHelper) resolveParam(path, method, operationID string, param *spec.Parameter, s *SpecValidator) (*spec.Parameter, *Result) {
- // Ensure parameter is expanded
- var err error
- res := new(Result)
- isRef := param.Ref.String() != ""
- if s.spec.SpecFilePath() == "" {
- err = spec.ExpandParameterWithRoot(param, s.spec.Spec(), nil)
- } else {
- err = spec.ExpandParameter(param, s.spec.SpecFilePath())
-
- }
- if err != nil { // Safeguard
- // NOTE: we may enter here when the whole parameter is an unresolved $ref
- refPath := strings.Join([]string{"\"" + path + "\"", method}, ".")
- errorHelp.addPointerError(res, err, param.Ref.String(), refPath)
- return nil, res
- }
- res.Merge(h.checkExpandedParam(param, param.Name, param.In, operationID, isRef))
- return param, res
-}
-
-func (h *paramHelper) checkExpandedParam(pr *spec.Parameter, path, in, operation string, isRef bool) *Result {
- // Secure parameter structure after $ref resolution
- res := new(Result)
- simpleZero := spec.SimpleSchema{}
- // Try to explain why... best guess
- switch {
- case pr.In == swaggerBody && (pr.SimpleSchema != simpleZero && pr.SimpleSchema.Type != objectType):
- if isRef {
- // Most likely, a $ref with a sibling is an unwanted situation: in itself this is a warning...
- // but we detect it because of the following error:
- // schema took over Parameter for an unexplained reason
- res.AddWarnings(refShouldNotHaveSiblingsMsg(path, operation))
- }
- res.AddErrors(invalidParameterDefinitionMsg(path, in, operation))
- case pr.In != swaggerBody && pr.Schema != nil:
- if isRef {
- res.AddWarnings(refShouldNotHaveSiblingsMsg(path, operation))
- }
- res.AddErrors(invalidParameterDefinitionAsSchemaMsg(path, in, operation))
- case (pr.In == swaggerBody && pr.Schema == nil) || (pr.In != swaggerBody && pr.SimpleSchema == simpleZero):
- // Other unexpected mishaps
- res.AddErrors(invalidParameterDefinitionMsg(path, in, operation))
- }
- return res
-}
-
-type responseHelper struct {
- // A collection of unexported helpers for response resolution
-}
-
-func (r *responseHelper) expandResponseRef(
- response *spec.Response,
- path string, s *SpecValidator) (*spec.Response, *Result) {
- // Ensure response is expanded
- var err error
- res := new(Result)
- if s.spec.SpecFilePath() == "" {
- // there is no physical document to resolve $ref in response
- err = spec.ExpandResponseWithRoot(response, s.spec.Spec(), nil)
- } else {
- err = spec.ExpandResponse(response, s.spec.SpecFilePath())
- }
- if err != nil { // Safeguard
- // NOTE: we may enter here when the whole response is an unresolved $ref.
- errorHelp.addPointerError(res, err, response.Ref.String(), path)
- return nil, res
- }
-
- return response, res
-}
-
-func (r *responseHelper) responseMsgVariants(
- responseType string,
- responseCode int) (responseName, responseCodeAsStr string) {
- // Path variants for messages
- if responseType == jsonDefault {
- responseCodeAsStr = jsonDefault
- responseName = "default response"
- } else {
- responseCodeAsStr = strconv.Itoa(responseCode)
- responseName = "response " + responseCodeAsStr
- }
- return
-}
diff --git a/vendor/github.com/go-openapi/validate/object_validator.go b/vendor/github.com/go-openapi/validate/object_validator.go
deleted file mode 100644
index dff73fa98a..0000000000
--- a/vendor/github.com/go-openapi/validate/object_validator.go
+++ /dev/null
@@ -1,431 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-type objectValidator struct {
- Path string
- In string
- MaxProperties *int64
- MinProperties *int64
- Required []string
- Properties map[string]spec.Schema
- AdditionalProperties *spec.SchemaOrBool
- PatternProperties map[string]spec.Schema
- Root interface{}
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
- splitPath []string
-}
-
-func newObjectValidator(path, in string,
- maxProperties, minProperties *int64, required []string, properties spec.SchemaProperties,
- additionalProperties *spec.SchemaOrBool, patternProperties spec.SchemaProperties,
- root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *objectValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var v *objectValidator
- if opts.recycleValidators {
- v = pools.poolOfObjectValidators.BorrowValidator()
- } else {
- v = new(objectValidator)
- }
-
- v.Path = path
- v.In = in
- v.MaxProperties = maxProperties
- v.MinProperties = minProperties
- v.Required = required
- v.Properties = properties
- v.AdditionalProperties = additionalProperties
- v.PatternProperties = patternProperties
- v.Root = root
- v.KnownFormats = formats
- v.Options = opts
- v.splitPath = strings.Split(v.Path, ".")
-
- return v
-}
-
-func (o *objectValidator) SetPath(path string) {
- o.Path = path
- o.splitPath = strings.Split(path, ".")
-}
-
-func (o *objectValidator) Applies(source interface{}, kind reflect.Kind) bool {
- // TODO: this should also work for structs
- // there is a problem in the type validator where it will be unhappy about null values
- // so that requires more testing
- _, isSchema := source.(*spec.Schema)
- return isSchema && (kind == reflect.Map || kind == reflect.Struct)
-}
-
-func (o *objectValidator) isProperties() bool {
- p := o.splitPath
- return len(p) > 1 && p[len(p)-1] == jsonProperties && p[len(p)-2] != jsonProperties
-}
-
-func (o *objectValidator) isDefault() bool {
- p := o.splitPath
- return len(p) > 1 && p[len(p)-1] == jsonDefault && p[len(p)-2] != jsonDefault
-}
-
-func (o *objectValidator) isExample() bool {
- p := o.splitPath
- return len(p) > 1 && (p[len(p)-1] == swaggerExample || p[len(p)-1] == swaggerExamples) && p[len(p)-2] != swaggerExample
-}
-
-func (o *objectValidator) checkArrayMustHaveItems(res *Result, val map[string]interface{}) {
- // for swagger 2.0 schemas, there is an additional constraint to have array items defined explicitly.
- // with pure jsonschema draft 4, one may have arrays with undefined items (i.e. any type).
- if val == nil {
- return
- }
-
- t, typeFound := val[jsonType]
- if !typeFound {
- return
- }
-
- tpe, isString := t.(string)
- if !isString || tpe != arrayType {
- return
- }
-
- item, itemsKeyFound := val[jsonItems]
- if itemsKeyFound {
- return
- }
-
- res.AddErrors(errors.Required(jsonItems, o.Path, item))
-}
-
-func (o *objectValidator) checkItemsMustBeTypeArray(res *Result, val map[string]interface{}) {
- if val == nil {
- return
- }
-
- if o.isProperties() || o.isDefault() || o.isExample() {
- return
- }
-
- _, itemsKeyFound := val[jsonItems]
- if !itemsKeyFound {
- return
- }
-
- t, typeFound := val[jsonType]
- if !typeFound {
- // there is no type
- res.AddErrors(errors.Required(jsonType, o.Path, t))
- }
-
- if tpe, isString := t.(string); !isString || tpe != arrayType {
- res.AddErrors(errors.InvalidType(o.Path, o.In, arrayType, nil))
- }
-}
-
-func (o *objectValidator) precheck(res *Result, val map[string]interface{}) {
- if o.Options.EnableArrayMustHaveItemsCheck {
- o.checkArrayMustHaveItems(res, val)
- }
- if o.Options.EnableObjectArrayTypeCheck {
- o.checkItemsMustBeTypeArray(res, val)
- }
-}
-
-func (o *objectValidator) Validate(data interface{}) *Result {
- if o.Options.recycleValidators {
- defer func() {
- o.redeem()
- }()
- }
-
- var val map[string]interface{}
- if data != nil {
- var ok bool
- val, ok = data.(map[string]interface{})
- if !ok {
- return errorHelp.sErr(invalidObjectMsg(o.Path, o.In), o.Options.recycleResult)
- }
- }
- numKeys := int64(len(val))
-
- if o.MinProperties != nil && numKeys < *o.MinProperties {
- return errorHelp.sErr(errors.TooFewProperties(o.Path, o.In, *o.MinProperties), o.Options.recycleResult)
- }
- if o.MaxProperties != nil && numKeys > *o.MaxProperties {
- return errorHelp.sErr(errors.TooManyProperties(o.Path, o.In, *o.MaxProperties), o.Options.recycleResult)
- }
-
- var res *Result
- if o.Options.recycleResult {
- res = pools.poolOfResults.BorrowResult()
- } else {
- res = new(Result)
- }
-
- o.precheck(res, val)
-
- // check validity of field names
- if o.AdditionalProperties != nil && !o.AdditionalProperties.Allows {
- // Case: additionalProperties: false
- o.validateNoAdditionalProperties(val, res)
- } else {
- // Cases: empty additionalProperties (implying: true), or additionalProperties: true, or additionalProperties: { <> }
- o.validateAdditionalProperties(val, res)
- }
-
- o.validatePropertiesSchema(val, res)
-
- // Check patternProperties
- // TODO: it looks like we have done that twice in many cases
- for key, value := range val {
- _, regularProperty := o.Properties[key]
- matched, _, patterns := o.validatePatternProperty(key, value, res) // applies to regular properties as well
- if regularProperty || !matched {
- continue
- }
-
- for _, pName := range patterns {
- if v, ok := o.PatternProperties[pName]; ok {
- r := newSchemaValidator(&v, o.Root, o.Path+"."+key, o.KnownFormats, o.Options).Validate(value)
- res.mergeForField(data.(map[string]interface{}), key, r)
- }
- }
- }
-
- return res
-}
-
-func (o *objectValidator) validateNoAdditionalProperties(val map[string]interface{}, res *Result) {
- for k := range val {
- if k == "$schema" || k == "id" {
- // special properties "$schema" and "id" are ignored
- continue
- }
-
- _, regularProperty := o.Properties[k]
- if regularProperty {
- continue
- }
-
- matched := false
- for pk := range o.PatternProperties {
- re, err := compileRegexp(pk)
- if err != nil {
- continue
- }
- if matches := re.MatchString(k); matches {
- matched = true
- break
- }
- }
- if matched {
- continue
- }
-
- res.AddErrors(errors.PropertyNotAllowed(o.Path, o.In, k))
-
- // BUG(fredbi): This section should move to a part dedicated to spec validation as
- // it will conflict with regular schemas where a property "headers" is defined.
-
- //
- // Croaks a more explicit message on top of the standard one
- // on some recognized cases.
- //
- // NOTE: edge cases with invalid type assertion are simply ignored here.
- // NOTE: prefix your messages here by "IMPORTANT!" so there are not filtered
- // by higher level callers (the IMPORTANT! tag will be eventually
- // removed).
- if k != "headers" || val[k] == nil {
- continue
- }
-
- // $ref is forbidden in header
- headers, mapOk := val[k].(map[string]interface{})
- if !mapOk {
- continue
- }
-
- for headerKey, headerBody := range headers {
- if headerBody == nil {
- continue
- }
-
- headerSchema, mapOfMapOk := headerBody.(map[string]interface{})
- if !mapOfMapOk {
- continue
- }
-
- _, found := headerSchema["$ref"]
- if !found {
- continue
- }
-
- refString, stringOk := headerSchema["$ref"].(string)
- if !stringOk {
- continue
- }
-
- msg := strings.Join([]string{", one may not use $ref=\":", refString, "\""}, "")
- res.AddErrors(refNotAllowedInHeaderMsg(o.Path, headerKey, msg))
- /*
- case "$ref":
- if val[k] != nil {
- // TODO: check context of that ref: warn about siblings, check against invalid context
- }
- */
- }
- }
-}
-
-func (o *objectValidator) validateAdditionalProperties(val map[string]interface{}, res *Result) {
- for key, value := range val {
- _, regularProperty := o.Properties[key]
- if regularProperty {
- continue
- }
-
- // Validates property against "patternProperties" if applicable
- // BUG(fredbi): succeededOnce is always false
-
- // NOTE: how about regular properties which do not match patternProperties?
- matched, succeededOnce, _ := o.validatePatternProperty(key, value, res)
- if matched || succeededOnce {
- continue
- }
-
- if o.AdditionalProperties == nil || o.AdditionalProperties.Schema == nil {
- continue
- }
-
- // Cases: properties which are not regular properties and have not been matched by the PatternProperties validator
- // AdditionalProperties as Schema
- r := newSchemaValidator(o.AdditionalProperties.Schema, o.Root, o.Path+"."+key, o.KnownFormats, o.Options).Validate(value)
- res.mergeForField(val, key, r)
- }
- // Valid cases: additionalProperties: true or undefined
-}
-
-func (o *objectValidator) validatePropertiesSchema(val map[string]interface{}, res *Result) {
- createdFromDefaults := map[string]struct{}{}
-
- // Property types:
- // - regular Property
- pSchema := pools.poolOfSchemas.BorrowSchema() // recycle a spec.Schema object which lifespan extends only to the validation of properties
- defer func() {
- pools.poolOfSchemas.RedeemSchema(pSchema)
- }()
-
- for pName := range o.Properties {
- *pSchema = o.Properties[pName]
- var rName string
- if o.Path == "" {
- rName = pName
- } else {
- rName = o.Path + "." + pName
- }
-
- // Recursively validates each property against its schema
- v, ok := val[pName]
- if ok {
- r := newSchemaValidator(pSchema, o.Root, rName, o.KnownFormats, o.Options).Validate(v)
- res.mergeForField(val, pName, r)
-
- continue
- }
-
- if pSchema.Default != nil {
- // if a default value is defined, creates the property from defaults
- // NOTE: JSON schema does not enforce default values to be valid against schema. Swagger does.
- createdFromDefaults[pName] = struct{}{}
- if !o.Options.skipSchemataResult {
- res.addPropertySchemata(val, pName, pSchema) // this shallow-clones the content of the pSchema pointer
- }
- }
- }
-
- if len(o.Required) == 0 {
- return
- }
-
- // Check required properties
- for _, k := range o.Required {
- v, ok := val[k]
- if ok {
- continue
- }
- _, isCreatedFromDefaults := createdFromDefaults[k]
- if isCreatedFromDefaults {
- continue
- }
-
- res.AddErrors(errors.Required(fmt.Sprintf("%s.%s", o.Path, k), o.In, v))
- }
-}
-
-// TODO: succeededOnce is not used anywhere
-func (o *objectValidator) validatePatternProperty(key string, value interface{}, result *Result) (bool, bool, []string) {
- if len(o.PatternProperties) == 0 {
- return false, false, nil
- }
-
- matched := false
- succeededOnce := false
- patterns := make([]string, 0, len(o.PatternProperties))
-
- schema := pools.poolOfSchemas.BorrowSchema()
- defer func() {
- pools.poolOfSchemas.RedeemSchema(schema)
- }()
-
- for k := range o.PatternProperties {
- re, err := compileRegexp(k)
- if err != nil {
- continue
- }
-
- match := re.MatchString(key)
- if !match {
- continue
- }
-
- *schema = o.PatternProperties[k]
- patterns = append(patterns, k)
- matched = true
- validator := newSchemaValidator(schema, o.Root, fmt.Sprintf("%s.%s", o.Path, key), o.KnownFormats, o.Options)
-
- res := validator.Validate(value)
- result.Merge(res)
- }
-
- return matched, succeededOnce, patterns
-}
-
-func (o *objectValidator) redeem() {
- pools.poolOfObjectValidators.RedeemValidator(o)
-}
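The removed object validator above routes each property key in three steps: keys declared under properties are validated directly, remaining keys are tried against every patternProperties regexp (patterns that fail to compile are skipped), and only keys that match nothing fall through to additionalProperties. A reduced, self-contained sketch of that key-routing logic, with plain strings standing in for schemas (the route helper below is illustrative, not part of the removed package):

package main

import (
	"fmt"
	"regexp"
)

// route decides how a property key would be handled: by a named property,
// by a patternProperties entry, or by additionalProperties.
func route(key string, properties map[string]bool, patterns []string) string {
	if properties[key] {
		return "properties"
	}
	for _, p := range patterns {
		re, err := regexp.Compile(p)
		if err != nil {
			continue // invalid patterns are skipped, mirroring the removed code
		}
		if re.MatchString(key) {
			return "patternProperties:" + p
		}
	}
	return "additionalProperties"
}

func main() {
	props := map[string]bool{"name": true}
	pats := []string{`^x-`}
	fmt.Println(route("name", props, pats))    // properties
	fmt.Println(route("x-trace", props, pats)) // patternProperties:^x-
	fmt.Println(route("other", props, pats))   // additionalProperties
}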
diff --git a/vendor/github.com/go-openapi/validate/options.go b/vendor/github.com/go-openapi/validate/options.go
deleted file mode 100644
index cfe9b0660f..0000000000
--- a/vendor/github.com/go-openapi/validate/options.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import "sync"
-
-// Opts specifies validation options for a SpecValidator.
-//
-// NOTE: other options might be needed, for example a go-swagger specific mode.
-type Opts struct {
- ContinueOnErrors bool // true: continue reporting errors, even if spec is invalid
-
- // StrictPathParamUniqueness enables a strict validation of paths that include
- // path parameters. When true, it will enforce that for each method, the path
- // is unique, regardless of path parameters such that GET:/petstore/{id} and
-	// GET:/petstore/{pet} are considered duplicate paths.
- //
- // Consider disabling if path parameters can include slashes such as
- // GET:/v1/{shelve} and GET:/v1/{book}, where the IDs are "shelve/*" and
-	// "shelve/*/book/*" respectively.
- StrictPathParamUniqueness bool
- SkipSchemataResult bool
-}
-
-var (
- defaultOpts = Opts{
- // default is to stop validation on errors
- ContinueOnErrors: false,
-
- // StrictPathParamUniqueness is defaulted to true. This maintains existing
- // behavior.
- StrictPathParamUniqueness: true,
- }
-
- defaultOptsMutex = &sync.Mutex{}
-)
-
-// SetContinueOnErrors sets global default behavior regarding spec validation errors reporting.
-//
-// For extended error reporting, you most likely want to set it to true.
-// For faster validation, it's better to give up early when a spec is detected as invalid: set it to false (this is the default).
-//
-// Setting this mode does NOT affect the validation status.
-//
-// NOTE: this method affects global defaults. It is not suitable for concurrent usage.
-func SetContinueOnErrors(c bool) {
- defer defaultOptsMutex.Unlock()
- defaultOptsMutex.Lock()
- defaultOpts.ContinueOnErrors = c
-}
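SetContinueOnErrors above updates a package-level default under a mutex, which serializes writers but leaves readers of the default unsynchronized; hence the note that it is not suitable for concurrent usage. A stripped-down, standalone sketch of that guarded-default pattern (the opts type and names below are illustrative):

package main

import (
	"fmt"
	"sync"
)

type opts struct {
	ContinueOnErrors bool
}

var (
	defaults      = opts{ContinueOnErrors: false} // stop on the first error by default
	defaultsMutex sync.Mutex
)

// SetContinueOnErrors flips the package-wide default. Writers are serialized
// by the mutex, but readers that copy defaults without locking can still race.
func SetContinueOnErrors(c bool) {
	defaultsMutex.Lock()
	defer defaultsMutex.Unlock()
	defaults.ContinueOnErrors = c
}

func main() {
	SetContinueOnErrors(true)
	fmt.Println(defaults.ContinueOnErrors) // true
}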
diff --git a/vendor/github.com/go-openapi/validate/pools.go b/vendor/github.com/go-openapi/validate/pools.go
deleted file mode 100644
index 3ddce4dcc2..0000000000
--- a/vendor/github.com/go-openapi/validate/pools.go
+++ /dev/null
@@ -1,366 +0,0 @@
-//go:build !validatedebug
-
-package validate
-
-import (
- "sync"
-
- "github.com/go-openapi/spec"
-)
-
-var pools allPools
-
-func init() {
- resetPools()
-}
-
-func resetPools() {
-	// NOTE: for testing purposes, we might want to reset pools after calling Validate twice.
- // The pool is corrupted in that case: calling Put twice inserts a duplicate in the pool
- // and further calls to Get are mishandled.
-
- pools = allPools{
- poolOfSchemaValidators: schemaValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &SchemaValidator{}
-
- return s
- },
- },
- },
- poolOfObjectValidators: objectValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &objectValidator{}
-
- return s
- },
- },
- },
- poolOfSliceValidators: sliceValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &schemaSliceValidator{}
-
- return s
- },
- },
- },
- poolOfItemsValidators: itemsValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &itemsValidator{}
-
- return s
- },
- },
- },
- poolOfBasicCommonValidators: basicCommonValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &basicCommonValidator{}
-
- return s
- },
- },
- },
- poolOfHeaderValidators: headerValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &HeaderValidator{}
-
- return s
- },
- },
- },
- poolOfParamValidators: paramValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &ParamValidator{}
-
- return s
- },
- },
- },
- poolOfBasicSliceValidators: basicSliceValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &basicSliceValidator{}
-
- return s
- },
- },
- },
- poolOfNumberValidators: numberValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &numberValidator{}
-
- return s
- },
- },
- },
- poolOfStringValidators: stringValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &stringValidator{}
-
- return s
- },
- },
- },
- poolOfSchemaPropsValidators: schemaPropsValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &schemaPropsValidator{}
-
- return s
- },
- },
- },
- poolOfFormatValidators: formatValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &formatValidator{}
-
- return s
- },
- },
- },
- poolOfTypeValidators: typeValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &typeValidator{}
-
- return s
- },
- },
- },
- poolOfSchemas: schemasPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &spec.Schema{}
-
- return s
- },
- },
- },
- poolOfResults: resultsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &Result{}
-
- return s
- },
- },
- },
- }
-}
-
-type (
- allPools struct {
- // memory pools for all validator objects.
- //
- // Each pool can be borrowed from and redeemed to.
- poolOfSchemaValidators schemaValidatorsPool
- poolOfObjectValidators objectValidatorsPool
- poolOfSliceValidators sliceValidatorsPool
- poolOfItemsValidators itemsValidatorsPool
- poolOfBasicCommonValidators basicCommonValidatorsPool
- poolOfHeaderValidators headerValidatorsPool
- poolOfParamValidators paramValidatorsPool
- poolOfBasicSliceValidators basicSliceValidatorsPool
- poolOfNumberValidators numberValidatorsPool
- poolOfStringValidators stringValidatorsPool
- poolOfSchemaPropsValidators schemaPropsValidatorsPool
- poolOfFormatValidators formatValidatorsPool
- poolOfTypeValidators typeValidatorsPool
- poolOfSchemas schemasPool
- poolOfResults resultsPool
- }
-
- schemaValidatorsPool struct {
- *sync.Pool
- }
-
- objectValidatorsPool struct {
- *sync.Pool
- }
-
- sliceValidatorsPool struct {
- *sync.Pool
- }
-
- itemsValidatorsPool struct {
- *sync.Pool
- }
-
- basicCommonValidatorsPool struct {
- *sync.Pool
- }
-
- headerValidatorsPool struct {
- *sync.Pool
- }
-
- paramValidatorsPool struct {
- *sync.Pool
- }
-
- basicSliceValidatorsPool struct {
- *sync.Pool
- }
-
- numberValidatorsPool struct {
- *sync.Pool
- }
-
- stringValidatorsPool struct {
- *sync.Pool
- }
-
- schemaPropsValidatorsPool struct {
- *sync.Pool
- }
-
- formatValidatorsPool struct {
- *sync.Pool
- }
-
- typeValidatorsPool struct {
- *sync.Pool
- }
-
- schemasPool struct {
- *sync.Pool
- }
-
- resultsPool struct {
- *sync.Pool
- }
-)
-
-func (p schemaValidatorsPool) BorrowValidator() *SchemaValidator {
- return p.Get().(*SchemaValidator)
-}
-
-func (p schemaValidatorsPool) RedeemValidator(s *SchemaValidator) {
- // NOTE: s might be nil. In that case, Put is a noop.
- p.Put(s)
-}
-
-func (p objectValidatorsPool) BorrowValidator() *objectValidator {
- return p.Get().(*objectValidator)
-}
-
-func (p objectValidatorsPool) RedeemValidator(s *objectValidator) {
- p.Put(s)
-}
-
-func (p sliceValidatorsPool) BorrowValidator() *schemaSliceValidator {
- return p.Get().(*schemaSliceValidator)
-}
-
-func (p sliceValidatorsPool) RedeemValidator(s *schemaSliceValidator) {
- p.Put(s)
-}
-
-func (p itemsValidatorsPool) BorrowValidator() *itemsValidator {
- return p.Get().(*itemsValidator)
-}
-
-func (p itemsValidatorsPool) RedeemValidator(s *itemsValidator) {
- p.Put(s)
-}
-
-func (p basicCommonValidatorsPool) BorrowValidator() *basicCommonValidator {
- return p.Get().(*basicCommonValidator)
-}
-
-func (p basicCommonValidatorsPool) RedeemValidator(s *basicCommonValidator) {
- p.Put(s)
-}
-
-func (p headerValidatorsPool) BorrowValidator() *HeaderValidator {
- return p.Get().(*HeaderValidator)
-}
-
-func (p headerValidatorsPool) RedeemValidator(s *HeaderValidator) {
- p.Put(s)
-}
-
-func (p paramValidatorsPool) BorrowValidator() *ParamValidator {
- return p.Get().(*ParamValidator)
-}
-
-func (p paramValidatorsPool) RedeemValidator(s *ParamValidator) {
- p.Put(s)
-}
-
-func (p basicSliceValidatorsPool) BorrowValidator() *basicSliceValidator {
- return p.Get().(*basicSliceValidator)
-}
-
-func (p basicSliceValidatorsPool) RedeemValidator(s *basicSliceValidator) {
- p.Put(s)
-}
-
-func (p numberValidatorsPool) BorrowValidator() *numberValidator {
- return p.Get().(*numberValidator)
-}
-
-func (p numberValidatorsPool) RedeemValidator(s *numberValidator) {
- p.Put(s)
-}
-
-func (p stringValidatorsPool) BorrowValidator() *stringValidator {
- return p.Get().(*stringValidator)
-}
-
-func (p stringValidatorsPool) RedeemValidator(s *stringValidator) {
- p.Put(s)
-}
-
-func (p schemaPropsValidatorsPool) BorrowValidator() *schemaPropsValidator {
- return p.Get().(*schemaPropsValidator)
-}
-
-func (p schemaPropsValidatorsPool) RedeemValidator(s *schemaPropsValidator) {
- p.Put(s)
-}
-
-func (p formatValidatorsPool) BorrowValidator() *formatValidator {
- return p.Get().(*formatValidator)
-}
-
-func (p formatValidatorsPool) RedeemValidator(s *formatValidator) {
- p.Put(s)
-}
-
-func (p typeValidatorsPool) BorrowValidator() *typeValidator {
- return p.Get().(*typeValidator)
-}
-
-func (p typeValidatorsPool) RedeemValidator(s *typeValidator) {
- p.Put(s)
-}
-
-func (p schemasPool) BorrowSchema() *spec.Schema {
- return p.Get().(*spec.Schema)
-}
-
-func (p schemasPool) RedeemSchema(s *spec.Schema) {
- p.Put(s)
-}
-
-func (p resultsPool) BorrowResult() *Result {
- return p.Get().(*Result).cleared()
-}
-
-func (p resultsPool) RedeemResult(s *Result) {
- if s == emptyResult {
- return
- }
- p.Put(s)
-}
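The removed pools.go wraps one sync.Pool per validator type behind typed Borrow/Redeem helpers, so validator instances are recycled between validations instead of being reallocated. A minimal sketch of that pattern for a hypothetical worker type (names below are illustrative, not part of the removed package):

package main

import (
	"fmt"
	"sync"
)

type worker struct {
	path string
}

// workerPool is a typed wrapper around sync.Pool, mirroring the
// Borrow/Redeem naming used by the removed pools.
type workerPool struct {
	*sync.Pool
}

// BorrowWorker returns a recycled *worker, or a fresh one if the pool is empty.
func (p workerPool) BorrowWorker() *worker {
	return p.Get().(*worker)
}

// RedeemWorker returns a *worker to the pool once the caller is done with it.
func (p workerPool) RedeemWorker(w *worker) {
	p.Put(w)
}

var pool = workerPool{
	Pool: &sync.Pool{
		New: func() any { return &worker{} },
	},
}

func main() {
	w := pool.BorrowWorker()
	w.path = "body.items"
	fmt.Println(w.path)
	pool.RedeemWorker(w) // w must not be used after this point
}

Anything borrowed must be redeemed exactly once; redeeming twice corrupts the pool, which is what the validatedebug build in pools_debug.go below guards against.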
diff --git a/vendor/github.com/go-openapi/validate/pools_debug.go b/vendor/github.com/go-openapi/validate/pools_debug.go
deleted file mode 100644
index 12949f02a7..0000000000
--- a/vendor/github.com/go-openapi/validate/pools_debug.go
+++ /dev/null
@@ -1,1012 +0,0 @@
-//go:build validatedebug
-
-package validate
-
-import (
- "fmt"
- "runtime"
- "sync"
- "testing"
-
- "github.com/go-openapi/spec"
-)
-
-// This version of the pools is to be used for debugging and testing, with build tag "validatedebug".
-//
-// In this mode, the pools are tracked for allocation and redemption of borrowed objects, so we can
-// verify a few behaviors of the validators. The debug pools panic when an invalid usage pattern is detected.
-
-var pools allPools
-
-func init() {
- resetPools()
-}
-
-func resetPools() {
-	// NOTE: for testing purposes, we might want to reset pools after calling Validate twice.
- // The pool is corrupted in that case: calling Put twice inserts a duplicate in the pool
- // and further calls to Get are mishandled.
-
- pools = allPools{
- poolOfSchemaValidators: schemaValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &SchemaValidator{}
-
- return s
- },
- },
- debugMap: make(map[*SchemaValidator]status),
- allocMap: make(map[*SchemaValidator]string),
- redeemMap: make(map[*SchemaValidator]string),
- },
- poolOfObjectValidators: objectValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &objectValidator{}
-
- return s
- },
- },
- debugMap: make(map[*objectValidator]status),
- allocMap: make(map[*objectValidator]string),
- redeemMap: make(map[*objectValidator]string),
- },
- poolOfSliceValidators: sliceValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &schemaSliceValidator{}
-
- return s
- },
- },
- debugMap: make(map[*schemaSliceValidator]status),
- allocMap: make(map[*schemaSliceValidator]string),
- redeemMap: make(map[*schemaSliceValidator]string),
- },
- poolOfItemsValidators: itemsValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &itemsValidator{}
-
- return s
- },
- },
- debugMap: make(map[*itemsValidator]status),
- allocMap: make(map[*itemsValidator]string),
- redeemMap: make(map[*itemsValidator]string),
- },
- poolOfBasicCommonValidators: basicCommonValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &basicCommonValidator{}
-
- return s
- },
- },
- debugMap: make(map[*basicCommonValidator]status),
- allocMap: make(map[*basicCommonValidator]string),
- redeemMap: make(map[*basicCommonValidator]string),
- },
- poolOfHeaderValidators: headerValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &HeaderValidator{}
-
- return s
- },
- },
- debugMap: make(map[*HeaderValidator]status),
- allocMap: make(map[*HeaderValidator]string),
- redeemMap: make(map[*HeaderValidator]string),
- },
- poolOfParamValidators: paramValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &ParamValidator{}
-
- return s
- },
- },
- debugMap: make(map[*ParamValidator]status),
- allocMap: make(map[*ParamValidator]string),
- redeemMap: make(map[*ParamValidator]string),
- },
- poolOfBasicSliceValidators: basicSliceValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &basicSliceValidator{}
-
- return s
- },
- },
- debugMap: make(map[*basicSliceValidator]status),
- allocMap: make(map[*basicSliceValidator]string),
- redeemMap: make(map[*basicSliceValidator]string),
- },
- poolOfNumberValidators: numberValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &numberValidator{}
-
- return s
- },
- },
- debugMap: make(map[*numberValidator]status),
- allocMap: make(map[*numberValidator]string),
- redeemMap: make(map[*numberValidator]string),
- },
- poolOfStringValidators: stringValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &stringValidator{}
-
- return s
- },
- },
- debugMap: make(map[*stringValidator]status),
- allocMap: make(map[*stringValidator]string),
- redeemMap: make(map[*stringValidator]string),
- },
- poolOfSchemaPropsValidators: schemaPropsValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &schemaPropsValidator{}
-
- return s
- },
- },
- debugMap: make(map[*schemaPropsValidator]status),
- allocMap: make(map[*schemaPropsValidator]string),
- redeemMap: make(map[*schemaPropsValidator]string),
- },
- poolOfFormatValidators: formatValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &formatValidator{}
-
- return s
- },
- },
- debugMap: make(map[*formatValidator]status),
- allocMap: make(map[*formatValidator]string),
- redeemMap: make(map[*formatValidator]string),
- },
- poolOfTypeValidators: typeValidatorsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &typeValidator{}
-
- return s
- },
- },
- debugMap: make(map[*typeValidator]status),
- allocMap: make(map[*typeValidator]string),
- redeemMap: make(map[*typeValidator]string),
- },
- poolOfSchemas: schemasPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &spec.Schema{}
-
- return s
- },
- },
- debugMap: make(map[*spec.Schema]status),
- allocMap: make(map[*spec.Schema]string),
- redeemMap: make(map[*spec.Schema]string),
- },
- poolOfResults: resultsPool{
- Pool: &sync.Pool{
- New: func() any {
- s := &Result{}
-
- return s
- },
- },
- debugMap: make(map[*Result]status),
- allocMap: make(map[*Result]string),
- redeemMap: make(map[*Result]string),
- },
- }
-}
-
-const (
- statusFresh status = iota + 1
- statusRecycled
- statusRedeemed
-)
-
-func (s status) String() string {
- switch s {
- case statusFresh:
- return "fresh"
- case statusRecycled:
- return "recycled"
- case statusRedeemed:
- return "redeemed"
- default:
- panic(fmt.Errorf("invalid status: %d", s))
- }
-}
-
-type (
- // Debug
- status uint8
-
- allPools struct {
- // memory pools for all validator objects.
- //
- // Each pool can be borrowed from and redeemed to.
- poolOfSchemaValidators schemaValidatorsPool
- poolOfObjectValidators objectValidatorsPool
- poolOfSliceValidators sliceValidatorsPool
- poolOfItemsValidators itemsValidatorsPool
- poolOfBasicCommonValidators basicCommonValidatorsPool
- poolOfHeaderValidators headerValidatorsPool
- poolOfParamValidators paramValidatorsPool
- poolOfBasicSliceValidators basicSliceValidatorsPool
- poolOfNumberValidators numberValidatorsPool
- poolOfStringValidators stringValidatorsPool
- poolOfSchemaPropsValidators schemaPropsValidatorsPool
- poolOfFormatValidators formatValidatorsPool
- poolOfTypeValidators typeValidatorsPool
- poolOfSchemas schemasPool
- poolOfResults resultsPool
- }
-
- schemaValidatorsPool struct {
- *sync.Pool
- debugMap map[*SchemaValidator]status
- allocMap map[*SchemaValidator]string
- redeemMap map[*SchemaValidator]string
- mx sync.Mutex
- }
-
- objectValidatorsPool struct {
- *sync.Pool
- debugMap map[*objectValidator]status
- allocMap map[*objectValidator]string
- redeemMap map[*objectValidator]string
- mx sync.Mutex
- }
-
- sliceValidatorsPool struct {
- *sync.Pool
- debugMap map[*schemaSliceValidator]status
- allocMap map[*schemaSliceValidator]string
- redeemMap map[*schemaSliceValidator]string
- mx sync.Mutex
- }
-
- itemsValidatorsPool struct {
- *sync.Pool
- debugMap map[*itemsValidator]status
- allocMap map[*itemsValidator]string
- redeemMap map[*itemsValidator]string
- mx sync.Mutex
- }
-
- basicCommonValidatorsPool struct {
- *sync.Pool
- debugMap map[*basicCommonValidator]status
- allocMap map[*basicCommonValidator]string
- redeemMap map[*basicCommonValidator]string
- mx sync.Mutex
- }
-
- headerValidatorsPool struct {
- *sync.Pool
- debugMap map[*HeaderValidator]status
- allocMap map[*HeaderValidator]string
- redeemMap map[*HeaderValidator]string
- mx sync.Mutex
- }
-
- paramValidatorsPool struct {
- *sync.Pool
- debugMap map[*ParamValidator]status
- allocMap map[*ParamValidator]string
- redeemMap map[*ParamValidator]string
- mx sync.Mutex
- }
-
- basicSliceValidatorsPool struct {
- *sync.Pool
- debugMap map[*basicSliceValidator]status
- allocMap map[*basicSliceValidator]string
- redeemMap map[*basicSliceValidator]string
- mx sync.Mutex
- }
-
- numberValidatorsPool struct {
- *sync.Pool
- debugMap map[*numberValidator]status
- allocMap map[*numberValidator]string
- redeemMap map[*numberValidator]string
- mx sync.Mutex
- }
-
- stringValidatorsPool struct {
- *sync.Pool
- debugMap map[*stringValidator]status
- allocMap map[*stringValidator]string
- redeemMap map[*stringValidator]string
- mx sync.Mutex
- }
-
- schemaPropsValidatorsPool struct {
- *sync.Pool
- debugMap map[*schemaPropsValidator]status
- allocMap map[*schemaPropsValidator]string
- redeemMap map[*schemaPropsValidator]string
- mx sync.Mutex
- }
-
- formatValidatorsPool struct {
- *sync.Pool
- debugMap map[*formatValidator]status
- allocMap map[*formatValidator]string
- redeemMap map[*formatValidator]string
- mx sync.Mutex
- }
-
- typeValidatorsPool struct {
- *sync.Pool
- debugMap map[*typeValidator]status
- allocMap map[*typeValidator]string
- redeemMap map[*typeValidator]string
- mx sync.Mutex
- }
-
- schemasPool struct {
- *sync.Pool
- debugMap map[*spec.Schema]status
- allocMap map[*spec.Schema]string
- redeemMap map[*spec.Schema]string
- mx sync.Mutex
- }
-
- resultsPool struct {
- *sync.Pool
- debugMap map[*Result]status
- allocMap map[*Result]string
- redeemMap map[*Result]string
- mx sync.Mutex
- }
-)
-
-func (p *schemaValidatorsPool) BorrowValidator() *SchemaValidator {
- s := p.Get().(*SchemaValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled schema should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *schemaValidatorsPool) RedeemValidator(s *SchemaValidator) {
- // NOTE: s might be nil. In that case, Put is a noop.
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed schema should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed schema should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *objectValidatorsPool) BorrowValidator() *objectValidator {
- s := p.Get().(*objectValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled object should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *objectValidatorsPool) RedeemValidator(s *objectValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed object should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed object should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *sliceValidatorsPool) BorrowValidator() *schemaSliceValidator {
- s := p.Get().(*schemaSliceValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled schemaSliceValidator should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *sliceValidatorsPool) RedeemValidator(s *schemaSliceValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed schemaSliceValidator should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed schemaSliceValidator should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *itemsValidatorsPool) BorrowValidator() *itemsValidator {
- s := p.Get().(*itemsValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled itemsValidator should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *itemsValidatorsPool) RedeemValidator(s *itemsValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed itemsValidator should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed itemsValidator should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *basicCommonValidatorsPool) BorrowValidator() *basicCommonValidator {
- s := p.Get().(*basicCommonValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled basicCommonValidator should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *basicCommonValidatorsPool) RedeemValidator(s *basicCommonValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed basicCommonValidator should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed basicCommonValidator should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *headerValidatorsPool) BorrowValidator() *HeaderValidator {
- s := p.Get().(*HeaderValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled HeaderValidator should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *headerValidatorsPool) RedeemValidator(s *HeaderValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed header should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed header should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *paramValidatorsPool) BorrowValidator() *ParamValidator {
- s := p.Get().(*ParamValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled param should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *paramValidatorsPool) RedeemValidator(s *ParamValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed param should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed param should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *basicSliceValidatorsPool) BorrowValidator() *basicSliceValidator {
- s := p.Get().(*basicSliceValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled basicSliceValidator should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *basicSliceValidatorsPool) RedeemValidator(s *basicSliceValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed basicSliceValidator should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed basicSliceValidator should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *numberValidatorsPool) BorrowValidator() *numberValidator {
- s := p.Get().(*numberValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled number should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *numberValidatorsPool) RedeemValidator(s *numberValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed number should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed number should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *stringValidatorsPool) BorrowValidator() *stringValidator {
- s := p.Get().(*stringValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled string should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *stringValidatorsPool) RedeemValidator(s *stringValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed string should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed string should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *schemaPropsValidatorsPool) BorrowValidator() *schemaPropsValidator {
- s := p.Get().(*schemaPropsValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled param should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *schemaPropsValidatorsPool) RedeemValidator(s *schemaPropsValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed schemaProps should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed schemaProps should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *formatValidatorsPool) BorrowValidator() *formatValidator {
- s := p.Get().(*formatValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled format should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *formatValidatorsPool) RedeemValidator(s *formatValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed format should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed format should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *typeValidatorsPool) BorrowValidator() *typeValidator {
- s := p.Get().(*typeValidator)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled type should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *typeValidatorsPool) RedeemValidator(s *typeValidator) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed type should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
-		panic(fmt.Errorf("redeemed type should have been allocated from a fresh or recycled pointer. Got status %s, already redeemed at: %s", x, p.redeemMap[s]))
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *schemasPool) BorrowSchema() *spec.Schema {
- s := p.Get().(*spec.Schema)
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled spec.Schema should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *schemasPool) RedeemSchema(s *spec.Schema) {
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed spec.Schema should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed spec.Schema should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *resultsPool) BorrowResult() *Result {
- s := p.Get().(*Result).cleared()
-
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- p.debugMap[s] = statusFresh
- } else {
- if x != statusRedeemed {
- panic("recycled result should have been redeemed")
- }
- p.debugMap[s] = statusRecycled
- }
- p.allocMap[s] = caller()
-
- return s
-}
-
-func (p *resultsPool) RedeemResult(s *Result) {
- if s == emptyResult {
- if len(s.Errors) > 0 || len(s.Warnings) > 0 {
- panic("empty result should not mutate")
- }
- return
- }
- p.mx.Lock()
- defer p.mx.Unlock()
- x, ok := p.debugMap[s]
- if !ok {
- panic("redeemed Result should have been allocated")
- }
- if x != statusRecycled && x != statusFresh {
- panic("redeemed Result should have been allocated from a fresh or recycled pointer")
- }
- p.debugMap[s] = statusRedeemed
- p.redeemMap[s] = caller()
- p.Put(s)
-}
-
-func (p *allPools) allIsRedeemed(t testing.TB) bool {
- outcome := true
- for k, v := range p.poolOfSchemaValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("schemaValidator should be redeemed. Allocated by: %s", p.poolOfSchemaValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfObjectValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("objectValidator should be redeemed. Allocated by: %s", p.poolOfObjectValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfSliceValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("sliceValidator should be redeemed. Allocated by: %s", p.poolOfSliceValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfItemsValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("itemsValidator should be redeemed. Allocated by: %s", p.poolOfItemsValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfBasicCommonValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("basicCommonValidator should be redeemed. Allocated by: %s", p.poolOfBasicCommonValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfHeaderValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("headerValidator should be redeemed. Allocated by: %s", p.poolOfHeaderValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfParamValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("paramValidator should be redeemed. Allocated by: %s", p.poolOfParamValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfBasicSliceValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("basicSliceValidator should be redeemed. Allocated by: %s", p.poolOfBasicSliceValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfNumberValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("numberValidator should be redeemed. Allocated by: %s", p.poolOfNumberValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfStringValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("stringValidator should be redeemed. Allocated by: %s", p.poolOfStringValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfSchemaPropsValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("schemaPropsValidator should be redeemed. Allocated by: %s", p.poolOfSchemaPropsValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfFormatValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("formatValidator should be redeemed. Allocated by: %s", p.poolOfFormatValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfTypeValidators.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("typeValidator should be redeemed. Allocated by: %s", p.poolOfTypeValidators.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfSchemas.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("schemas should be redeemed. Allocated by: %s", p.poolOfSchemas.allocMap[k])
- outcome = false
- }
- for k, v := range p.poolOfResults.debugMap {
- if v == statusRedeemed {
- continue
- }
- t.Logf("result should be redeemed. Allocated by: %s", p.poolOfResults.allocMap[k])
- outcome = false
- }
-
- return outcome
-}
-
-func caller() string {
- pc, _, _, _ := runtime.Caller(3) //nolint:dogsled
- from, line := runtime.FuncForPC(pc).FileLine(pc)
-
- return fmt.Sprintf("%s:%d", from, line)
-}
diff --git a/vendor/github.com/go-openapi/validate/result.go b/vendor/github.com/go-openapi/validate/result.go
deleted file mode 100644
index c80804a93d..0000000000
--- a/vendor/github.com/go-openapi/validate/result.go
+++ /dev/null
@@ -1,563 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- stderrors "errors"
- "reflect"
- "strings"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
-)
-
-var emptyResult = &Result{MatchCount: 1}
-
-// Result represents a validation result set, composed of
-// errors and warnings.
-//
-// It is used to keep track of all detected errors and warnings during
-// the validation of a specification.
-//
-// MatchCount is used to determine
-// which errors are relevant in the case of AnyOf or OneOf
-// schema validation. Results from the validation branch
-// with the most matches are eventually selected.
-//
-// TODO: keep path of key originating the error
-type Result struct {
- Errors []error
- Warnings []error
- MatchCount int
-
- // the object data
- data interface{}
-
- // Schemata for the root object
- rootObjectSchemata schemata
- // Schemata for object fields
- fieldSchemata []fieldSchemata
- // Schemata for slice items
- itemSchemata []itemSchemata
-
- cachedFieldSchemata map[FieldKey][]*spec.Schema
- cachedItemSchemata map[ItemKey][]*spec.Schema
-
- wantsRedeemOnMerge bool
-}
-
-// FieldKey is a pair of an object and a field, usable as a key for a map.
-type FieldKey struct {
- object reflect.Value // actually a map[string]interface{}, but the latter cannot be a key
- field string
-}
-
-// ItemKey is a pair of a slice and an index, usable as a key for a map.
-type ItemKey struct {
- slice reflect.Value // actually a []interface{}, but the latter cannot be a key
- index int
-}
-
-// NewFieldKey returns a pair of an object and field usable as a key of a map.
-func NewFieldKey(obj map[string]interface{}, field string) FieldKey {
- return FieldKey{object: reflect.ValueOf(obj), field: field}
-}
-
-// Object returns the underlying object of this key.
-func (fk *FieldKey) Object() map[string]interface{} {
- return fk.object.Interface().(map[string]interface{})
-}
-
-// Field returns the underlying field of this key.
-func (fk *FieldKey) Field() string {
- return fk.field
-}
-
-// NewItemKey returns a pair of a slice and index usable as a key of a map.
-func NewItemKey(slice interface{}, i int) ItemKey {
- return ItemKey{slice: reflect.ValueOf(slice), index: i}
-}
-
-// Slice returns the underlying slice of this key.
-func (ik *ItemKey) Slice() []interface{} {
- return ik.slice.Interface().([]interface{})
-}
-
-// Index returns the underlying index of this key.
-func (ik *ItemKey) Index() int {
- return ik.index
-}
-
-type fieldSchemata struct {
- obj map[string]interface{}
- field string
- schemata schemata
-}
-
-type itemSchemata struct {
- slice reflect.Value
- index int
- schemata schemata
-}
-
-// Merge merges this result with the other one(s), preserving match counts etc.
-func (r *Result) Merge(others ...*Result) *Result {
- for _, other := range others {
- if other == nil {
- continue
- }
- r.mergeWithoutRootSchemata(other)
- r.rootObjectSchemata.Append(other.rootObjectSchemata)
- if other.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(other)
- }
- }
- return r
-}
-
-// Data returns the original data object used for validation. Mutating this renders
-// the result invalid.
-func (r *Result) Data() interface{} {
- return r.data
-}
-
-// RootObjectSchemata returns the schemata which apply to the root object.
-func (r *Result) RootObjectSchemata() []*spec.Schema {
- return r.rootObjectSchemata.Slice()
-}
-
-// FieldSchemata returns the schemata which apply to fields in objects.
-func (r *Result) FieldSchemata() map[FieldKey][]*spec.Schema {
- if r.cachedFieldSchemata != nil {
- return r.cachedFieldSchemata
- }
-
- ret := make(map[FieldKey][]*spec.Schema, len(r.fieldSchemata))
- for _, fs := range r.fieldSchemata {
- key := NewFieldKey(fs.obj, fs.field)
- if fs.schemata.one != nil {
- ret[key] = append(ret[key], fs.schemata.one)
- } else if len(fs.schemata.multiple) > 0 {
- ret[key] = append(ret[key], fs.schemata.multiple...)
- }
- }
- r.cachedFieldSchemata = ret
-
- return ret
-}
-
-// ItemSchemata returns the schemata which apply to items in slices.
-func (r *Result) ItemSchemata() map[ItemKey][]*spec.Schema {
- if r.cachedItemSchemata != nil {
- return r.cachedItemSchemata
- }
-
- ret := make(map[ItemKey][]*spec.Schema, len(r.itemSchemata))
- for _, ss := range r.itemSchemata {
- key := NewItemKey(ss.slice, ss.index)
- if ss.schemata.one != nil {
- ret[key] = append(ret[key], ss.schemata.one)
- } else if len(ss.schemata.multiple) > 0 {
- ret[key] = append(ret[key], ss.schemata.multiple...)
- }
- }
- r.cachedItemSchemata = ret
- return ret
-}
-
-func (r *Result) resetCaches() {
- r.cachedFieldSchemata = nil
- r.cachedItemSchemata = nil
-}
-
-// mergeForField merges other into r, assigning other's root schemata to the given Object and field name.
-//
-//nolint:unparam
-func (r *Result) mergeForField(obj map[string]interface{}, field string, other *Result) *Result {
- if other == nil {
- return r
- }
- r.mergeWithoutRootSchemata(other)
-
- if other.rootObjectSchemata.Len() > 0 {
- if r.fieldSchemata == nil {
- r.fieldSchemata = make([]fieldSchemata, len(obj))
- }
- // clone other schemata, as other is about to be redeemed to the pool
- r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{
- obj: obj,
- field: field,
- schemata: other.rootObjectSchemata.Clone(),
- })
- }
- if other.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(other)
- }
-
- return r
-}
-
-// mergeForSlice merges other into r, assigning other's root schemata to the given slice and index.
-//
-//nolint:unparam
-func (r *Result) mergeForSlice(slice reflect.Value, i int, other *Result) *Result {
- if other == nil {
- return r
- }
- r.mergeWithoutRootSchemata(other)
-
- if other.rootObjectSchemata.Len() > 0 {
- if r.itemSchemata == nil {
- r.itemSchemata = make([]itemSchemata, slice.Len())
- }
- // clone other schemata, as other is about to be redeemed to the pool
- r.itemSchemata = append(r.itemSchemata, itemSchemata{
- slice: slice,
- index: i,
- schemata: other.rootObjectSchemata.Clone(),
- })
- }
-
- if other.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(other)
- }
-
- return r
-}
-
-// addRootObjectSchemata adds the given schemata for the root object of the result.
-//
-// Since the slice schemata might be reused, it is shallow-cloned before saving it into the result.
-func (r *Result) addRootObjectSchemata(s *spec.Schema) {
- clone := *s
- r.rootObjectSchemata.Append(schemata{one: &clone})
-}
-
-// addPropertySchemata adds the given schemata for the object and field.
-//
-// Since the slice schemata might be reused, it is shallow-cloned before saving it into the result.
-func (r *Result) addPropertySchemata(obj map[string]interface{}, fld string, schema *spec.Schema) {
- if r.fieldSchemata == nil {
- r.fieldSchemata = make([]fieldSchemata, 0, len(obj))
- }
- clone := *schema
- r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{obj: obj, field: fld, schemata: schemata{one: &clone}})
-}
-
-/*
-// addSliceSchemata adds the given schemata for the slice and index.
-// The slice schemata might be reused. I.e. do not modify it after being added to a result.
-func (r *Result) addSliceSchemata(slice reflect.Value, i int, schema *spec.Schema) {
- if r.itemSchemata == nil {
- r.itemSchemata = make([]itemSchemata, 0, slice.Len())
- }
- r.itemSchemata = append(r.itemSchemata, itemSchemata{slice: slice, index: i, schemata: schemata{one: schema}})
-}
-*/
-
-// mergeWithoutRootSchemata merges other into r, ignoring the rootObject schemata.
-func (r *Result) mergeWithoutRootSchemata(other *Result) {
- r.resetCaches()
- r.AddErrors(other.Errors...)
- r.AddWarnings(other.Warnings...)
- r.MatchCount += other.MatchCount
-
- if other.fieldSchemata != nil {
- if r.fieldSchemata == nil {
- r.fieldSchemata = make([]fieldSchemata, 0, len(other.fieldSchemata))
- }
- for _, field := range other.fieldSchemata {
- field.schemata = field.schemata.Clone()
- r.fieldSchemata = append(r.fieldSchemata, field)
- }
- }
-
- if other.itemSchemata != nil {
- if r.itemSchemata == nil {
- r.itemSchemata = make([]itemSchemata, 0, len(other.itemSchemata))
- }
- for _, field := range other.itemSchemata {
- field.schemata = field.schemata.Clone()
- r.itemSchemata = append(r.itemSchemata, field)
- }
- }
-}
-
-// MergeAsErrors merges this result with the other one(s), preserving match counts etc.
-//
-// Warnings from input are merged as Errors in the returned merged Result.
-func (r *Result) MergeAsErrors(others ...*Result) *Result {
- for _, other := range others {
- if other != nil {
- r.resetCaches()
- r.AddErrors(other.Errors...)
- r.AddErrors(other.Warnings...)
- r.MatchCount += other.MatchCount
- if other.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(other)
- }
- }
- }
- return r
-}
-
-// MergeAsWarnings merges this result with the other one(s), preserving match counts etc.
-//
-// Errors from input are merged as Warnings in the returned merged Result.
-func (r *Result) MergeAsWarnings(others ...*Result) *Result {
- for _, other := range others {
- if other != nil {
- r.resetCaches()
- r.AddWarnings(other.Errors...)
- r.AddWarnings(other.Warnings...)
- r.MatchCount += other.MatchCount
- if other.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(other)
- }
- }
- }
- return r
-}
-
-// AddErrors adds errors to this validation result (if not already reported).
-//
-// Since the same check may be passed several times while exploring the
-// spec structure (via $ref, ...), reported messages are kept
-// unique.
-func (r *Result) AddErrors(errors ...error) {
- for _, e := range errors {
- found := false
- if e != nil {
- for _, isReported := range r.Errors {
- if e.Error() == isReported.Error() {
- found = true
- break
- }
- }
- if !found {
- r.Errors = append(r.Errors, e)
- }
- }
- }
-}
-
-// AddWarnings adds warnings to this validation result (if not already reported).
-func (r *Result) AddWarnings(warnings ...error) {
- for _, e := range warnings {
- found := false
- if e != nil {
- for _, isReported := range r.Warnings {
- if e.Error() == isReported.Error() {
- found = true
- break
- }
- }
- if !found {
- r.Warnings = append(r.Warnings, e)
- }
- }
- }
-}
-
-func (r *Result) keepRelevantErrors() *Result {
-	// TODO: this one is going to disappear...
-	// keepRelevantErrors strips standard errors from a result and keeps
- // the ones which are supposedly more accurate.
- //
- // The original result remains unaffected (creates a new instance of Result).
- // This method is used to work around the "matchCount" filter which would otherwise
- // strip our result from some accurate error reporting from lower level validators.
- //
- // NOTE: this implementation with a placeholder (IMPORTANT!) is neither clean nor
- // very efficient. On the other hand, relying on go-openapi/errors to manipulate
- // codes would require to change a lot here. So, for the moment, let's go with
- // placeholders.
- strippedErrors := []error{}
- for _, e := range r.Errors {
- if strings.HasPrefix(e.Error(), "IMPORTANT!") {
- strippedErrors = append(strippedErrors, stderrors.New(strings.TrimPrefix(e.Error(), "IMPORTANT!")))
- }
- }
- strippedWarnings := []error{}
- for _, e := range r.Warnings {
- if strings.HasPrefix(e.Error(), "IMPORTANT!") {
- strippedWarnings = append(strippedWarnings, stderrors.New(strings.TrimPrefix(e.Error(), "IMPORTANT!")))
- }
- }
- var strippedResult *Result
- if r.wantsRedeemOnMerge {
- strippedResult = pools.poolOfResults.BorrowResult()
- } else {
- strippedResult = new(Result)
- }
- strippedResult.Errors = strippedErrors
- strippedResult.Warnings = strippedWarnings
- return strippedResult
-}
-
-// IsValid returns true when this result is valid.
-//
-// Returns true on a nil *Result.
-func (r *Result) IsValid() bool {
- if r == nil {
- return true
- }
- return len(r.Errors) == 0
-}
-
-// HasErrors returns true when this result is invalid.
-//
-// Returns false on a nil *Result.
-func (r *Result) HasErrors() bool {
- if r == nil {
- return false
- }
- return !r.IsValid()
-}
-
-// HasWarnings returns true when this result contains warnings.
-//
-// Returns false on a nil *Result.
-func (r *Result) HasWarnings() bool {
- if r == nil {
- return false
- }
- return len(r.Warnings) > 0
-}
-
-// HasErrorsOrWarnings returns true when this result contains
-// either errors or warnings.
-//
-// Returns false on a nil *Result.
-func (r *Result) HasErrorsOrWarnings() bool {
- if r == nil {
- return false
- }
- return len(r.Errors) > 0 || len(r.Warnings) > 0
-}
-
-// Inc increments the match count
-func (r *Result) Inc() {
- r.MatchCount++
-}
-
-// AsError renders this result as an error interface
-//
-// TODO: reporting / pretty print with path ordered and indented
-func (r *Result) AsError() error {
- if r.IsValid() {
- return nil
- }
- return errors.CompositeValidationError(r.Errors...)
-}
-
-func (r *Result) cleared() *Result {
- // clear the Result to be reusable. Keep allocated capacity.
- r.Errors = r.Errors[:0]
- r.Warnings = r.Warnings[:0]
- r.MatchCount = 0
- r.data = nil
- r.rootObjectSchemata.one = nil
- r.rootObjectSchemata.multiple = r.rootObjectSchemata.multiple[:0]
- r.fieldSchemata = r.fieldSchemata[:0]
- r.itemSchemata = r.itemSchemata[:0]
- for k := range r.cachedFieldSchemata {
- delete(r.cachedFieldSchemata, k)
- }
- for k := range r.cachedItemSchemata {
- delete(r.cachedItemSchemata, k)
- }
- r.wantsRedeemOnMerge = true // mark this result as eligible for redeem when merged into another
-
- return r
-}
-
-// schemata is an arbitrary number of schemata. It makes a distinction between zero,
-// one and many schemata to avoid slice allocations.
-type schemata struct {
- // one is set if there is exactly one schema. In that case multiple must be nil.
- one *spec.Schema
- // multiple is an arbitrary number of schemas. If it is set, one must be nil.
- multiple []*spec.Schema
-}
-
-func (s *schemata) Len() int {
- if s.one != nil {
- return 1
- }
- return len(s.multiple)
-}
-
-func (s *schemata) Slice() []*spec.Schema {
- if s == nil {
- return nil
- }
- if s.one != nil {
- return []*spec.Schema{s.one}
- }
- return s.multiple
-}
-
-// Append appends the schemata in other to s. It mutates s in-place.
-func (s *schemata) Append(other schemata) {
- if other.one == nil && len(other.multiple) == 0 {
- return
- }
- if s.one == nil && len(s.multiple) == 0 {
- *s = other
- return
- }
-
- if s.one != nil {
- if other.one != nil {
- s.multiple = []*spec.Schema{s.one, other.one}
- } else {
- t := make([]*spec.Schema, 0, 1+len(other.multiple))
- s.multiple = append(append(t, s.one), other.multiple...)
- }
- s.one = nil
- } else {
- if other.one != nil {
- s.multiple = append(s.multiple, other.one)
- } else {
- if cap(s.multiple) >= len(s.multiple)+len(other.multiple) {
- s.multiple = append(s.multiple, other.multiple...)
- } else {
- t := make([]*spec.Schema, 0, len(s.multiple)+len(other.multiple))
- s.multiple = append(append(t, s.multiple...), other.multiple...)
- }
- }
- }
-}
-
-func (s schemata) Clone() schemata {
- var clone schemata
-
- if s.one != nil {
- clone.one = new(spec.Schema)
- *clone.one = *s.one
- }
-
- if len(s.multiple) > 0 {
- clone.multiple = make([]*spec.Schema, len(s.multiple))
- for idx := 0; idx < len(s.multiple); idx++ {
- sp := new(spec.Schema)
- *sp = *s.multiple[idx]
- clone.multiple[idx] = sp
- }
- }
-
- return clone
-}
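The schemata helper above avoids allocating a slice in the common case by distinguishing zero, one, and many entries, only falling back to a slice once a second schema is appended. A minimal sketch of that zero/one/many pattern using plain strings instead of *spec.Schema (the items type is illustrative):

package main

import "fmt"

// items stores zero, one, or many strings without allocating a slice
// until more than one element is held.
type items struct {
	one      string // set when exactly one element is held
	hasOne   bool
	multiple []string // set when more than one element is held
}

func (s *items) Append(v string) {
	switch {
	case !s.hasOne && s.multiple == nil:
		s.one, s.hasOne = v, true // first element: no slice needed
	case s.hasOne:
		s.multiple = []string{s.one, v} // second element: promote to a slice
		s.one, s.hasOne = "", false
	default:
		s.multiple = append(s.multiple, v)
	}
}

func (s *items) Slice() []string {
	if s.hasOne {
		return []string{s.one}
	}
	return s.multiple
}

func main() {
	var s items
	s.Append("a")
	fmt.Println(s.Slice()) // [a]
	s.Append("b")
	fmt.Println(s.Slice()) // [a b]
}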
diff --git a/vendor/github.com/go-openapi/validate/rexp.go b/vendor/github.com/go-openapi/validate/rexp.go
deleted file mode 100644
index 76de03e1f4..0000000000
--- a/vendor/github.com/go-openapi/validate/rexp.go
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- re "regexp"
- "sync"
- "sync/atomic"
-)
-
-// Cache for compiled regular expressions
-var (
- cacheMutex = &sync.Mutex{}
- reDict = atomic.Value{} // map[string]*re.Regexp
-)
-
-func compileRegexp(pattern string) (*re.Regexp, error) {
- if cache, ok := reDict.Load().(map[string]*re.Regexp); ok {
- if r := cache[pattern]; r != nil {
- return r, nil
- }
- }
-
- r, err := re.Compile(pattern)
- if err != nil {
- return nil, err
- }
- cacheRegexp(r)
- return r, nil
-}
-
-func mustCompileRegexp(pattern string) *re.Regexp {
- if cache, ok := reDict.Load().(map[string]*re.Regexp); ok {
- if r := cache[pattern]; r != nil {
- return r
- }
- }
-
- r := re.MustCompile(pattern)
- cacheRegexp(r)
- return r
-}
-
-func cacheRegexp(r *re.Regexp) {
- cacheMutex.Lock()
- defer cacheMutex.Unlock()
-
- if cache, ok := reDict.Load().(map[string]*re.Regexp); !ok || cache[r.String()] == nil {
- newCache := map[string]*re.Regexp{
- r.String(): r,
- }
-
- for k, v := range cache {
- newCache[k] = v
- }
-
- reDict.Store(newCache)
- }
-}
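The removed rexp.go keeps regexp lookups lock-free on the read path: readers load the current map from an atomic.Value, while writers rebuild a fresh copy under a mutex and atomically swap it in, so a published map is never mutated. A condensed sketch of that copy-on-write cache, independent of the validate package:

package main

import (
	"fmt"
	"regexp"
	"sync"
	"sync/atomic"
)

var (
	cacheMu sync.Mutex
	cache   atomic.Value // holds map[string]*regexp.Regexp
)

// compile returns a cached compiled pattern when available, and otherwise
// compiles it and publishes an extended copy of the cache.
func compile(pattern string) (*regexp.Regexp, error) {
	if m, ok := cache.Load().(map[string]*regexp.Regexp); ok {
		if r := m[pattern]; r != nil {
			return r, nil
		}
	}

	r, err := regexp.Compile(pattern)
	if err != nil {
		return nil, err
	}

	cacheMu.Lock()
	defer cacheMu.Unlock()
	old, _ := cache.Load().(map[string]*regexp.Regexp)
	next := make(map[string]*regexp.Regexp, len(old)+1)
	for k, v := range old {
		next[k] = v
	}
	next[pattern] = r
	cache.Store(next) // readers never observe a partially built map
	return r, nil
}

func main() {
	r, err := compile(`^[a-z]+$`)
	fmt.Println(r.MatchString("abc"), err) // true <nil>
}

The copy keeps reads cheap at the cost of rebuilding the map for each new pattern, which is acceptable when the set of patterns is small and stable.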
diff --git a/vendor/github.com/go-openapi/validate/schema.go b/vendor/github.com/go-openapi/validate/schema.go
deleted file mode 100644
index db65264fd1..0000000000
--- a/vendor/github.com/go-openapi/validate/schema.go
+++ /dev/null
@@ -1,354 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "encoding/json"
- "reflect"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// SchemaValidator validates data against a JSON schema
-type SchemaValidator struct {
- Path string
- in string
- Schema *spec.Schema
- validators [8]valueValidator
- Root interface{}
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-// AgainstSchema validates the specified data against the provided schema, using a registry of supported formats.
-//
-// When no pre-parsed *spec.Schema structure is provided, it uses a JSON schema as default. See example.
-func AgainstSchema(schema *spec.Schema, data interface{}, formats strfmt.Registry, options ...Option) error {
- res := NewSchemaValidator(schema, nil, "", formats,
- append(options, WithRecycleValidators(true), withRecycleResults(true))...,
- ).Validate(data)
- defer func() {
- pools.poolOfResults.RedeemResult(res)
- }()
-
- if res.HasErrors() {
- return errors.CompositeValidationError(res.Errors...)
- }
-
- return nil
-}
-
-// NewSchemaValidator creates a new schema validator.
-//
-// Panics if the provided schema is invalid.
-func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, options ...Option) *SchemaValidator {
- opts := new(SchemaValidatorOptions)
- for _, o := range options {
- o(opts)
- }
-
- return newSchemaValidator(schema, rootSchema, root, formats, opts)
-}
-
-func newSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, opts *SchemaValidatorOptions) *SchemaValidator {
- if schema == nil {
- return nil
- }
-
- if rootSchema == nil {
- rootSchema = schema
- }
-
- if schema.ID != "" || schema.Ref.String() != "" || schema.Ref.IsRoot() {
- err := spec.ExpandSchema(schema, rootSchema, nil)
- if err != nil {
- msg := invalidSchemaProvidedMsg(err).Error()
- panic(msg)
- }
- }
-
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var s *SchemaValidator
- if opts.recycleValidators {
- s = pools.poolOfSchemaValidators.BorrowValidator()
- } else {
- s = new(SchemaValidator)
- }
-
- s.Path = root
- s.in = "body"
- s.Schema = schema
- s.Root = rootSchema
- s.Options = opts
- s.KnownFormats = formats
-
- s.validators = [8]valueValidator{
- s.typeValidator(),
- s.schemaPropsValidator(),
- s.stringValidator(),
- s.formatValidator(),
- s.numberValidator(),
- s.sliceValidator(),
- s.commonValidator(),
- s.objectValidator(),
- }
-
- return s
-}
-
-// SetPath sets the path for this schema validator
-func (s *SchemaValidator) SetPath(path string) {
- s.Path = path
-}
-
-// Applies returns true when this schema validator applies
-func (s *SchemaValidator) Applies(source interface{}, _ reflect.Kind) bool {
- _, ok := source.(*spec.Schema)
- return ok
-}
-
-// Validate validates the data against the schema
-func (s *SchemaValidator) Validate(data interface{}) *Result {
- if s == nil {
- return emptyResult
- }
-
- if s.Options.recycleValidators {
- defer func() {
- s.redeemChildren()
- s.redeem() // one-time use validator
- }()
- }
-
- var result *Result
- if s.Options.recycleResult {
- result = pools.poolOfResults.BorrowResult()
- result.data = data
- } else {
- result = &Result{data: data}
- }
-
- if s.Schema != nil && !s.Options.skipSchemataResult {
- result.addRootObjectSchemata(s.Schema)
- }
-
- if data == nil {
- // early exit with minimal validation
- result.Merge(s.validators[0].Validate(data)) // type validator
- result.Merge(s.validators[6].Validate(data)) // common validator
-
- if s.Options.recycleValidators {
- s.validators[0] = nil
- s.validators[6] = nil
- }
-
- return result
- }
-
- tpe := reflect.TypeOf(data)
- kind := tpe.Kind()
- for kind == reflect.Ptr {
- tpe = tpe.Elem()
- kind = tpe.Kind()
- }
- d := data
-
- if kind == reflect.Struct {
- // NOTE: since reflect retrieves the true nature of types
- // this means that all strfmt types passed here (e.g. strfmt.Datetime, etc..)
- // are converted here to strings, and structs are systematically converted
- // to map[string]interface{}.
- d = swag.ToDynamicJSON(data)
- }
-
- // TODO: this part should be handed over to type validator
- // Handle special case of json.Number data (number marshalled as string)
- isnumber := s.Schema.Type.Contains(numberType) || s.Schema.Type.Contains(integerType)
- if num, ok := data.(json.Number); ok && isnumber {
- if s.Schema.Type.Contains(integerType) { // avoid lossy conversion
- in, erri := num.Int64()
- if erri != nil {
- result.AddErrors(invalidTypeConversionMsg(s.Path, erri))
- result.Inc()
-
- return result
- }
- d = in
- } else {
- nf, errf := num.Float64()
- if errf != nil {
- result.AddErrors(invalidTypeConversionMsg(s.Path, errf))
- result.Inc()
-
- return result
- }
- d = nf
- }
-
- tpe = reflect.TypeOf(d)
- kind = tpe.Kind()
- }
-
- for idx, v := range s.validators {
- if !v.Applies(s.Schema, kind) {
- if s.Options.recycleValidators {
- // Validate won't be called, so relinquish this validator
- if redeemableChildren, ok := v.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := v.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- s.validators[idx] = nil // prevents further (unsafe) usage
- }
-
- continue
- }
-
- result.Merge(v.Validate(d))
- if s.Options.recycleValidators {
- s.validators[idx] = nil // prevents further (unsafe) usage
- }
- result.Inc()
- }
- result.Inc()
-
- return result
-}
-
-func (s *SchemaValidator) typeValidator() valueValidator {
- return newTypeValidator(
- s.Path,
- s.in,
- s.Schema.Type,
- s.Schema.Nullable,
- s.Schema.Format,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) commonValidator() valueValidator {
- return newBasicCommonValidator(
- s.Path,
- s.in,
- s.Schema.Default,
- s.Schema.Enum,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) sliceValidator() valueValidator {
- return newSliceValidator(
- s.Path,
- s.in,
- s.Schema.MaxItems,
- s.Schema.MinItems,
- s.Schema.UniqueItems,
- s.Schema.AdditionalItems,
- s.Schema.Items,
- s.Root,
- s.KnownFormats,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) numberValidator() valueValidator {
- return newNumberValidator(
- s.Path,
- s.in,
- s.Schema.Default,
- s.Schema.MultipleOf,
- s.Schema.Maximum,
- s.Schema.ExclusiveMaximum,
- s.Schema.Minimum,
- s.Schema.ExclusiveMinimum,
- "",
- "",
- s.Options,
- )
-}
-
-func (s *SchemaValidator) stringValidator() valueValidator {
- return newStringValidator(
- s.Path,
- s.in,
- nil,
- false,
- false,
- s.Schema.MaxLength,
- s.Schema.MinLength,
- s.Schema.Pattern,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) formatValidator() valueValidator {
- return newFormatValidator(
- s.Path,
- s.in,
- s.Schema.Format,
- s.KnownFormats,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) schemaPropsValidator() valueValidator {
- sch := s.Schema
- return newSchemaPropsValidator(
- s.Path, s.in, sch.AllOf, sch.OneOf, sch.AnyOf, sch.Not, sch.Dependencies, s.Root, s.KnownFormats,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) objectValidator() valueValidator {
- return newObjectValidator(
- s.Path,
- s.in,
- s.Schema.MaxProperties,
- s.Schema.MinProperties,
- s.Schema.Required,
- s.Schema.Properties,
- s.Schema.AdditionalProperties,
- s.Schema.PatternProperties,
- s.Root,
- s.KnownFormats,
- s.Options,
- )
-}
-
-func (s *SchemaValidator) redeem() {
- pools.poolOfSchemaValidators.RedeemValidator(s)
-}
-
-func (s *SchemaValidator) redeemChildren() {
- for i, validator := range s.validators {
- if validator == nil {
- continue
- }
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- s.validators[i] = nil // free up allocated children if not in pool
- }
-}
diff --git a/vendor/github.com/go-openapi/validate/schema_messages.go b/vendor/github.com/go-openapi/validate/schema_messages.go
deleted file mode 100644
index 786e2e3554..0000000000
--- a/vendor/github.com/go-openapi/validate/schema_messages.go
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "github.com/go-openapi/errors"
-)
-
-// Error messages related to schema validation and returned as results.
-const (
- // ArrayDoesNotAllowAdditionalItemsError when an additionalItems construct is not verified by the array values provided.
- //
- // TODO: should move to package go-openapi/errors
- ArrayDoesNotAllowAdditionalItemsError = "array doesn't allow for additional items"
-
- // HasDependencyError indicates that a dependencies construct was not verified
- HasDependencyError = "%q has a dependency on %s"
-
- // InvalidSchemaProvidedError indicates that the schema provided to validate a value cannot be properly compiled
- InvalidSchemaProvidedError = "Invalid schema provided to SchemaValidator: %v"
-
- // InvalidTypeConversionError indicates that a numerical conversion for the given type could not be carried on
- InvalidTypeConversionError = "invalid type conversion in %s: %v "
-
- // MustValidateAtLeastOneSchemaError indicates that in an AnyOf construct, none of the schema constraints specified were verified
- MustValidateAtLeastOneSchemaError = "%q must validate at least one schema (anyOf)"
-
- // MustValidateOnlyOneSchemaError indicates that in a OneOf construct, either none of the schema constraints specified were verified, or several were
- MustValidateOnlyOneSchemaError = "%q must validate one and only one schema (oneOf). %s"
-
- // MustValidateAllSchemasError indicates that in an AllOf construct, at least one of the schema constraints specified was not verified
- //
- // TODO: punctuation in message
- MustValidateAllSchemasError = "%q must validate all the schemas (allOf)%s"
-
- // MustNotValidateSchemaError indicates that in a Not construct, the schema constraint specified was verified
- MustNotValidateSchemaError = "%q must not validate the schema (not)"
-)
-
-// Warning messages related to schema validation and returned as results
-const ()
-
-func invalidSchemaProvidedMsg(err error) errors.Error {
- return errors.New(InternalErrorCode, InvalidSchemaProvidedError, err)
-}
-func invalidTypeConversionMsg(path string, err error) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidTypeConversionError, path, err)
-}
-func mustValidateOnlyOneSchemaMsg(path, additionalMsg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, MustValidateOnlyOneSchemaError, path, additionalMsg)
-}
-func mustValidateAtLeastOneSchemaMsg(path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, MustValidateAtLeastOneSchemaError, path)
-}
-func mustValidateAllSchemasMsg(path, additionalMsg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, MustValidateAllSchemasError, path, additionalMsg)
-}
-func mustNotValidatechemaMsg(path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, MustNotValidateSchemaError, path)
-}
-func hasADependencyMsg(path, depkey string) errors.Error {
- return errors.New(errors.CompositeErrorCode, HasDependencyError, path, depkey)
-}
-func arrayDoesNotAllowAdditionalItemsMsg() errors.Error {
- return errors.New(errors.CompositeErrorCode, ArrayDoesNotAllowAdditionalItemsError)
-}
diff --git a/vendor/github.com/go-openapi/validate/schema_option.go b/vendor/github.com/go-openapi/validate/schema_option.go
deleted file mode 100644
index 65eeebeaab..0000000000
--- a/vendor/github.com/go-openapi/validate/schema_option.go
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-// SchemaValidatorOptions defines optional rules for schema validation
-type SchemaValidatorOptions struct {
- EnableObjectArrayTypeCheck bool
- EnableArrayMustHaveItemsCheck bool
- recycleValidators bool
- recycleResult bool
- skipSchemataResult bool
-}
-
-// Option sets optional rules for schema validation
-type Option func(*SchemaValidatorOptions)
-
-// EnableObjectArrayTypeCheck activates the swagger rule: a schema with items must be of type: array
-func EnableObjectArrayTypeCheck(enable bool) Option {
- return func(svo *SchemaValidatorOptions) {
- svo.EnableObjectArrayTypeCheck = enable
- }
-}
-
-// EnableArrayMustHaveItemsCheck activates the swagger rule: an array must have items defined
-func EnableArrayMustHaveItemsCheck(enable bool) Option {
- return func(svo *SchemaValidatorOptions) {
- svo.EnableArrayMustHaveItemsCheck = enable
- }
-}
-
-// SwaggerSchema activates swagger schema validation rules
-func SwaggerSchema(enable bool) Option {
- return func(svo *SchemaValidatorOptions) {
- svo.EnableObjectArrayTypeCheck = enable
- svo.EnableArrayMustHaveItemsCheck = enable
- }
-}
-
-// WithRecycleValidators saves memory allocations and makes validators
-// available for a single use of Validate() only.
-//
-// When a validator is recycled, callers MUST NOT call the Validate() method twice.
-func WithRecycleValidators(enable bool) Option {
- return func(svo *SchemaValidatorOptions) {
- svo.recycleValidators = enable
- }
-}
-
-func withRecycleResults(enable bool) Option {
- return func(svo *SchemaValidatorOptions) {
- svo.recycleResult = enable
- }
-}
-
-// WithSkipSchemataResult skips the deep audit payload stored in validation Result
-func WithSkipSchemataResult(enable bool) Option {
- return func(svo *SchemaValidatorOptions) {
- svo.skipSchemataResult = enable
- }
-}
-
-// Options returns the current set of options
-func (svo SchemaValidatorOptions) Options() []Option {
- return []Option{
- EnableObjectArrayTypeCheck(svo.EnableObjectArrayTypeCheck),
- EnableArrayMustHaveItemsCheck(svo.EnableArrayMustHaveItemsCheck),
- WithRecycleValidators(svo.recycleValidators),
- withRecycleResults(svo.recycleResult),
- WithSkipSchemataResult(svo.skipSchemataResult),
- }
-}
diff --git a/vendor/github.com/go-openapi/validate/schema_props.go b/vendor/github.com/go-openapi/validate/schema_props.go
deleted file mode 100644
index 1ca379244d..0000000000
--- a/vendor/github.com/go-openapi/validate/schema_props.go
+++ /dev/null
@@ -1,356 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
- "reflect"
-
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-type schemaPropsValidator struct {
- Path string
- In string
- AllOf []spec.Schema
- OneOf []spec.Schema
- AnyOf []spec.Schema
- Not *spec.Schema
- Dependencies spec.Dependencies
- anyOfValidators []*SchemaValidator
- allOfValidators []*SchemaValidator
- oneOfValidators []*SchemaValidator
- notValidator *SchemaValidator
- Root interface{}
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-func (s *schemaPropsValidator) SetPath(path string) {
- s.Path = path
-}
-
-func newSchemaPropsValidator(
- path string, in string, allOf, oneOf, anyOf []spec.Schema, not *spec.Schema, deps spec.Dependencies, root interface{}, formats strfmt.Registry,
- opts *SchemaValidatorOptions) *schemaPropsValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- anyValidators := make([]*SchemaValidator, 0, len(anyOf))
- for i := range anyOf {
- anyValidators = append(anyValidators, newSchemaValidator(&anyOf[i], root, path, formats, opts))
- }
- allValidators := make([]*SchemaValidator, 0, len(allOf))
- for i := range allOf {
- allValidators = append(allValidators, newSchemaValidator(&allOf[i], root, path, formats, opts))
- }
- oneValidators := make([]*SchemaValidator, 0, len(oneOf))
- for i := range oneOf {
- oneValidators = append(oneValidators, newSchemaValidator(&oneOf[i], root, path, formats, opts))
- }
-
- var notValidator *SchemaValidator
- if not != nil {
- notValidator = newSchemaValidator(not, root, path, formats, opts)
- }
-
- var s *schemaPropsValidator
- if opts.recycleValidators {
- s = pools.poolOfSchemaPropsValidators.BorrowValidator()
- } else {
- s = new(schemaPropsValidator)
- }
-
- s.Path = path
- s.In = in
- s.AllOf = allOf
- s.OneOf = oneOf
- s.AnyOf = anyOf
- s.Not = not
- s.Dependencies = deps
- s.anyOfValidators = anyValidators
- s.allOfValidators = allValidators
- s.oneOfValidators = oneValidators
- s.notValidator = notValidator
- s.Root = root
- s.KnownFormats = formats
- s.Options = opts
-
- return s
-}
-
-func (s *schemaPropsValidator) Applies(source interface{}, _ reflect.Kind) bool {
- _, isSchema := source.(*spec.Schema)
- return isSchema
-}
-
-func (s *schemaPropsValidator) Validate(data interface{}) *Result {
- var mainResult *Result
- if s.Options.recycleResult {
- mainResult = pools.poolOfResults.BorrowResult()
- } else {
- mainResult = new(Result)
- }
-
- // Intermediary error results
-
- // IMPORTANT! messages from underlying validators
- var keepResultAnyOf, keepResultOneOf, keepResultAllOf *Result
-
- if s.Options.recycleValidators {
- defer func() {
- s.redeemChildren()
- s.redeem()
-
- // results are redeemed when merged
- }()
- }
-
- if len(s.anyOfValidators) > 0 {
- keepResultAnyOf = pools.poolOfResults.BorrowResult()
- s.validateAnyOf(data, mainResult, keepResultAnyOf)
- }
-
- if len(s.oneOfValidators) > 0 {
- keepResultOneOf = pools.poolOfResults.BorrowResult()
- s.validateOneOf(data, mainResult, keepResultOneOf)
- }
-
- if len(s.allOfValidators) > 0 {
- keepResultAllOf = pools.poolOfResults.BorrowResult()
- s.validateAllOf(data, mainResult, keepResultAllOf)
- }
-
- if s.notValidator != nil {
- s.validateNot(data, mainResult)
- }
-
- if s.Dependencies != nil && len(s.Dependencies) > 0 && reflect.TypeOf(data).Kind() == reflect.Map {
- s.validateDependencies(data, mainResult)
- }
-
- mainResult.Inc()
-
- // In the end we retain best failures for schema validation
- // plus, if any, composite errors which may explain special cases (tagged as IMPORTANT!).
- return mainResult.Merge(keepResultAllOf, keepResultOneOf, keepResultAnyOf)
-}
-
-func (s *schemaPropsValidator) validateAnyOf(data interface{}, mainResult, keepResultAnyOf *Result) {
- // Validates at least one in anyOf schemas
- var bestFailures *Result
-
- for i, anyOfSchema := range s.anyOfValidators {
- result := anyOfSchema.Validate(data)
- if s.Options.recycleValidators {
- s.anyOfValidators[i] = nil
- }
- // We keep inner IMPORTANT! errors no matter what MatchCount tells us
- keepResultAnyOf.Merge(result.keepRelevantErrors()) // merges (and redeems) a new instance of Result
-
- if result.IsValid() {
- if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(bestFailures)
- }
-
- _ = keepResultAnyOf.cleared()
- mainResult.Merge(result)
-
- return
- }
-
- // MatchCount is used to select errors from the schema with most positive checks
- if bestFailures == nil || result.MatchCount > bestFailures.MatchCount {
- if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(bestFailures)
- }
- bestFailures = result
-
- continue
- }
-
- if result.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(result) // this result is ditched
- }
- }
-
- mainResult.AddErrors(mustValidateAtLeastOneSchemaMsg(s.Path))
- mainResult.Merge(bestFailures)
-}
-
-func (s *schemaPropsValidator) validateOneOf(data interface{}, mainResult, keepResultOneOf *Result) {
- // Validates exactly one in oneOf schemas
- var (
- firstSuccess, bestFailures *Result
- validated int
- )
-
- for i, oneOfSchema := range s.oneOfValidators {
- result := oneOfSchema.Validate(data)
- if s.Options.recycleValidators {
- s.oneOfValidators[i] = nil
- }
-
- // We keep inner IMPORTANT! errors no matter what MatchCount tells us
- keepResultOneOf.Merge(result.keepRelevantErrors()) // merges (and redeems) a new instance of Result
-
- if result.IsValid() {
- validated++
- _ = keepResultOneOf.cleared()
-
- if firstSuccess == nil {
- firstSuccess = result
- } else if result.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(result) // this result is ditched
- }
-
- continue
- }
-
- // MatchCount is used to select errors from the schema with most positive checks
- if validated == 0 && (bestFailures == nil || result.MatchCount > bestFailures.MatchCount) {
- if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(bestFailures)
- }
- bestFailures = result
- } else if result.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(result) // this result is ditched
- }
- }
-
- switch validated {
- case 0:
- mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, "Found none valid"))
- mainResult.Merge(bestFailures)
- // firstSuccess is necessarily nil
- case 1:
- mainResult.Merge(firstSuccess)
- if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(bestFailures)
- }
- default:
- mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, fmt.Sprintf("Found %d valid alternatives", validated)))
- mainResult.Merge(bestFailures)
- if firstSuccess != nil && firstSuccess.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(firstSuccess)
- }
- }
-}
-
-func (s *schemaPropsValidator) validateAllOf(data interface{}, mainResult, keepResultAllOf *Result) {
- // Validates all of allOf schemas
- var validated int
-
- for i, allOfSchema := range s.allOfValidators {
- result := allOfSchema.Validate(data)
- if s.Options.recycleValidators {
- s.allOfValidators[i] = nil
- }
- // We keep inner IMPORTANT! errors no matter what MatchCount tells us
- keepResultAllOf.Merge(result.keepRelevantErrors())
- if result.IsValid() {
- validated++
- }
- mainResult.Merge(result)
- }
-
- switch validated {
- case 0:
- mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, ". None validated"))
- case len(s.allOfValidators):
- default:
- mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, ""))
- }
-}
-
-func (s *schemaPropsValidator) validateNot(data interface{}, mainResult *Result) {
- result := s.notValidator.Validate(data)
- if s.Options.recycleValidators {
- s.notValidator = nil
- }
- // We keep inner IMPORTANT! errors no matter what MatchCount tells us
- if result.IsValid() {
- mainResult.AddErrors(mustNotValidatechemaMsg(s.Path))
- }
- if result.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(result) // this result is ditched
- }
-}
-
-func (s *schemaPropsValidator) validateDependencies(data interface{}, mainResult *Result) {
- val := data.(map[string]interface{})
- for key := range val {
- dep, ok := s.Dependencies[key]
- if !ok {
- continue
- }
-
- if dep.Schema != nil {
- mainResult.Merge(
- newSchemaValidator(dep.Schema, s.Root, s.Path+"."+key, s.KnownFormats, s.Options).Validate(data),
- )
- continue
- }
-
- if len(dep.Property) > 0 {
- for _, depKey := range dep.Property {
- if _, ok := val[depKey]; !ok {
- mainResult.AddErrors(hasADependencyMsg(s.Path, depKey))
- }
- }
- }
- }
-}
-
-func (s *schemaPropsValidator) redeem() {
- pools.poolOfSchemaPropsValidators.RedeemValidator(s)
-}
-
-func (s *schemaPropsValidator) redeemChildren() {
- for _, v := range s.anyOfValidators {
- if v == nil {
- continue
- }
- v.redeemChildren()
- v.redeem()
- }
- s.anyOfValidators = nil
-
- for _, v := range s.allOfValidators {
- if v == nil {
- continue
- }
- v.redeemChildren()
- v.redeem()
- }
- s.allOfValidators = nil
-
- for _, v := range s.oneOfValidators {
- if v == nil {
- continue
- }
- v.redeemChildren()
- v.redeem()
- }
- s.oneOfValidators = nil
-
- if s.notValidator != nil {
- s.notValidator.redeemChildren()
- s.notValidator.redeem()
- s.notValidator = nil
- }
-}
diff --git a/vendor/github.com/go-openapi/validate/slice_validator.go b/vendor/github.com/go-openapi/validate/slice_validator.go
deleted file mode 100644
index 13bb02087d..0000000000
--- a/vendor/github.com/go-openapi/validate/slice_validator.go
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
- "reflect"
-
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-type schemaSliceValidator struct {
- Path string
- In string
- MaxItems *int64
- MinItems *int64
- UniqueItems bool
- AdditionalItems *spec.SchemaOrBool
- Items *spec.SchemaOrArray
- Root interface{}
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-func newSliceValidator(path, in string,
- maxItems, minItems *int64, uniqueItems bool,
- additionalItems *spec.SchemaOrBool, items *spec.SchemaOrArray,
- root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *schemaSliceValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var v *schemaSliceValidator
- if opts.recycleValidators {
- v = pools.poolOfSliceValidators.BorrowValidator()
- } else {
- v = new(schemaSliceValidator)
- }
-
- v.Path = path
- v.In = in
- v.MaxItems = maxItems
- v.MinItems = minItems
- v.UniqueItems = uniqueItems
- v.AdditionalItems = additionalItems
- v.Items = items
- v.Root = root
- v.KnownFormats = formats
- v.Options = opts
-
- return v
-}
-
-func (s *schemaSliceValidator) SetPath(path string) {
- s.Path = path
-}
-
-func (s *schemaSliceValidator) Applies(source interface{}, kind reflect.Kind) bool {
- _, ok := source.(*spec.Schema)
- r := ok && kind == reflect.Slice
- return r
-}
-
-func (s *schemaSliceValidator) Validate(data interface{}) *Result {
- if s.Options.recycleValidators {
- defer func() {
- s.redeem()
- }()
- }
-
- var result *Result
- if s.Options.recycleResult {
- result = pools.poolOfResults.BorrowResult()
- } else {
- result = new(Result)
- }
- if data == nil {
- return result
- }
- val := reflect.ValueOf(data)
- size := val.Len()
-
- if s.Items != nil && s.Items.Schema != nil {
- for i := 0; i < size; i++ {
- validator := newSchemaValidator(s.Items.Schema, s.Root, s.Path, s.KnownFormats, s.Options)
- validator.SetPath(fmt.Sprintf("%s.%d", s.Path, i))
- value := val.Index(i)
- result.mergeForSlice(val, i, validator.Validate(value.Interface()))
- }
- }
-
- itemsSize := 0
- if s.Items != nil && len(s.Items.Schemas) > 0 {
- itemsSize = len(s.Items.Schemas)
- for i := 0; i < itemsSize; i++ {
- if size <= i {
- break
- }
-
- validator := newSchemaValidator(&s.Items.Schemas[i], s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options)
- result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface()))
- }
- }
- if s.AdditionalItems != nil && itemsSize < size {
- if s.Items != nil && len(s.Items.Schemas) > 0 && !s.AdditionalItems.Allows {
- result.AddErrors(arrayDoesNotAllowAdditionalItemsMsg())
- }
- if s.AdditionalItems.Schema != nil {
- for i := itemsSize; i < size-itemsSize+1; i++ {
- validator := newSchemaValidator(s.AdditionalItems.Schema, s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options)
- result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface()))
- }
- }
- }
-
- if s.MinItems != nil {
- if err := MinItems(s.Path, s.In, int64(size), *s.MinItems); err != nil {
- result.AddErrors(err)
- }
- }
- if s.MaxItems != nil {
- if err := MaxItems(s.Path, s.In, int64(size), *s.MaxItems); err != nil {
- result.AddErrors(err)
- }
- }
- if s.UniqueItems {
- if err := UniqueItems(s.Path, s.In, val.Interface()); err != nil {
- result.AddErrors(err)
- }
- }
- result.Inc()
- return result
-}
-
-func (s *schemaSliceValidator) redeem() {
- pools.poolOfSliceValidators.RedeemValidator(s)
-}
diff --git a/vendor/github.com/go-openapi/validate/spec.go b/vendor/github.com/go-openapi/validate/spec.go
deleted file mode 100644
index 965452566e..0000000000
--- a/vendor/github.com/go-openapi/validate/spec.go
+++ /dev/null
@@ -1,852 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/json"
- "fmt"
- "sort"
- "strings"
-
- "github.com/go-openapi/analysis"
- "github.com/go-openapi/errors"
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/loads"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// Spec validates an OpenAPI 2.0 specification document.
-//
-// Returns an error flattening in a single standard error, all validation messages.
-//
-// - TODO: $ref should not have siblings
-// - TODO: make sure documentation reflects all checks and warnings
-// - TODO: check on discriminators
-// - TODO: explicit message on unsupported keywords (better than "forbidden property"...)
-// - TODO: full list of unresolved refs
-// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples
-// - TODO: option to determine if we validate for go-swagger or in a more general context
-// - TODO: check on required properties to support anyOf, allOf, oneOf
-//
-// NOTE: SecurityScopes are maps: no need to check uniqueness
-func Spec(doc *loads.Document, formats strfmt.Registry) error {
- errs, _ /*warns*/ := NewSpecValidator(doc.Schema(), formats).Validate(doc)
- if errs.HasErrors() {
- return errors.CompositeValidationError(errs.Errors...)
- }
- return nil
-}
-
-// SpecValidator validates a swagger 2.0 spec
-type SpecValidator struct {
- schema *spec.Schema // swagger 2.0 schema
- spec *loads.Document
- analyzer *analysis.Spec
- expanded *loads.Document
- KnownFormats strfmt.Registry
- Options Opts // validation options
- schemaOptions *SchemaValidatorOptions
-}
-
-// NewSpecValidator creates a new swagger spec validator instance
-func NewSpecValidator(schema *spec.Schema, formats strfmt.Registry) *SpecValidator {
- // schema options that apply to all called validators
- schemaOptions := new(SchemaValidatorOptions)
- for _, o := range []Option{
- SwaggerSchema(true),
- WithRecycleValidators(true),
- // withRecycleResults(true),
- } {
- o(schemaOptions)
- }
-
- return &SpecValidator{
- schema: schema,
- KnownFormats: formats,
- Options: defaultOpts,
- schemaOptions: schemaOptions,
- }
-}
-
-// Validate validates the swagger spec
-func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
- s.schemaOptions.skipSchemataResult = s.Options.SkipSchemataResult
- var sd *loads.Document
- errs, warnings := new(Result), new(Result)
-
- if v, ok := data.(*loads.Document); ok {
- sd = v
- }
- if sd == nil {
- errs.AddErrors(invalidDocumentMsg())
- return errs, warnings // no point in continuing
- }
- s.spec = sd
- s.analyzer = analysis.New(sd.Spec())
-
- // Raw spec unmarshalling errors
- var obj interface{}
- if err := json.Unmarshal(sd.Raw(), &obj); err != nil {
- // NOTE: under normal conditions, the *loads.Document has already been unmarshalled
- // So this one is just a paranoid check on the behavior of the spec package
- panic(InvalidDocumentError)
- }
-
- defer func() {
- // errs holds all errors and warnings,
- // warnings only warnings
- errs.MergeAsWarnings(warnings)
- warnings.AddErrors(errs.Warnings...)
- }()
-
- // Swagger schema validator
- schv := newSchemaValidator(s.schema, nil, "", s.KnownFormats, s.schemaOptions)
- errs.Merge(schv.Validate(obj)) // error -
- // There may be a point in continuing to try and determine more accurate errors
- if !s.Options.ContinueOnErrors && errs.HasErrors() {
- return errs, warnings // no point in continuing
- }
-
- errs.Merge(s.validateReferencesValid()) // error -
- // There may be a point in continuing to try and determine more accurate errors
- if !s.Options.ContinueOnErrors && errs.HasErrors() {
- return errs, warnings // no point in continuing
- }
-
- errs.Merge(s.validateDuplicateOperationIDs())
- errs.Merge(s.validateDuplicatePropertyNames()) // error -
- errs.Merge(s.validateParameters()) // error -
- errs.Merge(s.validateItems()) // error -
-
- // Properties in required definition MUST validate their schema
- // Properties SHOULD NOT be declared as both required and readOnly (warning)
- errs.Merge(s.validateRequiredDefinitions()) // error and warning
-
- // There may be a point in continuing to try and determine more accurate errors
- if !s.Options.ContinueOnErrors && errs.HasErrors() {
- return errs, warnings // no point in continuing
- }
-
- // Values provided as default MUST validate their schema
- df := &defaultValidator{SpecValidator: s, schemaOptions: s.schemaOptions}
- errs.Merge(df.Validate())
-
- // Values provided as examples MUST validate their schema
- // Value provided as examples in a response without schema generate a warning
- // Known limitations: examples in responses for mime type not application/json are ignored (warning)
- ex := &exampleValidator{SpecValidator: s, schemaOptions: s.schemaOptions}
- errs.Merge(ex.Validate())
-
- errs.Merge(s.validateNonEmptyPathParamNames())
-
- // errs.Merge(s.validateRefNoSibling()) // warning only
- errs.Merge(s.validateReferenced()) // warning only
-
- return errs, warnings
-}
-
-func (s *SpecValidator) validateNonEmptyPathParamNames() *Result {
- res := pools.poolOfResults.BorrowResult()
- if s.spec.Spec().Paths == nil {
- // There is no Paths object: error
- res.AddErrors(noValidPathMsg())
-
- return res
- }
-
- if s.spec.Spec().Paths.Paths == nil {
- // Paths may be empty: warning
- res.AddWarnings(noValidPathMsg())
-
- return res
- }
-
- for k := range s.spec.Spec().Paths.Paths {
- if strings.Contains(k, "{}") {
- res.AddErrors(emptyPathParameterMsg(k))
- }
- }
-
- return res
-}
-
-func (s *SpecValidator) validateDuplicateOperationIDs() *Result {
- // OperationID, if specified, must be unique across the board
- var analyzer *analysis.Spec
- if s.expanded != nil {
- // $ref are valid: we can analyze operations on an expanded spec
- analyzer = analysis.New(s.expanded.Spec())
- } else {
- // fallback on possible incomplete picture because of previous errors
- analyzer = s.analyzer
- }
- res := pools.poolOfResults.BorrowResult()
- known := make(map[string]int)
- for _, v := range analyzer.OperationIDs() {
- if v != "" {
- known[v]++
- }
- }
- for k, v := range known {
- if v > 1 {
- res.AddErrors(nonUniqueOperationIDMsg(k, v))
- }
- }
- return res
-}
-
-type dupProp struct {
- Name string
- Definition string
-}
-
-func (s *SpecValidator) validateDuplicatePropertyNames() *Result {
- // definition can't declare a property that's already defined by one of its ancestors
- res := pools.poolOfResults.BorrowResult()
- for k, sch := range s.spec.Spec().Definitions {
- if len(sch.AllOf) == 0 {
- continue
- }
-
- knownanc := map[string]struct{}{
- "#/definitions/" + k: {},
- }
-
- ancs, rec := s.validateCircularAncestry(k, sch, knownanc)
- if rec != nil && (rec.HasErrors() || !rec.HasWarnings()) {
- res.Merge(rec)
- }
- if len(ancs) > 0 {
- res.AddErrors(circularAncestryDefinitionMsg(k, ancs))
- return res
- }
-
- knowns := make(map[string]struct{})
- dups, rep := s.validateSchemaPropertyNames(k, sch, knowns)
- if rep != nil && (rep.HasErrors() || rep.HasWarnings()) {
- res.Merge(rep)
- }
- if len(dups) > 0 {
- var pns []string
- for _, v := range dups {
- pns = append(pns, v.Definition+"."+v.Name)
- }
- res.AddErrors(duplicatePropertiesMsg(k, pns))
- }
-
- }
- return res
-}
-
-func (s *SpecValidator) resolveRef(ref *spec.Ref) (*spec.Schema, error) {
- if s.spec.SpecFilePath() != "" {
- return spec.ResolveRefWithBase(s.spec.Spec(), ref, &spec.ExpandOptions{RelativeBase: s.spec.SpecFilePath()})
- }
- // NOTE: it looks like with the new spec resolver, this code is now unreachable
- return spec.ResolveRef(s.spec.Spec(), ref)
-}
-
-func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema, knowns map[string]struct{}) ([]dupProp, *Result) {
- var dups []dupProp
-
- schn := nm
- schc := &sch
- res := pools.poolOfResults.BorrowResult()
-
- for schc.Ref.String() != "" {
- // gather property names
- reso, err := s.resolveRef(&schc.Ref)
- if err != nil {
- errorHelp.addPointerError(res, err, schc.Ref.String(), nm)
- return dups, res
- }
- schc = reso
- schn = sch.Ref.String()
- }
-
- if len(schc.AllOf) > 0 {
- for _, chld := range schc.AllOf {
- dup, rep := s.validateSchemaPropertyNames(schn, chld, knowns)
- if rep != nil && (rep.HasErrors() || rep.HasWarnings()) {
- res.Merge(rep)
- }
- dups = append(dups, dup...)
- }
- return dups, res
- }
-
- for k := range schc.Properties {
- _, ok := knowns[k]
- if ok {
- dups = append(dups, dupProp{Name: k, Definition: schn})
- } else {
- knowns[k] = struct{}{}
- }
- }
-
- return dups, res
-}
-
-func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, knowns map[string]struct{}) ([]string, *Result) {
- res := pools.poolOfResults.BorrowResult()
-
- if sch.Ref.String() == "" && len(sch.AllOf) == 0 { // Safeguard. We should not be able to actually get there
- return nil, res
- }
- var ancs []string
-
- schn := nm
- schc := &sch
-
- for schc.Ref.String() != "" {
- reso, err := s.resolveRef(&schc.Ref)
- if err != nil {
- errorHelp.addPointerError(res, err, schc.Ref.String(), nm)
- return ancs, res
- }
- schc = reso
- schn = sch.Ref.String()
- }
-
- if schn != nm && schn != "" {
- if _, ok := knowns[schn]; ok {
- ancs = append(ancs, schn)
- }
- knowns[schn] = struct{}{}
-
- if len(ancs) > 0 {
- return ancs, res
- }
- }
-
- if len(schc.AllOf) > 0 {
- for _, chld := range schc.AllOf {
- if chld.Ref.String() != "" || len(chld.AllOf) > 0 {
- anc, rec := s.validateCircularAncestry(schn, chld, knowns)
- if rec != nil && (rec.HasErrors() || !rec.HasWarnings()) {
- res.Merge(rec)
- }
- ancs = append(ancs, anc...)
- if len(ancs) > 0 {
- return ancs, res
- }
- }
- }
- }
- return ancs, res
-}
-
- // validate parameter, items, schema and response objects for presence of items if type is array
- // validate parameter, items, schema and response objects for presence of item if type is array
- res := pools.poolOfResults.BorrowResult()
-
- for method, pi := range s.analyzer.Operations() {
- for path, op := range pi {
- for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
-
- if param.TypeName() == arrayType && param.ItemsTypeName() == "" {
- res.AddErrors(arrayInParamRequiresItemsMsg(param.Name, op.ID))
- continue
- }
- if param.In != swaggerBody {
- if param.Items != nil {
- items := param.Items
- for items.TypeName() == arrayType {
- if items.ItemsTypeName() == "" {
- res.AddErrors(arrayInParamRequiresItemsMsg(param.Name, op.ID))
- break
- }
- items = items.Items
- }
- }
- } else {
- // In: body
- if param.Schema != nil {
- res.Merge(s.validateSchemaItems(*param.Schema, fmt.Sprintf("body param %q", param.Name), op.ID))
- }
- }
- }
-
- var responses []spec.Response
- if op.Responses != nil {
- if op.Responses.Default != nil {
- responses = append(responses, *op.Responses.Default)
- }
- if op.Responses.StatusCodeResponses != nil {
- for _, v := range op.Responses.StatusCodeResponses {
- responses = append(responses, v)
- }
- }
- }
-
- for _, resp := range responses {
- // Response headers with array
- for hn, hv := range resp.Headers {
- if hv.TypeName() == arrayType && hv.ItemsTypeName() == "" {
- res.AddErrors(arrayInHeaderRequiresItemsMsg(hn, op.ID))
- }
- }
- if resp.Schema != nil {
- res.Merge(s.validateSchemaItems(*resp.Schema, "response body", op.ID))
- }
- }
- }
- }
- return res
-}
-
-// Verifies constraints on array type
-func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID string) *Result {
- res := pools.poolOfResults.BorrowResult()
- if !schema.Type.Contains(arrayType) {
- return res
- }
-
- if schema.Items == nil || schema.Items.Len() == 0 {
- res.AddErrors(arrayRequiresItemsMsg(prefix, opID))
- return res
- }
-
- if schema.Items.Schema != nil {
- schema = *schema.Items.Schema
- if _, err := compileRegexp(schema.Pattern); err != nil {
- res.AddErrors(invalidItemsPatternMsg(prefix, opID, schema.Pattern))
- }
-
- res.Merge(s.validateSchemaItems(schema, prefix, opID))
- }
- return res
-}
-
-func (s *SpecValidator) validatePathParamPresence(path string, fromPath, fromOperation []string) *Result {
- // Each defined operation path parameters must correspond to a named element in the API's path pattern.
- // (For example, you cannot have a path parameter named id for the following path /pets/{petId} but you must have a path parameter named petId.)
- res := pools.poolOfResults.BorrowResult()
- for _, l := range fromPath {
- var matched bool
- for _, r := range fromOperation {
- if l == "{"+r+"}" {
- matched = true
- break
- }
- }
- if !matched {
- res.AddErrors(noParameterInPathMsg(l))
- }
- }
-
- for _, p := range fromOperation {
- var matched bool
- for _, r := range fromPath {
- if "{"+p+"}" == r {
- matched = true
- break
- }
- }
- if !matched {
- res.AddErrors(pathParamNotInPathMsg(path, p))
- }
- }
-
- return res
-}
-
-func (s *SpecValidator) validateReferenced() *Result {
- var res Result
- res.MergeAsWarnings(s.validateReferencedParameters())
- res.MergeAsWarnings(s.validateReferencedResponses())
- res.MergeAsWarnings(s.validateReferencedDefinitions())
- return &res
-}
-
-func (s *SpecValidator) validateReferencedParameters() *Result {
- // Each referenceable definition should have references.
- params := s.spec.Spec().Parameters
- if len(params) == 0 {
- return nil
- }
-
- expected := make(map[string]struct{})
- for k := range params {
- expected["#/parameters/"+jsonpointer.Escape(k)] = struct{}{}
- }
- for _, k := range s.analyzer.AllParameterReferences() {
- delete(expected, k)
- }
-
- if len(expected) == 0 {
- return nil
- }
- result := pools.poolOfResults.BorrowResult()
- for k := range expected {
- result.AddWarnings(unusedParamMsg(k))
- }
- return result
-}
-
-func (s *SpecValidator) validateReferencedResponses() *Result {
- // Each referenceable definition should have references.
- responses := s.spec.Spec().Responses
- if len(responses) == 0 {
- return nil
- }
-
- expected := make(map[string]struct{})
- for k := range responses {
- expected["#/responses/"+jsonpointer.Escape(k)] = struct{}{}
- }
- for _, k := range s.analyzer.AllResponseReferences() {
- delete(expected, k)
- }
-
- if len(expected) == 0 {
- return nil
- }
- result := pools.poolOfResults.BorrowResult()
- for k := range expected {
- result.AddWarnings(unusedResponseMsg(k))
- }
- return result
-}
-
-func (s *SpecValidator) validateReferencedDefinitions() *Result {
- // Each referenceable definition must have references.
- defs := s.spec.Spec().Definitions
- if len(defs) == 0 {
- return nil
- }
-
- expected := make(map[string]struct{})
- for k := range defs {
- expected["#/definitions/"+jsonpointer.Escape(k)] = struct{}{}
- }
- for _, k := range s.analyzer.AllDefinitionReferences() {
- delete(expected, k)
- }
-
- if len(expected) == 0 {
- return nil
- }
-
- result := new(Result)
- for k := range expected {
- result.AddWarnings(unusedDefinitionMsg(k))
- }
- return result
-}
-
-func (s *SpecValidator) validateRequiredDefinitions() *Result {
- // Each property listed in the required array must be defined in the properties of the model
- res := pools.poolOfResults.BorrowResult()
-
-DEFINITIONS:
- for d, schema := range s.spec.Spec().Definitions {
- if schema.Required != nil { // Safeguard
- for _, pn := range schema.Required {
- red := s.validateRequiredProperties(pn, d, &schema) //#nosec
- res.Merge(red)
- if !red.IsValid() && !s.Options.ContinueOnErrors {
- break DEFINITIONS // there is an error, let's stop that bleeding
- }
- }
- }
- }
- return res
-}
-
-func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Schema) *Result {
- // Takes care of recursive property definitions, which may be nested in additionalProperties schemas
- res := pools.poolOfResults.BorrowResult()
- propertyMatch := false
- patternMatch := false
- additionalPropertiesMatch := false
- isReadOnly := false
-
- // Regular properties
- if _, ok := v.Properties[path]; ok {
- propertyMatch = true
- isReadOnly = v.Properties[path].ReadOnly
- }
-
- // NOTE: patternProperties are not supported in swagger. Even so, we continue validation here
- // We check all defined patterns: if one regexp is invalid, croaks an error
- for pp, pv := range v.PatternProperties {
- re, err := compileRegexp(pp)
- if err != nil {
- res.AddErrors(invalidPatternMsg(pp, in))
- } else if re.MatchString(path) {
- patternMatch = true
- if !propertyMatch {
- isReadOnly = pv.ReadOnly
- }
- }
- }
-
- if !(propertyMatch || patternMatch) {
- if v.AdditionalProperties != nil {
- if v.AdditionalProperties.Allows && v.AdditionalProperties.Schema == nil {
- additionalPropertiesMatch = true
- } else if v.AdditionalProperties.Schema != nil {
- // additionalProperties as a schema are supported in swagger
- // recursively validates additionalProperties schema
- // TODO : anyOf, allOf, oneOf like in schemaPropsValidator
- red := s.validateRequiredProperties(path, in, v.AdditionalProperties.Schema)
- if red.IsValid() {
- additionalPropertiesMatch = true
- if !propertyMatch && !patternMatch {
- isReadOnly = v.AdditionalProperties.Schema.ReadOnly
- }
- }
- res.Merge(red)
- }
- }
- }
-
- if !(propertyMatch || patternMatch || additionalPropertiesMatch) {
- res.AddErrors(requiredButNotDefinedMsg(path, in))
- }
-
- if isReadOnly {
- res.AddWarnings(readOnlyAndRequiredMsg(in, path))
- }
- return res
-}
-
-func (s *SpecValidator) validateParameters() *Result {
- // - for each method, path is unique, regardless of path parameters
- // e.g. GET:/petstore/{id}, GET:/petstore/{pet}, GET:/petstore are
- // considered duplicate paths, if StrictPathParamUniqueness is enabled.
- // - each parameter should have a unique `name` and `type` combination
- // - each operation should have only 1 parameter of type body
- // - there must be at most 1 parameter in body
- // - parameters with pattern property must specify valid patterns
- // - $ref in parameters must resolve
- // - path param must be required
- res := pools.poolOfResults.BorrowResult()
- rexGarbledPathSegment := mustCompileRegexp(`.*[{}\s]+.*`)
- for method, pi := range s.expandedAnalyzer().Operations() {
- methodPaths := make(map[string]map[string]string)
- for path, op := range pi {
- if s.Options.StrictPathParamUniqueness {
- pathToAdd := pathHelp.stripParametersInPath(path)
-
- // Warn on garbled path after param stripping
- if rexGarbledPathSegment.MatchString(pathToAdd) {
- res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd))
- }
-
- // Check uniqueness of stripped paths
- if _, found := methodPaths[method][pathToAdd]; found {
-
- // Sort names for stable, testable output
- if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 {
- res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd]))
- } else {
- res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path))
- }
- } else {
- if _, found := methodPaths[method]; !found {
- methodPaths[method] = map[string]string{}
- }
- methodPaths[method][pathToAdd] = path // Original non stripped path
-
- }
- }
-
- var bodyParams []string
- var paramNames []string
- var hasForm, hasBody bool
-
- // Check parameters names uniqueness for operation
- // TODO: should be done after param expansion
- res.Merge(s.checkUniqueParams(path, method, op))
-
- // pick the root schema from the swagger specification which describes a parameter
- origSchema, ok := s.schema.Definitions["parameter"]
- if !ok {
- panic("unexpected swagger schema: missing #/definitions/parameter")
- }
- // clone it once to avoid expanding a global schema (e.g. swagger spec)
- paramSchema, err := deepCloneSchema(origSchema)
- if err != nil {
- panic(fmt.Errorf("can't clone schema: %v", err))
- }
-
- for _, pr := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
- // An expanded parameter must validate the Parameter schema (an unexpanded $ref always passes high-level schema validation)
- schv := newSchemaValidator(&paramSchema, s.schema, fmt.Sprintf("%s.%s.parameters.%s", path, method, pr.Name), s.KnownFormats, s.schemaOptions)
- obj := swag.ToDynamicJSON(pr)
- res.Merge(schv.Validate(obj))
-
- // Validate pattern regexp for parameters with a Pattern property
- if _, err := compileRegexp(pr.Pattern); err != nil {
- res.AddErrors(invalidPatternInParamMsg(op.ID, pr.Name, pr.Pattern))
- }
-
- // There must be at most one parameter in body: list them all
- if pr.In == swaggerBody {
- bodyParams = append(bodyParams, fmt.Sprintf("%q", pr.Name))
- hasBody = true
- }
-
- if pr.In == "path" {
- paramNames = append(paramNames, pr.Name)
- // A param declared in path must have the required: true property
- if !pr.Required {
- res.AddErrors(pathParamRequiredMsg(op.ID, pr.Name))
- }
- }
-
- if pr.In == "formData" {
- hasForm = true
- }
-
- if !(pr.Type == numberType || pr.Type == integerType) &&
- (pr.Maximum != nil || pr.Minimum != nil || pr.MultipleOf != nil) {
- // A non-numeric parameter has validation keywords for numeric instances (number and integer)
- res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
- }
-
- if !(pr.Type == stringType) &&
- // A non-string parameter has validation keywords for strings
- (pr.MaxLength != nil || pr.MinLength != nil || pr.Pattern != "") {
- res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
- }
-
- if !(pr.Type == arrayType) &&
- // A non-array parameter has validation keywords for arrays
- (pr.MaxItems != nil || pr.MinItems != nil || pr.UniqueItems) {
- res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
- }
- }
-
- // In:formData and In:body are mutually exclusive
- if hasBody && hasForm {
- res.AddErrors(bothFormDataAndBodyMsg(op.ID))
- }
- // There must be at most one body param
- // Accurately report situations when more than 1 body param is declared (possibly unnamed)
- if len(bodyParams) > 1 {
- sort.Strings(bodyParams)
- res.AddErrors(multipleBodyParamMsg(op.ID, bodyParams))
- }
-
- // Check uniqueness of parameters in path
- paramsInPath := pathHelp.extractPathParams(path)
- for i, p := range paramsInPath {
- for j, q := range paramsInPath {
- if p == q && i > j {
- res.AddErrors(pathParamNotUniqueMsg(path, p, q))
- break
- }
- }
- }
-
- // Warns about possible malformed params in path
- rexGarbledParam := mustCompileRegexp(`{.*[{}\s]+.*}`)
- for _, p := range paramsInPath {
- if rexGarbledParam.MatchString(p) {
- res.AddWarnings(pathParamGarbledMsg(path, p))
- }
- }
-
- // Match params from path vs params from params section
- res.Merge(s.validatePathParamPresence(path, paramsInPath, paramNames))
- }
- }
- return res
-}
-
-func (s *SpecValidator) validateReferencesValid() *Result {
- // each reference must point to a valid object
- res := pools.poolOfResults.BorrowResult()
- for _, r := range s.analyzer.AllRefs() {
- if !r.IsValidURI(s.spec.SpecFilePath()) { // Safeguard - spec should always yield a valid URI
- res.AddErrors(invalidRefMsg(r.String()))
- }
- }
- if !res.HasErrors() {
- // NOTE: with default settings, loads.Document.Expanded()
- // stops on first error. Anyhow, the expand option to continue
- // on errors fails to report errors at all.
- exp, err := s.spec.Expanded()
- if err != nil {
- res.AddErrors(unresolvedReferencesMsg(err))
- }
- s.expanded = exp
- }
- return res
-}
-
-func (s *SpecValidator) checkUniqueParams(path, method string, op *spec.Operation) *Result {
- // Check for duplicate parameters declaration in param section.
- // Each parameter should have a unique `name` and `type` combination
- // NOTE: this could be factorized in analysis (when constructing the params map)
- // However, there are some issues with such a factorization:
- // - analysis does not seem to fully expand params
- // - param keys may be altered by x-go-name
- res := pools.poolOfResults.BorrowResult()
- pnames := make(map[string]struct{})
-
- if op.Parameters != nil { // Safeguard
- for _, ppr := range op.Parameters {
- var ok bool
- pr, red := paramHelp.resolveParam(path, method, op.ID, &ppr, s) //#nosec
- res.Merge(red)
-
- if pr != nil && pr.Name != "" { // params with an empty name do not participate in the check
- key := fmt.Sprintf("%s#%s", pr.In, pr.Name)
-
- if _, ok = pnames[key]; ok {
- res.AddErrors(duplicateParamNameMsg(pr.In, pr.Name, op.ID))
- }
- pnames[key] = struct{}{}
- }
- }
- }
- return res
-}
-
-// SetContinueOnErrors sets the ContinueOnErrors option for this validator.
-func (s *SpecValidator) SetContinueOnErrors(c bool) {
- s.Options.ContinueOnErrors = c
-}
-
-// expandedAnalyzer returns expanded.Analyzer when it is available.
-// otherwise just analyzer.
-func (s *SpecValidator) expandedAnalyzer() *analysis.Spec {
- if s.expanded != nil && s.expanded.Analyzer != nil {
- return s.expanded.Analyzer
- }
- return s.analyzer
-}
-
-func deepCloneSchema(src spec.Schema) (spec.Schema, error) {
- var b bytes.Buffer
- if err := gob.NewEncoder(&b).Encode(src); err != nil {
- return spec.Schema{}, err
- }
-
- var dst spec.Schema
- if err := gob.NewDecoder(&b).Decode(&dst); err != nil {
- return spec.Schema{}, err
- }
-
- return dst, nil
-}
diff --git a/vendor/github.com/go-openapi/validate/spec_messages.go b/vendor/github.com/go-openapi/validate/spec_messages.go
deleted file mode 100644
index 6d1f0f819c..0000000000
--- a/vendor/github.com/go-openapi/validate/spec_messages.go
+++ /dev/null
@@ -1,366 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "net/http"
-
- "github.com/go-openapi/errors"
-)
-
-// Error messages related to spec validation and returned as results.
-const (
- // ArrayRequiresItemsError ...
- ArrayRequiresItemsError = "%s for %q is a collection without an element type (array requires items definition)"
-
- // ArrayInParamRequiresItemsError ...
- ArrayInParamRequiresItemsError = "param %q for %q is a collection without an element type (array requires item definition)"
-
- // ArrayInHeaderRequiresItemsError ...
- ArrayInHeaderRequiresItemsError = "header %q for %q is a collection without an element type (array requires items definition)"
-
- // BothFormDataAndBodyError indicates that an operation specifies both a body and a formData parameter, which is forbidden
- BothFormDataAndBodyError = "operation %q has both formData and body parameters. Only one such In: type may be used for a given operation"
-
- // CannotResolveReferenceError when a $ref could not be resolved
- CannotResolveReferenceError = "could not resolve reference in %s to $ref %s: %v"
-
- // CircularAncestryDefinitionError ...
- CircularAncestryDefinitionError = "definition %q has circular ancestry: %v"
-
- // DefaultValueDoesNotValidateError results from an invalid default value provided
- DefaultValueDoesNotValidateError = "default value for %s in %s does not validate its schema"
-
- // DefaultValueItemsDoesNotValidateError results from an invalid default value provided for Items
- DefaultValueItemsDoesNotValidateError = "default value for %s.items in %s does not validate its schema"
-
- // DefaultValueHeaderDoesNotValidateError results from an invalid default value provided in header
- DefaultValueHeaderDoesNotValidateError = "in operation %q, default value in header %s for %s does not validate its schema"
-
- // DefaultValueHeaderItemsDoesNotValidateError results from an invalid default value provided in header.items
- DefaultValueHeaderItemsDoesNotValidateError = "in operation %q, default value in header.items %s for %s does not validate its schema"
-
- // DefaultValueInDoesNotValidateError ...
- DefaultValueInDoesNotValidateError = "in operation %q, default value in %s does not validate its schema"
-
- // DuplicateParamNameError ...
- DuplicateParamNameError = "duplicate parameter name %q for %q in operation %q"
-
- // DuplicatePropertiesError ...
- DuplicatePropertiesError = "definition %q contains duplicate properties: %v"
-
- // ExampleValueDoesNotValidateError results from an invalid example value provided
- ExampleValueDoesNotValidateError = "example value for %s in %s does not validate its schema"
-
- // ExampleValueItemsDoesNotValidateError results from an invalid example value provided for Items
- ExampleValueItemsDoesNotValidateError = "example value for %s.items in %s does not validate its schema"
-
- // ExampleValueHeaderDoesNotValidateError results from an invalid example value provided in header
- ExampleValueHeaderDoesNotValidateError = "in operation %q, example value in header %s for %s does not validate its schema"
-
- // ExampleValueHeaderItemsDoesNotValidateError results from an invalid example value provided in header.items
- ExampleValueHeaderItemsDoesNotValidateError = "in operation %q, example value in header.items %s for %s does not validate its schema"
-
- // ExampleValueInDoesNotValidateError ...
- ExampleValueInDoesNotValidateError = "in operation %q, example value in %s does not validate its schema"
-
- // EmptyPathParameterError means that a path parameter was found empty (e.g. "{}")
- EmptyPathParameterError = "%q contains an empty path parameter"
-
- // InvalidDocumentError states that spec validation only processes spec.Document objects
- InvalidDocumentError = "spec validator can only validate spec.Document objects"
-
- // InvalidItemsPatternError indicates an Items definition with invalid pattern
- InvalidItemsPatternError = "%s for %q has invalid items pattern: %q"
-
- // InvalidParameterDefinitionError indicates an error detected on a parameter definition
- InvalidParameterDefinitionError = "invalid definition for parameter %s in %s in operation %q"
-
-	// InvalidParameterDefinitionAsSchemaError indicates an error detected on a parameter definition, which was mistaken for a schema definition.
- // Most likely, this situation is encountered whenever a $ref has been added as a sibling of the parameter definition.
- InvalidParameterDefinitionAsSchemaError = "invalid definition as Schema for parameter %s in %s in operation %q"
-
- // InvalidPatternError ...
- InvalidPatternError = "pattern %q is invalid in %s"
-
- // InvalidPatternInError indicates an invalid pattern in a schema or items definition
- InvalidPatternInError = "%s in %s has invalid pattern: %q"
-
- // InvalidPatternInHeaderError indicates a header definition with an invalid pattern
- InvalidPatternInHeaderError = "in operation %q, header %s for %s has invalid pattern %q: %v"
-
- // InvalidPatternInParamError ...
- InvalidPatternInParamError = "operation %q has invalid pattern in param %q: %q"
-
- // InvalidReferenceError indicates that a $ref property could not be resolved
- InvalidReferenceError = "invalid ref %q"
-
-	// InvalidResponseDefinitionAsSchemaError indicates an error detected on a response definition, which was mistaken for a schema definition.
- // Most likely, this situation is encountered whenever a $ref has been added as a sibling of the response definition.
- InvalidResponseDefinitionAsSchemaError = "invalid definition as Schema for response %s in %s"
-
-	// MultipleBodyParamError indicates that an operation specifies multiple parameters with in: body
- MultipleBodyParamError = "operation %q has more than 1 body param: %v"
-
- // NonUniqueOperationIDError indicates that the same operationId has been specified several times
- NonUniqueOperationIDError = "%q is defined %d times"
-
-	// NoParameterInPathError indicates that a path parameter has no matching parameter definition
- NoParameterInPathError = "path param %q has no parameter definition"
-
- // NoValidPathErrorOrWarning indicates that no single path could be validated. If Paths is empty, this message is only a warning.
- NoValidPathErrorOrWarning = "spec has no valid path defined"
-
- // NoValidResponseError indicates that no valid response description could be found for an operation
- NoValidResponseError = "operation %q has no valid response"
-
- // PathOverlapError ...
- PathOverlapError = "path %s overlaps with %s"
-
- // PathParamNotInPathError indicates that a parameter specified with in: path was not found in the path specification
- PathParamNotInPathError = "path param %q is not present in path %q"
-
- // PathParamNotUniqueError ...
- PathParamNotUniqueError = "params in path %q must be unique: %q conflicts with %q"
-
-	// PathParamRequiredError ...
-	PathParamRequiredError = "in operation %q, path param %q must be declared as required"
-
- // RefNotAllowedInHeaderError indicates a $ref was found in a header definition, which is not allowed by Swagger
-	RefNotAllowedInHeaderError = "IMPORTANT! in %q: $ref is not allowed in headers. In context for header %q%s"
-
- // RequiredButNotDefinedError ...
- RequiredButNotDefinedError = "%q is present in required but not defined as property in definition %q"
-
-	// SomeParametersBrokenError indicates that some parameters could not be resolved, which might result in only partial checks being carried out
-	SomeParametersBrokenError = "some parameter definitions are broken in %q.%s. Cannot carry out full checks on parameters for operation %s"
-
- // UnresolvedReferencesError indicates that at least one $ref could not be resolved
- UnresolvedReferencesError = "some references could not be resolved in spec. First found: %v"
-)
-
-// Warning messages related to spec validation and returned as results
-const (
-	// ExamplesWithoutSchemaWarning indicates that examples are provided for a response, but no schema to validate the example against
- ExamplesWithoutSchemaWarning = "Examples provided without schema in operation %q, %s"
-
-	// ExamplesMimeNotSupportedWarning indicates that examples are provided with a mime type different than application/json, which
-	// the validator does not support yet
- ExamplesMimeNotSupportedWarning = "No validation attempt for examples for media types other than application/json, in operation %q, %s"
-
- // PathParamGarbledWarning ...
-	PathParamGarbledWarning = "in path %q, param %q contains {,} or white space. Albeit not strictly illegal, this is probably not what you want"
-
- // ParamValidationTypeMismatch indicates that parameter has validation which does not match its type
- ParamValidationTypeMismatch = "validation keywords of parameter %q in path %q don't match its type %s"
-
- // PathStrippedParamGarbledWarning ...
-	PathStrippedParamGarbledWarning = "path stripped from path parameters %s contains {,} or white space. This is probably not what you want."
-
- // ReadOnlyAndRequiredWarning ...
- ReadOnlyAndRequiredWarning = "Required property %s in %q should not be marked as both required and readOnly"
-
- // RefShouldNotHaveSiblingsWarning indicates that a $ref was found with a sibling definition. This results in the $ref taking over its siblings,
- // which is most likely not wanted.
- RefShouldNotHaveSiblingsWarning = "$ref property should have no sibling in %q.%s"
-
- // RequiredHasDefaultWarning indicates that a required parameter property should not have a default
- RequiredHasDefaultWarning = "%s in %s has a default value and is required as parameter"
-
- // UnusedDefinitionWarning ...
- UnusedDefinitionWarning = "definition %q is not used anywhere"
-
- // UnusedParamWarning ...
- UnusedParamWarning = "parameter %q is not used anywhere"
-
- // UnusedResponseWarning ...
- UnusedResponseWarning = "response %q is not used anywhere"
-
- InvalidObject = "expected an object in %q.%s"
-)
-
-// Additional error codes
-const (
- // InternalErrorCode reports an internal technical error
- InternalErrorCode = http.StatusInternalServerError
- // NotFoundErrorCode indicates that a resource (e.g. a $ref) could not be found
- NotFoundErrorCode = http.StatusNotFound
-)
-
-func invalidDocumentMsg() errors.Error {
- return errors.New(InternalErrorCode, InvalidDocumentError)
-}
-func invalidRefMsg(path string) errors.Error {
- return errors.New(NotFoundErrorCode, InvalidReferenceError, path)
-}
-func unresolvedReferencesMsg(err error) errors.Error {
- return errors.New(errors.CompositeErrorCode, UnresolvedReferencesError, err)
-}
-func noValidPathMsg() errors.Error {
- return errors.New(errors.CompositeErrorCode, NoValidPathErrorOrWarning)
-}
-func emptyPathParameterMsg(path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, EmptyPathParameterError, path)
-}
-func nonUniqueOperationIDMsg(path string, i int) errors.Error {
- return errors.New(errors.CompositeErrorCode, NonUniqueOperationIDError, path, i)
-}
-func circularAncestryDefinitionMsg(path string, args interface{}) errors.Error {
- return errors.New(errors.CompositeErrorCode, CircularAncestryDefinitionError, path, args)
-}
-func duplicatePropertiesMsg(path string, args interface{}) errors.Error {
- return errors.New(errors.CompositeErrorCode, DuplicatePropertiesError, path, args)
-}
-func pathParamNotInPathMsg(path, param string) errors.Error {
- return errors.New(errors.CompositeErrorCode, PathParamNotInPathError, param, path)
-}
-func arrayRequiresItemsMsg(path, operation string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ArrayRequiresItemsError, path, operation)
-}
-func arrayInParamRequiresItemsMsg(path, operation string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ArrayInParamRequiresItemsError, path, operation)
-}
-func arrayInHeaderRequiresItemsMsg(path, operation string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ArrayInHeaderRequiresItemsError, path, operation)
-}
-func invalidItemsPatternMsg(path, operation, pattern string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidItemsPatternError, path, operation, pattern)
-}
-func invalidPatternMsg(pattern, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidPatternError, pattern, path)
-}
-func requiredButNotDefinedMsg(path, definition string) errors.Error {
- return errors.New(errors.CompositeErrorCode, RequiredButNotDefinedError, path, definition)
-}
-func pathParamGarbledMsg(path, param string) errors.Error {
- return errors.New(errors.CompositeErrorCode, PathParamGarbledWarning, path, param)
-}
-func pathStrippedParamGarbledMsg(path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, PathStrippedParamGarbledWarning, path)
-}
-func pathOverlapMsg(path, arg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, PathOverlapError, path, arg)
-}
-func invalidPatternInParamMsg(operation, param, pattern string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidPatternInParamError, operation, param, pattern)
-}
-func pathParamRequiredMsg(operation, param string) errors.Error {
- return errors.New(errors.CompositeErrorCode, PathParamRequiredError, operation, param)
-}
-func bothFormDataAndBodyMsg(operation string) errors.Error {
- return errors.New(errors.CompositeErrorCode, BothFormDataAndBodyError, operation)
-}
-func multipleBodyParamMsg(operation string, args interface{}) errors.Error {
- return errors.New(errors.CompositeErrorCode, MultipleBodyParamError, operation, args)
-}
-func pathParamNotUniqueMsg(path, param, arg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, PathParamNotUniqueError, path, param, arg)
-}
-func duplicateParamNameMsg(path, param, operation string) errors.Error {
- return errors.New(errors.CompositeErrorCode, DuplicateParamNameError, param, path, operation)
-}
-func unusedParamMsg(arg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, UnusedParamWarning, arg)
-}
-func unusedDefinitionMsg(arg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, UnusedDefinitionWarning, arg)
-}
-func unusedResponseMsg(arg string) errors.Error {
- return errors.New(errors.CompositeErrorCode, UnusedResponseWarning, arg)
-}
-func readOnlyAndRequiredMsg(path, param string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ReadOnlyAndRequiredWarning, param, path)
-}
-func noParameterInPathMsg(param string) errors.Error {
- return errors.New(errors.CompositeErrorCode, NoParameterInPathError, param)
-}
-func requiredHasDefaultMsg(param, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, RequiredHasDefaultWarning, param, path)
-}
-func defaultValueDoesNotValidateMsg(param, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, DefaultValueDoesNotValidateError, param, path)
-}
-func defaultValueItemsDoesNotValidateMsg(param, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, DefaultValueItemsDoesNotValidateError, param, path)
-}
-func noValidResponseMsg(operation string) errors.Error {
- return errors.New(errors.CompositeErrorCode, NoValidResponseError, operation)
-}
-func defaultValueHeaderDoesNotValidateMsg(operation, header, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, DefaultValueHeaderDoesNotValidateError, operation, header, path)
-}
-func defaultValueHeaderItemsDoesNotValidateMsg(operation, header, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, DefaultValueHeaderItemsDoesNotValidateError, operation, header, path)
-}
-func invalidPatternInHeaderMsg(operation, header, path, pattern string, args interface{}) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidPatternInHeaderError, operation, header, path, pattern, args)
-}
-func invalidPatternInMsg(path, in, pattern string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidPatternInError, path, in, pattern)
-}
-func defaultValueInDoesNotValidateMsg(operation, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, DefaultValueInDoesNotValidateError, operation, path)
-}
-func exampleValueDoesNotValidateMsg(param, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExampleValueDoesNotValidateError, param, path)
-}
-func exampleValueItemsDoesNotValidateMsg(param, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExampleValueItemsDoesNotValidateError, param, path)
-}
-func exampleValueHeaderDoesNotValidateMsg(operation, header, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExampleValueHeaderDoesNotValidateError, operation, header, path)
-}
-func exampleValueHeaderItemsDoesNotValidateMsg(operation, header, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExampleValueHeaderItemsDoesNotValidateError, operation, header, path)
-}
-func exampleValueInDoesNotValidateMsg(operation, path string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExampleValueInDoesNotValidateError, operation, path)
-}
-func examplesWithoutSchemaMsg(operation, response string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExamplesWithoutSchemaWarning, operation, response)
-}
-func examplesMimeNotSupportedMsg(operation, response string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ExamplesMimeNotSupportedWarning, operation, response)
-}
-func refNotAllowedInHeaderMsg(path, header, ref string) errors.Error {
- return errors.New(errors.CompositeErrorCode, RefNotAllowedInHeaderError, path, header, ref)
-}
-func cannotResolveRefMsg(path, ref string, err error) errors.Error {
- return errors.New(errors.CompositeErrorCode, CannotResolveReferenceError, path, ref, err)
-}
-func invalidParameterDefinitionMsg(path, method, operationID string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidParameterDefinitionError, path, method, operationID)
-}
-func invalidParameterDefinitionAsSchemaMsg(path, method, operationID string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidParameterDefinitionAsSchemaError, path, method, operationID)
-}
-func parameterValidationTypeMismatchMsg(param, path, typ string) errors.Error {
- return errors.New(errors.CompositeErrorCode, ParamValidationTypeMismatch, param, path, typ)
-}
-func invalidObjectMsg(path, in string) errors.Error {
- return errors.New(errors.CompositeErrorCode, InvalidObject, path, in)
-}
-
-// disabled
-//
-// func invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error {
-// return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method)
-// }
-func someParametersBrokenMsg(path, method, operationID string) errors.Error {
- return errors.New(errors.CompositeErrorCode, SomeParametersBrokenError, path, method, operationID)
-}
-func refShouldNotHaveSiblingsMsg(path, operationID string) errors.Error {
- return errors.New(errors.CompositeErrorCode, RefShouldNotHaveSiblingsWarning, operationID, path)
-}
diff --git a/vendor/github.com/go-openapi/validate/type.go b/vendor/github.com/go-openapi/validate/type.go
deleted file mode 100644
index f87abb3d56..0000000000
--- a/vendor/github.com/go-openapi/validate/type.go
+++ /dev/null
@@ -1,213 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "reflect"
- "strings"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-type typeValidator struct {
- Path string
- In string
- Type spec.StringOrArray
- Nullable bool
- Format string
- Options *SchemaValidatorOptions
-}
-
-func newTypeValidator(path, in string, typ spec.StringOrArray, nullable bool, format string, opts *SchemaValidatorOptions) *typeValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var t *typeValidator
- if opts.recycleValidators {
- t = pools.poolOfTypeValidators.BorrowValidator()
- } else {
- t = new(typeValidator)
- }
-
- t.Path = path
- t.In = in
- t.Type = typ
- t.Nullable = nullable
- t.Format = format
- t.Options = opts
-
- return t
-}
-
-func (t *typeValidator) schemaInfoForType(data interface{}) (string, string) {
- // internal type to JSON type with swagger 2.0 format (with go-openapi/strfmt extensions),
- // see https://github.com/go-openapi/strfmt/blob/master/README.md
- // TODO: this switch really is some sort of reverse lookup for formats. It should be provided by strfmt.
- switch data.(type) {
- case []byte, strfmt.Base64, *strfmt.Base64:
- return stringType, stringFormatByte
- case strfmt.CreditCard, *strfmt.CreditCard:
- return stringType, stringFormatCreditCard
- case strfmt.Date, *strfmt.Date:
- return stringType, stringFormatDate
- case strfmt.DateTime, *strfmt.DateTime:
- return stringType, stringFormatDateTime
- case strfmt.Duration, *strfmt.Duration:
- return stringType, stringFormatDuration
- case swag.File, *swag.File:
- return fileType, ""
- case strfmt.Email, *strfmt.Email:
- return stringType, stringFormatEmail
- case strfmt.HexColor, *strfmt.HexColor:
- return stringType, stringFormatHexColor
- case strfmt.Hostname, *strfmt.Hostname:
- return stringType, stringFormatHostname
- case strfmt.IPv4, *strfmt.IPv4:
- return stringType, stringFormatIPv4
- case strfmt.IPv6, *strfmt.IPv6:
- return stringType, stringFormatIPv6
- case strfmt.ISBN, *strfmt.ISBN:
- return stringType, stringFormatISBN
- case strfmt.ISBN10, *strfmt.ISBN10:
- return stringType, stringFormatISBN10
- case strfmt.ISBN13, *strfmt.ISBN13:
- return stringType, stringFormatISBN13
- case strfmt.MAC, *strfmt.MAC:
- return stringType, stringFormatMAC
- case strfmt.ObjectId, *strfmt.ObjectId:
- return stringType, stringFormatBSONObjectID
- case strfmt.Password, *strfmt.Password:
- return stringType, stringFormatPassword
- case strfmt.RGBColor, *strfmt.RGBColor:
- return stringType, stringFormatRGBColor
- case strfmt.SSN, *strfmt.SSN:
- return stringType, stringFormatSSN
- case strfmt.URI, *strfmt.URI:
- return stringType, stringFormatURI
- case strfmt.UUID, *strfmt.UUID:
- return stringType, stringFormatUUID
- case strfmt.UUID3, *strfmt.UUID3:
- return stringType, stringFormatUUID3
- case strfmt.UUID4, *strfmt.UUID4:
- return stringType, stringFormatUUID4
- case strfmt.UUID5, *strfmt.UUID5:
- return stringType, stringFormatUUID5
- // TODO: missing binary (io.ReadCloser)
- // TODO: missing json.Number
- default:
- val := reflect.ValueOf(data)
- tpe := val.Type()
- switch tpe.Kind() { //nolint:exhaustive
- case reflect.Bool:
- return booleanType, ""
- case reflect.String:
- return stringType, ""
- case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint8, reflect.Uint16, reflect.Uint32:
- // NOTE: that is the spec. With go-openapi, is that not uint32 for unsigned integers?
- return integerType, integerFormatInt32
- case reflect.Int, reflect.Int64, reflect.Uint, reflect.Uint64:
- return integerType, integerFormatInt64
- case reflect.Float32:
- // NOTE: is that not numberFormatFloat?
- return numberType, numberFormatFloat32
- case reflect.Float64:
- // NOTE: is that not "double"?
- return numberType, numberFormatFloat64
- // NOTE: go arrays (reflect.Array) are not supported (fixed length)
- case reflect.Slice:
- return arrayType, ""
- case reflect.Map, reflect.Struct:
- return objectType, ""
- case reflect.Interface:
- // What to do here?
- panic("dunno what to do here")
- case reflect.Ptr:
- return t.schemaInfoForType(reflect.Indirect(val).Interface())
- }
- }
- return "", ""
-}
-
-func (t *typeValidator) SetPath(path string) {
- t.Path = path
-}
-
-func (t *typeValidator) Applies(source interface{}, _ reflect.Kind) bool {
- // typeValidator applies to Schema, Parameter and Header objects
- switch source.(type) {
- case *spec.Schema:
- case *spec.Parameter:
- case *spec.Header:
- default:
- return false
- }
-
- return (len(t.Type) > 0 || t.Format != "")
-}
-
-func (t *typeValidator) Validate(data interface{}) *Result {
- if t.Options.recycleValidators {
- defer func() {
- t.redeem()
- }()
- }
-
- if data == nil {
-		// nil or zero value for the passed structure requires Type: null
- if len(t.Type) > 0 && !t.Type.Contains(nullType) && !t.Nullable { // TODO: if a property is not required it also passes this
- return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), nullType), t.Options.recycleResult)
- }
-
- return emptyResult
- }
-
- // check if the type matches, should be used in every validator chain as first item
- val := reflect.Indirect(reflect.ValueOf(data))
- kind := val.Kind()
-
- // infer schema type (JSON) and format from passed data type
- schType, format := t.schemaInfoForType(data)
-
- // check numerical types
- // TODO: check unsigned ints
- // TODO: check json.Number (see schema.go)
- isLowerInt := t.Format == integerFormatInt64 && format == integerFormatInt32
- isLowerFloat := t.Format == numberFormatFloat64 && format == numberFormatFloat32
- isFloatInt := schType == numberType && swag.IsFloat64AJSONInteger(val.Float()) && t.Type.Contains(integerType)
- isIntFloat := schType == integerType && t.Type.Contains(numberType)
-
- if kind != reflect.String && kind != reflect.Slice && t.Format != "" && !(t.Type.Contains(schType) || format == t.Format || isFloatInt || isIntFloat || isLowerInt || isLowerFloat) {
- // TODO: test case
- return errorHelp.sErr(errors.InvalidType(t.Path, t.In, t.Format, format), t.Options.recycleResult)
- }
-
- if !(t.Type.Contains(numberType) || t.Type.Contains(integerType)) && t.Format != "" && (kind == reflect.String || kind == reflect.Slice) {
- return emptyResult
- }
-
- if !(t.Type.Contains(schType) || isFloatInt || isIntFloat) {
- return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), schType), t.Options.recycleResult)
- }
-
- return emptyResult
-}
-
-func (t *typeValidator) redeem() {
- pools.poolOfTypeValidators.RedeemValidator(t)
-}
diff --git a/vendor/github.com/go-openapi/validate/update-fixtures.sh b/vendor/github.com/go-openapi/validate/update-fixtures.sh
deleted file mode 100644
index 21b06e2b09..0000000000
--- a/vendor/github.com/go-openapi/validate/update-fixtures.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-set -eu -o pipefail
-dir=$(git rev-parse --show-toplevel)
-scratch=$(mktemp -d -t tmp.XXXXXXXXXX)
-
-function finish {
- rm -rf "$scratch"
-}
-trap finish EXIT SIGHUP SIGINT SIGTERM
-
-cd "$scratch"
-git clone https://github.com/json-schema-org/JSON-Schema-Test-Suite Suite
-cp -r Suite/tests/draft4/* "$dir/fixtures/jsonschema_suite"
-cp -a Suite/remotes "$dir/fixtures/jsonschema_suite"
diff --git a/vendor/github.com/go-openapi/validate/validator.go b/vendor/github.com/go-openapi/validate/validator.go
deleted file mode 100644
index c083aecc9d..0000000000
--- a/vendor/github.com/go-openapi/validate/validator.go
+++ /dev/null
@@ -1,1051 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "fmt"
- "reflect"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-// An EntityValidator is an interface for things that can validate entities
-type EntityValidator interface {
- Validate(interface{}) *Result
-}
-
-type valueValidator interface {
- SetPath(path string)
- Applies(interface{}, reflect.Kind) bool
- Validate(interface{}) *Result
-}
-
-type itemsValidator struct {
- items *spec.Items
- root interface{}
- path string
- in string
- validators [6]valueValidator
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-func newItemsValidator(path, in string, items *spec.Items, root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *itemsValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var iv *itemsValidator
- if opts.recycleValidators {
- iv = pools.poolOfItemsValidators.BorrowValidator()
- } else {
- iv = new(itemsValidator)
- }
-
- iv.path = path
- iv.in = in
- iv.items = items
- iv.root = root
- iv.KnownFormats = formats
- iv.Options = opts
- iv.validators = [6]valueValidator{
- iv.typeValidator(),
- iv.stringValidator(),
- iv.formatValidator(),
- iv.numberValidator(),
- iv.sliceValidator(),
- iv.commonValidator(),
- }
- return iv
-}
-
-func (i *itemsValidator) Validate(index int, data interface{}) *Result {
- if i.Options.recycleValidators {
- defer func() {
- i.redeemChildren()
- i.redeem()
- }()
- }
-
- tpe := reflect.TypeOf(data)
- kind := tpe.Kind()
- var result *Result
- if i.Options.recycleResult {
- result = pools.poolOfResults.BorrowResult()
- } else {
- result = new(Result)
- }
-
- path := fmt.Sprintf("%s.%d", i.path, index)
-
- for idx, validator := range i.validators {
- if !validator.Applies(i.root, kind) {
- if i.Options.recycleValidators {
- // Validate won't be called, so relinquish this validator
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- i.validators[idx] = nil // prevents further (unsafe) usage
- }
-
- continue
- }
-
- validator.SetPath(path)
- err := validator.Validate(data)
- if i.Options.recycleValidators {
- i.validators[idx] = nil // prevents further (unsafe) usage
- }
- if err != nil {
- result.Inc()
- if err.HasErrors() {
- result.Merge(err)
-
- break
- }
-
- result.Merge(err)
- }
- }
-
- return result
-}
-
-func (i *itemsValidator) typeValidator() valueValidator {
- return newTypeValidator(
- i.path,
- i.in,
- spec.StringOrArray([]string{i.items.Type}),
- i.items.Nullable,
- i.items.Format,
- i.Options,
- )
-}
-
-func (i *itemsValidator) commonValidator() valueValidator {
- return newBasicCommonValidator(
- "",
- i.in,
- i.items.Default,
- i.items.Enum,
- i.Options,
- )
-}
-
-func (i *itemsValidator) sliceValidator() valueValidator {
- return newBasicSliceValidator(
- "",
- i.in,
- i.items.Default,
- i.items.MaxItems,
- i.items.MinItems,
- i.items.UniqueItems,
- i.items.Items,
- i.root,
- i.KnownFormats,
- i.Options,
- )
-}
-
-func (i *itemsValidator) numberValidator() valueValidator {
- return newNumberValidator(
- "",
- i.in,
- i.items.Default,
- i.items.MultipleOf,
- i.items.Maximum,
- i.items.ExclusiveMaximum,
- i.items.Minimum,
- i.items.ExclusiveMinimum,
- i.items.Type,
- i.items.Format,
- i.Options,
- )
-}
-
-func (i *itemsValidator) stringValidator() valueValidator {
- return newStringValidator(
- "",
- i.in,
- i.items.Default,
- false, // Required
- false, // AllowEmpty
- i.items.MaxLength,
- i.items.MinLength,
- i.items.Pattern,
- i.Options,
- )
-}
-
-func (i *itemsValidator) formatValidator() valueValidator {
- return newFormatValidator(
- "",
- i.in,
- i.items.Format,
- i.KnownFormats,
- i.Options,
- )
-}
-
-func (i *itemsValidator) redeem() {
- pools.poolOfItemsValidators.RedeemValidator(i)
-}
-
-func (i *itemsValidator) redeemChildren() {
- for idx, validator := range i.validators {
- if validator == nil {
- continue
- }
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- i.validators[idx] = nil // free up allocated children if not in pool
- }
-}
-
-type basicCommonValidator struct {
- Path string
- In string
- Default interface{}
- Enum []interface{}
- Options *SchemaValidatorOptions
-}
-
-func newBasicCommonValidator(path, in string, def interface{}, enum []interface{}, opts *SchemaValidatorOptions) *basicCommonValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var b *basicCommonValidator
- if opts.recycleValidators {
- b = pools.poolOfBasicCommonValidators.BorrowValidator()
- } else {
- b = new(basicCommonValidator)
- }
-
- b.Path = path
- b.In = in
- b.Default = def
- b.Enum = enum
- b.Options = opts
-
- return b
-}
-
-func (b *basicCommonValidator) SetPath(path string) {
- b.Path = path
-}
-
-func (b *basicCommonValidator) Applies(source interface{}, _ reflect.Kind) bool {
- switch source.(type) {
- case *spec.Parameter, *spec.Schema, *spec.Header:
- return true
- default:
- return false
- }
-}
-
-func (b *basicCommonValidator) Validate(data interface{}) (res *Result) {
- if b.Options.recycleValidators {
- defer func() {
- b.redeem()
- }()
- }
-
- if len(b.Enum) == 0 {
- return nil
- }
-
- for _, enumValue := range b.Enum {
- actualType := reflect.TypeOf(enumValue)
- if actualType == nil { // Safeguard
- continue
- }
-
- expectedValue := reflect.ValueOf(data)
- if expectedValue.IsValid() &&
- expectedValue.Type().ConvertibleTo(actualType) &&
- reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) {
- return nil
- }
- }
-
- return errorHelp.sErr(errors.EnumFail(b.Path, b.In, data, b.Enum), b.Options.recycleResult)
-}
-
-func (b *basicCommonValidator) redeem() {
- pools.poolOfBasicCommonValidators.RedeemValidator(b)
-}
-
-// A HeaderValidator has very limited subset of validations to apply
-type HeaderValidator struct {
- name string
- header *spec.Header
- validators [6]valueValidator
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-// NewHeaderValidator creates a new header validator object
-func NewHeaderValidator(name string, header *spec.Header, formats strfmt.Registry, options ...Option) *HeaderValidator {
- opts := new(SchemaValidatorOptions)
- for _, o := range options {
- o(opts)
- }
-
- return newHeaderValidator(name, header, formats, opts)
-}
-
-func newHeaderValidator(name string, header *spec.Header, formats strfmt.Registry, opts *SchemaValidatorOptions) *HeaderValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var p *HeaderValidator
- if opts.recycleValidators {
- p = pools.poolOfHeaderValidators.BorrowValidator()
- } else {
- p = new(HeaderValidator)
- }
-
- p.name = name
- p.header = header
- p.KnownFormats = formats
- p.Options = opts
- p.validators = [6]valueValidator{
- newTypeValidator(
- name,
- "header",
- spec.StringOrArray([]string{header.Type}),
- header.Nullable,
- header.Format,
- p.Options,
- ),
- p.stringValidator(),
- p.formatValidator(),
- p.numberValidator(),
- p.sliceValidator(),
- p.commonValidator(),
- }
-
- return p
-}
-
-// Validate the value of the header against its schema
-func (p *HeaderValidator) Validate(data interface{}) *Result {
- if p.Options.recycleValidators {
- defer func() {
- p.redeemChildren()
- p.redeem()
- }()
- }
-
- if data == nil {
- return nil
- }
-
- var result *Result
- if p.Options.recycleResult {
- result = pools.poolOfResults.BorrowResult()
- } else {
- result = new(Result)
- }
-
- tpe := reflect.TypeOf(data)
- kind := tpe.Kind()
-
- for idx, validator := range p.validators {
- if !validator.Applies(p.header, kind) {
- if p.Options.recycleValidators {
- // Validate won't be called, so relinquish this validator
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- p.validators[idx] = nil // prevents further (unsafe) usage
- }
-
- continue
- }
-
- err := validator.Validate(data)
- if p.Options.recycleValidators {
- p.validators[idx] = nil // prevents further (unsafe) usage
- }
- if err != nil {
- if err.HasErrors() {
- result.Merge(err)
- break
- }
- result.Merge(err)
- }
- }
-
- return result
-}
-
-func (p *HeaderValidator) commonValidator() valueValidator {
- return newBasicCommonValidator(
- p.name,
- "response",
- p.header.Default,
- p.header.Enum,
- p.Options,
- )
-}
-
-func (p *HeaderValidator) sliceValidator() valueValidator {
- return newBasicSliceValidator(
- p.name,
- "response",
- p.header.Default,
- p.header.MaxItems,
- p.header.MinItems,
- p.header.UniqueItems,
- p.header.Items,
- p.header,
- p.KnownFormats,
- p.Options,
- )
-}
-
-func (p *HeaderValidator) numberValidator() valueValidator {
- return newNumberValidator(
- p.name,
- "response",
- p.header.Default,
- p.header.MultipleOf,
- p.header.Maximum,
- p.header.ExclusiveMaximum,
- p.header.Minimum,
- p.header.ExclusiveMinimum,
- p.header.Type,
- p.header.Format,
- p.Options,
- )
-}
-
-func (p *HeaderValidator) stringValidator() valueValidator {
- return newStringValidator(
- p.name,
- "response",
- p.header.Default,
- true,
- false,
- p.header.MaxLength,
- p.header.MinLength,
- p.header.Pattern,
- p.Options,
- )
-}
-
-func (p *HeaderValidator) formatValidator() valueValidator {
- return newFormatValidator(
- p.name,
- "response",
- p.header.Format,
- p.KnownFormats,
- p.Options,
- )
-}
-
-func (p *HeaderValidator) redeem() {
- pools.poolOfHeaderValidators.RedeemValidator(p)
-}
-
-func (p *HeaderValidator) redeemChildren() {
- for idx, validator := range p.validators {
- if validator == nil {
- continue
- }
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- p.validators[idx] = nil // free up allocated children if not in pool
- }
-}
-
-// A ParamValidator has very limited subset of validations to apply
-type ParamValidator struct {
- param *spec.Parameter
- validators [6]valueValidator
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-// NewParamValidator creates a new param validator object
-func NewParamValidator(param *spec.Parameter, formats strfmt.Registry, options ...Option) *ParamValidator {
- opts := new(SchemaValidatorOptions)
- for _, o := range options {
- o(opts)
- }
-
- return newParamValidator(param, formats, opts)
-}
-
-func newParamValidator(param *spec.Parameter, formats strfmt.Registry, opts *SchemaValidatorOptions) *ParamValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var p *ParamValidator
- if opts.recycleValidators {
- p = pools.poolOfParamValidators.BorrowValidator()
- } else {
- p = new(ParamValidator)
- }
-
- p.param = param
- p.KnownFormats = formats
- p.Options = opts
- p.validators = [6]valueValidator{
- newTypeValidator(
- param.Name,
- param.In,
- spec.StringOrArray([]string{param.Type}),
- param.Nullable,
- param.Format,
- p.Options,
- ),
- p.stringValidator(),
- p.formatValidator(),
- p.numberValidator(),
- p.sliceValidator(),
- p.commonValidator(),
- }
-
- return p
-}
-
-// Validate the data against the description of the parameter
-func (p *ParamValidator) Validate(data interface{}) *Result {
- if data == nil {
- return nil
- }
-
- var result *Result
- if p.Options.recycleResult {
- result = pools.poolOfResults.BorrowResult()
- } else {
- result = new(Result)
- }
-
- tpe := reflect.TypeOf(data)
- kind := tpe.Kind()
-
- if p.Options.recycleValidators {
- defer func() {
- p.redeemChildren()
- p.redeem()
- }()
- }
-
- // TODO: validate type
- for idx, validator := range p.validators {
- if !validator.Applies(p.param, kind) {
- if p.Options.recycleValidators {
- // Validate won't be called, so relinquish this validator
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- p.validators[idx] = nil // prevents further (unsafe) usage
- }
-
- continue
- }
-
- err := validator.Validate(data)
- if p.Options.recycleValidators {
- p.validators[idx] = nil // prevents further (unsafe) usage
- }
- if err != nil {
- if err.HasErrors() {
- result.Merge(err)
- break
- }
- result.Merge(err)
- }
- }
-
- return result
-}
-
-func (p *ParamValidator) commonValidator() valueValidator {
- return newBasicCommonValidator(
- p.param.Name,
- p.param.In,
- p.param.Default,
- p.param.Enum,
- p.Options,
- )
-}
-
-func (p *ParamValidator) sliceValidator() valueValidator {
- return newBasicSliceValidator(
- p.param.Name,
- p.param.In,
- p.param.Default,
- p.param.MaxItems,
- p.param.MinItems,
- p.param.UniqueItems,
- p.param.Items,
- p.param,
- p.KnownFormats,
- p.Options,
- )
-}
-
-func (p *ParamValidator) numberValidator() valueValidator {
- return newNumberValidator(
- p.param.Name,
- p.param.In,
- p.param.Default,
- p.param.MultipleOf,
- p.param.Maximum,
- p.param.ExclusiveMaximum,
- p.param.Minimum,
- p.param.ExclusiveMinimum,
- p.param.Type,
- p.param.Format,
- p.Options,
- )
-}
-
-func (p *ParamValidator) stringValidator() valueValidator {
- return newStringValidator(
- p.param.Name,
- p.param.In,
- p.param.Default,
- p.param.Required,
- p.param.AllowEmptyValue,
- p.param.MaxLength,
- p.param.MinLength,
- p.param.Pattern,
- p.Options,
- )
-}
-
-func (p *ParamValidator) formatValidator() valueValidator {
- return newFormatValidator(
- p.param.Name,
- p.param.In,
- p.param.Format,
- p.KnownFormats,
- p.Options,
- )
-}
-
-func (p *ParamValidator) redeem() {
- pools.poolOfParamValidators.RedeemValidator(p)
-}
-
-func (p *ParamValidator) redeemChildren() {
- for idx, validator := range p.validators {
- if validator == nil {
- continue
- }
- if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
- redeemableChildren.redeemChildren()
- }
- if redeemable, ok := validator.(interface{ redeem() }); ok {
- redeemable.redeem()
- }
- p.validators[idx] = nil // free up allocated children if not in pool
- }
-}
-
-type basicSliceValidator struct {
- Path string
- In string
- Default interface{}
- MaxItems *int64
- MinItems *int64
- UniqueItems bool
- Items *spec.Items
- Source interface{}
- KnownFormats strfmt.Registry
- Options *SchemaValidatorOptions
-}
-
-func newBasicSliceValidator(
- path, in string,
- def interface{}, maxItems, minItems *int64, uniqueItems bool, items *spec.Items,
- source interface{}, formats strfmt.Registry,
- opts *SchemaValidatorOptions) *basicSliceValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var s *basicSliceValidator
- if opts.recycleValidators {
- s = pools.poolOfBasicSliceValidators.BorrowValidator()
- } else {
- s = new(basicSliceValidator)
- }
-
- s.Path = path
- s.In = in
- s.Default = def
- s.MaxItems = maxItems
- s.MinItems = minItems
- s.UniqueItems = uniqueItems
- s.Items = items
- s.Source = source
- s.KnownFormats = formats
- s.Options = opts
-
- return s
-}
-
-func (s *basicSliceValidator) SetPath(path string) {
- s.Path = path
-}
-
-func (s *basicSliceValidator) Applies(source interface{}, kind reflect.Kind) bool {
- switch source.(type) {
- case *spec.Parameter, *spec.Items, *spec.Header:
- return kind == reflect.Slice
- default:
- return false
- }
-}
-
-func (s *basicSliceValidator) Validate(data interface{}) *Result {
- if s.Options.recycleValidators {
- defer func() {
- s.redeem()
- }()
- }
- val := reflect.ValueOf(data)
-
- size := int64(val.Len())
- if s.MinItems != nil {
- if err := MinItems(s.Path, s.In, size, *s.MinItems); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
-
- if s.MaxItems != nil {
- if err := MaxItems(s.Path, s.In, size, *s.MaxItems); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
-
- if s.UniqueItems {
- if err := UniqueItems(s.Path, s.In, data); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
-
- if s.Items == nil {
- return nil
- }
-
- for i := 0; i < int(size); i++ {
- itemsValidator := newItemsValidator(s.Path, s.In, s.Items, s.Source, s.KnownFormats, s.Options)
- ele := val.Index(i)
- if err := itemsValidator.Validate(i, ele.Interface()); err != nil {
- if err.HasErrors() {
- return err
- }
- if err.wantsRedeemOnMerge {
- pools.poolOfResults.RedeemResult(err)
- }
- }
- }
-
- return nil
-}
-
-func (s *basicSliceValidator) redeem() {
- pools.poolOfBasicSliceValidators.RedeemValidator(s)
-}
-
-type numberValidator struct {
- Path string
- In string
- Default interface{}
- MultipleOf *float64
- Maximum *float64
- ExclusiveMaximum bool
- Minimum *float64
- ExclusiveMinimum bool
- // Allows for more accurate behavior regarding integers
- Type string
- Format string
- Options *SchemaValidatorOptions
-}
-
-func newNumberValidator(
- path, in string, def interface{},
- multipleOf, maximum *float64, exclusiveMaximum bool, minimum *float64, exclusiveMinimum bool,
- typ, format string,
- opts *SchemaValidatorOptions) *numberValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var n *numberValidator
- if opts.recycleValidators {
- n = pools.poolOfNumberValidators.BorrowValidator()
- } else {
- n = new(numberValidator)
- }
-
- n.Path = path
- n.In = in
- n.Default = def
- n.MultipleOf = multipleOf
- n.Maximum = maximum
- n.ExclusiveMaximum = exclusiveMaximum
- n.Minimum = minimum
- n.ExclusiveMinimum = exclusiveMinimum
- n.Type = typ
- n.Format = format
- n.Options = opts
-
- return n
-}
-
-func (n *numberValidator) SetPath(path string) {
- n.Path = path
-}
-
-func (n *numberValidator) Applies(source interface{}, kind reflect.Kind) bool {
- switch source.(type) {
- case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header:
- isInt := kind >= reflect.Int && kind <= reflect.Uint64
- isFloat := kind == reflect.Float32 || kind == reflect.Float64
- return isInt || isFloat
- default:
- return false
- }
-}
-
-// Validate provides a validator for generic JSON numbers.
-//
-// By default, numbers are internally represented as float64.
-// Formats float, or float32 may alter this behavior by mapping to float32.
-// A special validation process is followed for integers, with optional "format":
-// this is an attempt to provide a validation with native types.
-//
-// NOTE: since the constraint specified (boundary, multipleOf) is unmarshalled
-// as float64, loss of information remains possible (e.g. on very large integers).
-//
-// Since this value directly comes from the unmarshalling, it is not possible
-// at this stage of processing to check further and guarantee the correctness of such values.
-//
-// Normally, the JSON Number.MAX_SAFE_INTEGER (resp. Number.MIN_SAFE_INTEGER)
-// would check we do not get such a loss.
-//
-// If this is the case, replace AddErrors() by AddWarnings() and IsValid() by !HasWarnings().
-//
-// TODO: consider replacing boundary check errors by simple warnings.
-//
-// TODO: default boundaries with MAX_SAFE_INTEGER are not checked (specific to json.Number?)
-func (n *numberValidator) Validate(val interface{}) *Result {
- if n.Options.recycleValidators {
- defer func() {
- n.redeem()
- }()
- }
-
- var res, resMultiple, resMinimum, resMaximum *Result
- if n.Options.recycleResult {
- res = pools.poolOfResults.BorrowResult()
- } else {
- res = new(Result)
- }
-
- // Used only to attempt to validate constraint on value,
- // even though value or constraint specified do not match type and format
- data := valueHelp.asFloat64(val)
-
- // Is the provided value within the range of the specified numeric type and format?
- res.AddErrors(IsValueValidAgainstRange(val, n.Type, n.Format, "Checked", n.Path))
-
- if n.MultipleOf != nil {
- resMultiple = pools.poolOfResults.BorrowResult()
-
- // Is the constraint specifier within the range of the specific numeric type and format?
- resMultiple.AddErrors(IsValueValidAgainstRange(*n.MultipleOf, n.Type, n.Format, "MultipleOf", n.Path))
- if resMultiple.IsValid() {
- // Constraint validated with compatible types
- if err := MultipleOfNativeType(n.Path, n.In, val, *n.MultipleOf); err != nil {
- resMultiple.Merge(errorHelp.sErr(err, n.Options.recycleResult))
- }
- } else {
- // Constraint nevertheless validated, converted as general number
- if err := MultipleOf(n.Path, n.In, data, *n.MultipleOf); err != nil {
- resMultiple.Merge(errorHelp.sErr(err, n.Options.recycleResult))
- }
- }
- }
-
- if n.Maximum != nil {
- resMaximum = pools.poolOfResults.BorrowResult()
-
- // Is the constraint specifier within the range of the specific numeric type and format?
- resMaximum.AddErrors(IsValueValidAgainstRange(*n.Maximum, n.Type, n.Format, "Maximum boundary", n.Path))
- if resMaximum.IsValid() {
- // Constraint validated with compatible types
- if err := MaximumNativeType(n.Path, n.In, val, *n.Maximum, n.ExclusiveMaximum); err != nil {
- resMaximum.Merge(errorHelp.sErr(err, n.Options.recycleResult))
- }
- } else {
- // Constraint nevertheless validated, converted as general number
- if err := Maximum(n.Path, n.In, data, *n.Maximum, n.ExclusiveMaximum); err != nil {
- resMaximum.Merge(errorHelp.sErr(err, n.Options.recycleResult))
- }
- }
- }
-
- if n.Minimum != nil {
- resMinimum = pools.poolOfResults.BorrowResult()
-
- // Is the constraint specifier within the range of the specific numeric type and format?
- resMinimum.AddErrors(IsValueValidAgainstRange(*n.Minimum, n.Type, n.Format, "Minimum boundary", n.Path))
- if resMinimum.IsValid() {
- // Constraint validated with compatible types
- if err := MinimumNativeType(n.Path, n.In, val, *n.Minimum, n.ExclusiveMinimum); err != nil {
- resMinimum.Merge(errorHelp.sErr(err, n.Options.recycleResult))
- }
- } else {
- // Constraint nevertheless validated, converted as general number
- if err := Minimum(n.Path, n.In, data, *n.Minimum, n.ExclusiveMinimum); err != nil {
- resMinimum.Merge(errorHelp.sErr(err, n.Options.recycleResult))
- }
- }
- }
- res.Merge(resMultiple, resMinimum, resMaximum)
- res.Inc()
-
- return res
-}
-
-func (n *numberValidator) redeem() {
- pools.poolOfNumberValidators.RedeemValidator(n)
-}
-
-type stringValidator struct {
- Path string
- In string
- Default interface{}
- Required bool
- AllowEmptyValue bool
- MaxLength *int64
- MinLength *int64
- Pattern string
- Options *SchemaValidatorOptions
-}
-
-func newStringValidator(
- path, in string,
- def interface{}, required, allowEmpty bool, maxLength, minLength *int64, pattern string,
- opts *SchemaValidatorOptions) *stringValidator {
- if opts == nil {
- opts = new(SchemaValidatorOptions)
- }
-
- var s *stringValidator
- if opts.recycleValidators {
- s = pools.poolOfStringValidators.BorrowValidator()
- } else {
- s = new(stringValidator)
- }
-
- s.Path = path
- s.In = in
- s.Default = def
- s.Required = required
- s.AllowEmptyValue = allowEmpty
- s.MaxLength = maxLength
- s.MinLength = minLength
- s.Pattern = pattern
- s.Options = opts
-
- return s
-}
-
-func (s *stringValidator) SetPath(path string) {
- s.Path = path
-}
-
-func (s *stringValidator) Applies(source interface{}, kind reflect.Kind) bool {
- switch source.(type) {
- case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header:
- return kind == reflect.String
- default:
- return false
- }
-}
-
-func (s *stringValidator) Validate(val interface{}) *Result {
- if s.Options.recycleValidators {
- defer func() {
- s.redeem()
- }()
- }
-
- data, ok := val.(string)
- if !ok {
- return errorHelp.sErr(errors.InvalidType(s.Path, s.In, stringType, val), s.Options.recycleResult)
- }
-
- if s.Required && !s.AllowEmptyValue && (s.Default == nil || s.Default == "") {
- if err := RequiredString(s.Path, s.In, data); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
-
- if s.MaxLength != nil {
- if err := MaxLength(s.Path, s.In, data, *s.MaxLength); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
-
- if s.MinLength != nil {
- if err := MinLength(s.Path, s.In, data, *s.MinLength); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
-
- if s.Pattern != "" {
- if err := Pattern(s.Path, s.In, data, s.Pattern); err != nil {
- return errorHelp.sErr(err, s.Options.recycleResult)
- }
- }
- return nil
-}
-
-func (s *stringValidator) redeem() {
- pools.poolOfStringValidators.RedeemValidator(s)
-}
diff --git a/vendor/github.com/go-openapi/validate/values.go b/vendor/github.com/go-openapi/validate/values.go
deleted file mode 100644
index 5f6f5ee61e..0000000000
--- a/vendor/github.com/go-openapi/validate/values.go
+++ /dev/null
@@ -1,450 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package validate
-
-import (
- "context"
- "fmt"
- "reflect"
- "strings"
- "unicode/utf8"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// Enum validates if the data is a member of the enum
-func Enum(path, in string, data interface{}, enum interface{}) *errors.Validation {
- return EnumCase(path, in, data, enum, true)
-}
-
-// EnumCase validates if the data is a member of the enum and may respect case-sensitivity for strings
-func EnumCase(path, in string, data interface{}, enum interface{}, caseSensitive bool) *errors.Validation {
- val := reflect.ValueOf(enum)
- if val.Kind() != reflect.Slice {
- return nil
- }
-
- dataString := convertEnumCaseStringKind(data, caseSensitive)
- var values []interface{}
- for i := 0; i < val.Len(); i++ {
- ele := val.Index(i)
- enumValue := ele.Interface()
- if data != nil {
- if reflect.DeepEqual(data, enumValue) {
- return nil
- }
- enumString := convertEnumCaseStringKind(enumValue, caseSensitive)
- if dataString != nil && enumString != nil && strings.EqualFold(*dataString, *enumString) {
- return nil
- }
- actualType := reflect.TypeOf(enumValue)
- if actualType == nil { // Safeguard. Frankly, I don't know how we may get a nil
- continue
- }
- expectedValue := reflect.ValueOf(data)
- if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
- // Attempt comparison after type conversion
- if reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) {
- return nil
- }
- }
- }
- values = append(values, enumValue)
- }
- return errors.EnumFail(path, in, data, values)
-}
-
-// convertEnumCaseStringKind converts the value to a string if it is of kind string and case-insensitive comparison is enabled
-func convertEnumCaseStringKind(value interface{}, caseSensitive bool) *string {
- if caseSensitive {
- return nil
- }
-
- val := reflect.ValueOf(value)
- if val.Kind() != reflect.String {
- return nil
- }
-
- str := fmt.Sprintf("%v", value)
- return &str
-}
-
-// MinItems validates that there are at least n items in a slice
-func MinItems(path, in string, size, min int64) *errors.Validation {
- if size < min {
- return errors.TooFewItems(path, in, min, size)
- }
- return nil
-}
-
-// MaxItems validates that there are at most n items in a slice
-func MaxItems(path, in string, size, max int64) *errors.Validation {
- if size > max {
- return errors.TooManyItems(path, in, max, size)
- }
- return nil
-}
-
-// UniqueItems validates that the provided slice has unique elements
-func UniqueItems(path, in string, data interface{}) *errors.Validation {
- val := reflect.ValueOf(data)
- if val.Kind() != reflect.Slice {
- return nil
- }
- var unique []interface{}
- for i := 0; i < val.Len(); i++ {
- v := val.Index(i).Interface()
- for _, u := range unique {
- if reflect.DeepEqual(v, u) {
- return errors.DuplicateItems(path, in)
- }
- }
- unique = append(unique, v)
- }
- return nil
-}
-
-// MinLength validates a string for minimum length
-func MinLength(path, in, data string, minLength int64) *errors.Validation {
- strLen := int64(utf8.RuneCountInString(data))
- if strLen < minLength {
- return errors.TooShort(path, in, minLength, data)
- }
- return nil
-}
-
-// MaxLength validates a string for maximum length
-func MaxLength(path, in, data string, maxLength int64) *errors.Validation {
- strLen := int64(utf8.RuneCountInString(data))
- if strLen > maxLength {
- return errors.TooLong(path, in, maxLength, data)
- }
- return nil
-}
-
-// ReadOnly validates an interface for readonly
-func ReadOnly(ctx context.Context, path, in string, data interface{}) *errors.Validation {
-
- // read only is only validated when operationType is request
- if op := extractOperationType(ctx); op != request {
- return nil
- }
-
- // data must be of zero value of its type
- val := reflect.ValueOf(data)
- if val.IsValid() {
- if reflect.DeepEqual(reflect.Zero(val.Type()).Interface(), val.Interface()) {
- return nil
- }
- } else {
- return nil
- }
-
- return errors.ReadOnly(path, in, data)
-}
-
-// Required validates an interface for requiredness
-func Required(path, in string, data interface{}) *errors.Validation {
- val := reflect.ValueOf(data)
- if val.IsValid() {
- if reflect.DeepEqual(reflect.Zero(val.Type()).Interface(), val.Interface()) {
- return errors.Required(path, in, data)
- }
- return nil
- }
- return errors.Required(path, in, data)
-}
-
-// RequiredString validates a string for requiredness
-func RequiredString(path, in, data string) *errors.Validation {
- if data == "" {
- return errors.Required(path, in, data)
- }
- return nil
-}
-
-// RequiredNumber validates a number for requiredness
-func RequiredNumber(path, in string, data float64) *errors.Validation {
- if data == 0 {
- return errors.Required(path, in, data)
- }
- return nil
-}
-
-// Pattern validates a string against a regular expression
-func Pattern(path, in, data, pattern string) *errors.Validation {
- re, err := compileRegexp(pattern)
- if err != nil {
- return errors.FailedPattern(path, in, fmt.Sprintf("%s, but pattern is invalid: %s", pattern, err.Error()), data)
- }
- if !re.MatchString(data) {
- return errors.FailedPattern(path, in, pattern, data)
- }
- return nil
-}
-
-// MaximumInt validates if a number is smaller than a given maximum
-func MaximumInt(path, in string, data, max int64, exclusive bool) *errors.Validation {
- if (!exclusive && data > max) || (exclusive && data >= max) {
- return errors.ExceedsMaximumInt(path, in, max, exclusive, data)
- }
- return nil
-}
-
-// MaximumUint validates if a number is smaller than a given maximum
-func MaximumUint(path, in string, data, max uint64, exclusive bool) *errors.Validation {
- if (!exclusive && data > max) || (exclusive && data >= max) {
- return errors.ExceedsMaximumUint(path, in, max, exclusive, data)
- }
- return nil
-}
-
-// Maximum validates if a number is smaller than a given maximum
-func Maximum(path, in string, data, max float64, exclusive bool) *errors.Validation {
- if (!exclusive && data > max) || (exclusive && data >= max) {
- return errors.ExceedsMaximum(path, in, max, exclusive, data)
- }
- return nil
-}
-
-// Minimum validates if a number is larger than a given minimum
-func Minimum(path, in string, data, min float64, exclusive bool) *errors.Validation {
- if (!exclusive && data < min) || (exclusive && data <= min) {
- return errors.ExceedsMinimum(path, in, min, exclusive, data)
- }
- return nil
-}
-
-// MinimumInt validates if a number is larger than a given minimum
-func MinimumInt(path, in string, data, min int64, exclusive bool) *errors.Validation {
- if (!exclusive && data < min) || (exclusive && data <= min) {
- return errors.ExceedsMinimumInt(path, in, min, exclusive, data)
- }
- return nil
-}
-
-// MinimumUint validates if a number is larger than a given minimum
-func MinimumUint(path, in string, data, min uint64, exclusive bool) *errors.Validation {
- if (!exclusive && data < min) || (exclusive && data <= min) {
- return errors.ExceedsMinimumUint(path, in, min, exclusive, data)
- }
- return nil
-}
-
-// MultipleOf validates if the provided number is a multiple of the factor
-func MultipleOf(path, in string, data, factor float64) *errors.Validation {
- // multipleOf factor must be positive
- if factor <= 0 {
- return errors.MultipleOfMustBePositive(path, in, factor)
- }
- var mult float64
- if factor < 1 {
- mult = 1 / factor * data
- } else {
- mult = data / factor
- }
- if !swag.IsFloat64AJSONInteger(mult) {
- return errors.NotMultipleOf(path, in, factor, data)
- }
- return nil
-}
-
-// MultipleOfInt validates if the provided integer is a multiple of the factor
-func MultipleOfInt(path, in string, data int64, factor int64) *errors.Validation {
- // multipleOf factor must be positive
- if factor <= 0 {
- return errors.MultipleOfMustBePositive(path, in, factor)
- }
- mult := data / factor
- if mult*factor != data {
- return errors.NotMultipleOf(path, in, factor, data)
- }
- return nil
-}
-
-// MultipleOfUint validates if the provided unsigned integer is a multiple of the factor
-func MultipleOfUint(path, in string, data, factor uint64) *errors.Validation {
- // multipleOf factor must be positive
- if factor == 0 {
- return errors.MultipleOfMustBePositive(path, in, factor)
- }
- mult := data / factor
- if mult*factor != data {
- return errors.NotMultipleOf(path, in, factor, data)
- }
- return nil
-}
-
-// FormatOf validates if a string matches a format in the format registry
-func FormatOf(path, in, format, data string, registry strfmt.Registry) *errors.Validation {
- if registry == nil {
- registry = strfmt.Default
- }
- if ok := registry.ContainsName(format); !ok {
- return errors.InvalidTypeName(format)
- }
- if ok := registry.Validates(format, data); !ok {
- return errors.InvalidType(path, in, format, data)
- }
- return nil
-}
-
-// MaximumNativeType provides native type constraint validation as a facade
-// to various numeric types versions of Maximum constraint check.
-//
-// Assumes that any possible loss conversion during conversion has been
-// checked beforehand.
-//
-// NOTE: currently, the max value is marshalled as a float64, no matter what,
-// which means there may be a loss during conversions (e.g. for very large integers)
-//
-// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
-func MaximumNativeType(path, in string, val interface{}, max float64, exclusive bool) *errors.Validation {
- kind := reflect.ValueOf(val).Type().Kind()
- switch kind { //nolint:exhaustive
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- value := valueHelp.asInt64(val)
- return MaximumInt(path, in, value, int64(max), exclusive)
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- value := valueHelp.asUint64(val)
- if max < 0 {
- return errors.ExceedsMaximum(path, in, max, exclusive, val)
- }
- return MaximumUint(path, in, value, uint64(max), exclusive)
- case reflect.Float32, reflect.Float64:
- fallthrough
- default:
- value := valueHelp.asFloat64(val)
- return Maximum(path, in, value, max, exclusive)
- }
-}
-
-// MinimumNativeType provides native type constraint validation as a facade
-// to various numeric types versions of Minimum constraint check.
-//
-// Assumes that any possible loss conversion during conversion has been
-// checked beforehand.
-//
-// NOTE: currently, the min value is marshalled as a float64, no matter what,
-// which means there may be a loss during conversions (e.g. for very large integers)
-//
-// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
-func MinimumNativeType(path, in string, val interface{}, min float64, exclusive bool) *errors.Validation {
- kind := reflect.ValueOf(val).Type().Kind()
- switch kind { //nolint:exhaustive
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- value := valueHelp.asInt64(val)
- return MinimumInt(path, in, value, int64(min), exclusive)
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- value := valueHelp.asUint64(val)
- if min < 0 {
- return nil
- }
- return MinimumUint(path, in, value, uint64(min), exclusive)
- case reflect.Float32, reflect.Float64:
- fallthrough
- default:
- value := valueHelp.asFloat64(val)
- return Minimum(path, in, value, min, exclusive)
- }
-}
-
-// MultipleOfNativeType provides native type constraint validation as a facade
-// to various numeric types version of MultipleOf constraint check.
-//
-// Assumes that any possible loss conversion during conversion has been
-// checked beforehand.
-//
-// NOTE: currently, the multipleOf factor is marshalled as a float64, no matter what,
-// which means there may be a loss during conversions (e.g. for very large integers)
-//
-// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
-func MultipleOfNativeType(path, in string, val interface{}, multipleOf float64) *errors.Validation {
- kind := reflect.ValueOf(val).Type().Kind()
- switch kind { //nolint:exhaustive
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- value := valueHelp.asInt64(val)
- return MultipleOfInt(path, in, value, int64(multipleOf))
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- value := valueHelp.asUint64(val)
- return MultipleOfUint(path, in, value, uint64(multipleOf))
- case reflect.Float32, reflect.Float64:
- fallthrough
- default:
- value := valueHelp.asFloat64(val)
- return MultipleOf(path, in, value, multipleOf)
- }
-}
-
-// IsValueValidAgainstRange checks that a numeric value is compatible with
-// the range defined by Type and Format, that is, may be converted without loss.
-//
-// NOTE: this check is about type capacity and not formal verification such as: 1.0 != 1L
-func IsValueValidAgainstRange(val interface{}, typeName, format, prefix, path string) error {
- kind := reflect.ValueOf(val).Type().Kind()
-
- // What is the string representation of val
- var stringRep string
- switch kind { //nolint:exhaustive
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
- stringRep = swag.FormatUint64(valueHelp.asUint64(val))
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- stringRep = swag.FormatInt64(valueHelp.asInt64(val))
- case reflect.Float32, reflect.Float64:
- stringRep = swag.FormatFloat64(valueHelp.asFloat64(val))
- default:
- return fmt.Errorf("%s value number range checking called with invalid (non numeric) val type in %s", prefix, path)
- }
-
- var errVal error
-
- switch typeName {
- case integerType:
- switch format {
- case integerFormatInt32:
- _, errVal = swag.ConvertInt32(stringRep)
- case integerFormatUInt32:
- _, errVal = swag.ConvertUint32(stringRep)
- case integerFormatUInt64:
- _, errVal = swag.ConvertUint64(stringRep)
- case integerFormatInt64:
- fallthrough
- default:
- _, errVal = swag.ConvertInt64(stringRep)
- }
- case numberType:
- fallthrough
- default:
- switch format {
- case numberFormatFloat, numberFormatFloat32:
- _, errVal = swag.ConvertFloat32(stringRep)
- case numberFormatDouble, numberFormatFloat64:
- fallthrough
- default:
- // No check can be performed here since
- // no number beyond float64 is supported
- }
- }
- if errVal != nil { // We don't report the actual errVal from strconv
- if format != "" {
- errVal = fmt.Errorf("%s value must be of type %s with format %s in %s", prefix, typeName, format, path)
- } else {
- errVal = fmt.Errorf("%s value must be of type %s (default format) in %s", prefix, typeName, path)
- }
- }
- return errVal
-}
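
The hunk above removes the last of the vendored go-openapi/validate keyword helpers. For orientation only, here is a minimal, hedged sketch of how these validators are typically invoked; the import paths come from the deleted files, while the field names and argument values are invented for illustration:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	// Each helper returns a *errors.Validation, or nil when the value passes.
	if err := validate.Pattern("name", "body", "abc123", `^[a-z]+[0-9]+$`); err != nil {
		fmt.Println(err)
	}

	// FormatOf consults the strfmt registry; a nil registry falls back to strfmt.Default.
	if err := validate.FormatOf("created", "body", "date-time", "not-a-date", strfmt.Default); err != nil {
		fmt.Println(err)
	}

	// MultipleOf rejects values that are not an integer multiple of the factor.
	if err := validate.MultipleOf("count", "body", 7, 2); err != nil {
		fmt.Println(err)
	}

	// MinimumInt with exclusive=false still accepts a value equal to the minimum.
	if err := validate.MinimumInt("retries", "query", 0, 1, false); err != nil {
		fmt.Println(err)
	}
}
```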
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.editorconfig b/vendor/github.com/go-viper/mapstructure/v2/.editorconfig
deleted file mode 100644
index 1f664d13a5..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/.editorconfig
+++ /dev/null
@@ -1,18 +0,0 @@
-root = true
-
-[*]
-charset = utf-8
-end_of_line = lf
-indent_size = 4
-indent_style = space
-insert_final_newline = true
-trim_trailing_whitespace = true
-
-[*.go]
-indent_style = tab
-
-[{Makefile,*.mk}]
-indent_style = tab
-
-[*.nix]
-indent_size = 2
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.envrc b/vendor/github.com/go-viper/mapstructure/v2/.envrc
deleted file mode 100644
index 2e0f9f5f71..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/.envrc
+++ /dev/null
@@ -1,4 +0,0 @@
-if ! has nix_direnv_version || ! nix_direnv_version 3.0.4; then
- source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.4/direnvrc" "sha256-DzlYZ33mWF/Gs8DDeyjr8mnVmQGx7ASYqA5WlxwvBG4="
-fi
-use flake . --impure
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.gitignore b/vendor/github.com/go-viper/mapstructure/v2/.gitignore
deleted file mode 100644
index 470e7ca2bd..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-/.devenv/
-/.direnv/
-/.pre-commit-config.yaml
-/bin/
-/build/
-/var/
diff --git a/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml b/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml
deleted file mode 100644
index 763143aa77..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml
+++ /dev/null
@@ -1,23 +0,0 @@
-run:
- timeout: 5m
-
-linters-settings:
- gci:
- sections:
- - standard
- - default
- - prefix(github.com/go-viper/mapstructure)
- golint:
- min-confidence: 0
- goimports:
- local-prefixes: github.com/go-viper/maptstructure
-
-linters:
- disable-all: true
- enable:
- - gci
- - gofmt
- - gofumpt
- - goimports
- - staticcheck
- # - stylecheck
diff --git a/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md b/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md
deleted file mode 100644
index afd44e5f5f..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md
+++ /dev/null
@@ -1,104 +0,0 @@
-> [!WARNING]
-> As of v2 of this library, change log can be found in GitHub releases.
-
-## 1.5.1
-
-* Wrap errors so they're compatible with `errors.Is` and `errors.As` [GH-282]
-* Fix map of slices not decoding properly in certain cases. [GH-266]
-
-## 1.5.0
-
-* New option `IgnoreUntaggedFields` to ignore decoding to any fields
- without `mapstructure` (or the configured tag name) set [GH-277]
-* New option `ErrorUnset` which makes it an error if any fields
- in a target struct are not set by the decoding process. [GH-225]
-* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240]
-* Decoding to slice from array no longer crashes [GH-265]
-* Decode nested struct pointers to map [GH-271]
-* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280]
-* Fix issue where fields with `,omitempty` would sometimes decode
- into a map with an empty string key [GH-281]
-
-## 1.4.3
-
-* Fix cases where `json.Number` didn't decode properly [GH-261]
-
-## 1.4.2
-
-* Custom name matchers to support any sort of casing, formatting, etc. for
- field names. [GH-250]
-* Fix possible panic in ComposeDecodeHookFunc [GH-251]
-
-## 1.4.1
-
-* Fix regression where `*time.Time` value would be set to empty and not be sent
- to decode hooks properly [GH-232]
-
-## 1.4.0
-
-* A new decode hook type `DecodeHookFuncValue` has been added that has
- access to the full values. [GH-183]
-* Squash is now supported with embedded fields that are struct pointers [GH-205]
-* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206]
-
-## 1.3.3
-
-* Decoding maps from maps creates a settable value for decode hooks [GH-203]
-
-## 1.3.2
-
-* Decode into interface type with a struct value is supported [GH-187]
-
-## 1.3.1
-
-* Squash should only squash embedded structs. [GH-194]
-
-## 1.3.0
-
-* Added `",omitempty"` support. This will ignore zero values in the source
- structure when encoding. [GH-145]
-
-## 1.2.3
-
-* Fix duplicate entries in Keys list with pointer values. [GH-185]
-
-## 1.2.2
-
-* Do not add unsettable (unexported) values to the unused metadata key
- or "remain" value. [GH-150]
-
-## 1.2.1
-
-* Go modules checksum mismatch fix
-
-## 1.2.0
-
-* Added support to capture unused values in a field using the `",remain"` value
- in the mapstructure tag. There is an example to showcase usage.
-* Added `DecoderConfig` option to always squash embedded structs
-* `json.Number` can decode into `uint` types
-* Empty slices are preserved and not replaced with nil slices
-* Fix panic that can occur when decoding a map into a nil slice of structs
-* Improved package documentation for godoc
-
-## 1.1.2
-
-* Fix error when decode hook decodes interface implementation into interface
- type. [GH-140]
-
-## 1.1.1
-
-* Fix panic that can happen in `decodePtr`
-
-## 1.1.0
-
-* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133]
-* Support struct to struct decoding [GH-137]
-* If source map value is nil, then destination map value is nil (instead of empty)
-* If source slice value is nil, then destination slice value is nil (instead of empty)
-* If source pointer is nil, then destination pointer is set to nil (instead of
- allocated zero value of type)
-
-## 1.0.0
-
-* Initial tagged stable release.
diff --git a/vendor/github.com/go-viper/mapstructure/v2/LICENSE b/vendor/github.com/go-viper/mapstructure/v2/LICENSE
deleted file mode 100644
index f9c841a51e..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013 Mitchell Hashimoto
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/vendor/github.com/go-viper/mapstructure/v2/README.md b/vendor/github.com/go-viper/mapstructure/v2/README.md
deleted file mode 100644
index dd5ec69ddf..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/README.md
+++ /dev/null
@@ -1,80 +0,0 @@
-# mapstructure
-
-[](https://github.com/go-viper/mapstructure/actions?query=workflow%3ACI)
-[](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2)
-
-
-mapstructure is a Go library for decoding generic map values to structures
-and vice versa, while providing helpful error handling.
-
-This library is most useful when decoding values from some data stream (JSON,
-Gob, etc.) where you don't _quite_ know the structure of the underlying data
-until you read a part of it. You can therefore read a `map[string]interface{}`
-and use this library to decode it into the proper underlying native Go
-structure.
-
-## Installation
-
-```shell
-go get github.com/go-viper/mapstructure/v2
-```
-
-## Migrating from `github.com/mitchellh/mapstructure`
-
-[@mitchellh](https://github.com/mitchellh) announced his intent to archive some of his unmaintained projects (see [here](https://gist.github.com/mitchellh/90029601268e59a29e64e55bab1c5bdc) and [here](https://github.com/mitchellh/mapstructure/issues/349)). This repository has achieved the "blessed fork" status.
-
-You can migrate to this package by changing your import paths in your Go files to `github.com/go-viper/mapstructure/v2`.
-The API is the same, so you don't need to change anything else.
-
-Here is a script that can help you with the migration:
-
-```shell
-sed -i 's/github.com\/mitchellh\/mapstructure/github.com\/go-viper\/mapstructure\/v2/g' $(find . -type f -name '*.go')
-```
-
-If you need more time to migrate your code, that is absolutely fine.
-
-Some of the latest fixes are backported to the v1 release branch of this package, so you can use the Go modules `replace` feature until you are ready to migrate:
-
-```
-replace github.com/mitchellh/mapstructure => github.com/go-viper/mapstructure v1.6.0
-```
-
-## Usage & Example
-
-For usage and examples see the [documentation](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2).
-
-The `Decode` function has examples associated with it there.
-
-## But Why?!
-
-Go offers fantastic standard libraries for decoding formats such as JSON.
-The standard method is to have a struct pre-created, and populate that struct
-from the bytes of the encoded format. This is great, but the problem is if
-you have configuration or an encoding that changes slightly depending on
-specific fields. For example, consider this JSON:
-
-```json
-{
- "type": "person",
- "name": "Mitchell"
-}
-```
-
-Perhaps we can't populate a specific structure without first reading
-the "type" field from the JSON. We could always do two passes over the
-decoding of the JSON (reading the "type" first, and the rest later).
-However, it is much simpler to just decode this into a `map[string]interface{}`
-structure, read the "type" key, then use something like this library
-to decode it into the proper structure.
-
-## Credits
-
-Mapstructure was originally created by [@mitchellh](https://github.com/mitchellh).
-This is a maintained fork of the original library.
-
-Read more about the reasons for the fork [here](https://github.com/mitchellh/mapstructure/issues/349).
-
-## License
-
-The project is licensed under the [MIT License](LICENSE).
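
Since the README above only points at the online documentation for usage, a minimal decoding sketch may help frame what this removed dependency did; the struct and map contents are invented for illustration:

```go
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

// Person is a hypothetical target type; mapstructure matches map keys to
// field names (or the tag value) case-insensitively by default.
type Person struct {
	Name string `mapstructure:"name"`
	Age  int    `mapstructure:"age"`
}

func main() {
	input := map[string]interface{}{
		"name": "alice",
		"age":  30,
	}

	var p Person
	if err := mapstructure.Decode(input, &p); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", p) // {Name:alice Age:30}
}
```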
diff --git a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
deleted file mode 100644
index 1f3c69d4b8..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
+++ /dev/null
@@ -1,630 +0,0 @@
-package mapstructure
-
-import (
- "encoding"
- "errors"
- "fmt"
- "net"
- "net/netip"
- "net/url"
- "reflect"
- "strconv"
- "strings"
- "time"
-)
-
-// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns
-// it into the proper DecodeHookFunc type, such as DecodeHookFuncType.
-func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc {
- // Create variables here so we can reference them with the reflect pkg
- var f1 DecodeHookFuncType
- var f2 DecodeHookFuncKind
- var f3 DecodeHookFuncValue
-
- // Fill in the variables into this interface and the rest is done
- // automatically using the reflect package.
- potential := []interface{}{f1, f2, f3}
-
- v := reflect.ValueOf(h)
- vt := v.Type()
- for _, raw := range potential {
- pt := reflect.ValueOf(raw).Type()
- if vt.ConvertibleTo(pt) {
- return v.Convert(pt).Interface()
- }
- }
-
- return nil
-}
-
-// cachedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns
-// it into a closure to be used directly
-// if the type fails to convert we return a closure always erroring to keep the previous behaviour
-func cachedDecodeHook(raw DecodeHookFunc) func(from reflect.Value, to reflect.Value) (interface{}, error) {
- switch f := typedDecodeHook(raw).(type) {
- case DecodeHookFuncType:
- return func(from reflect.Value, to reflect.Value) (interface{}, error) {
- return f(from.Type(), to.Type(), from.Interface())
- }
- case DecodeHookFuncKind:
- return func(from reflect.Value, to reflect.Value) (interface{}, error) {
- return f(from.Kind(), to.Kind(), from.Interface())
- }
- case DecodeHookFuncValue:
- return func(from reflect.Value, to reflect.Value) (interface{}, error) {
- return f(from, to)
- }
- default:
- return func(from reflect.Value, to reflect.Value) (interface{}, error) {
- return nil, errors.New("invalid decode hook signature")
- }
- }
-}
-
-// DecodeHookExec executes the given decode hook. This should be used
-// since it'll naturally degrade to the older backwards compatible DecodeHookFunc
-// that took reflect.Kind instead of reflect.Type.
-func DecodeHookExec(
- raw DecodeHookFunc,
- from reflect.Value, to reflect.Value,
-) (interface{}, error) {
- switch f := typedDecodeHook(raw).(type) {
- case DecodeHookFuncType:
- return f(from.Type(), to.Type(), from.Interface())
- case DecodeHookFuncKind:
- return f(from.Kind(), to.Kind(), from.Interface())
- case DecodeHookFuncValue:
- return f(from, to)
- default:
- return nil, errors.New("invalid decode hook signature")
- }
-}
-
-// ComposeDecodeHookFunc creates a single DecodeHookFunc that
-// automatically composes multiple DecodeHookFuncs.
-//
-// The composed funcs are called in order, with the result of the
-// previous transformation.
-func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
- cached := make([]func(from reflect.Value, to reflect.Value) (interface{}, error), 0, len(fs))
- for _, f := range fs {
- cached = append(cached, cachedDecodeHook(f))
- }
- return func(f reflect.Value, t reflect.Value) (interface{}, error) {
- var err error
- data := f.Interface()
-
- newFrom := f
- for _, c := range cached {
- data, err = c(newFrom, t)
- if err != nil {
- return nil, err
- }
- newFrom = reflect.ValueOf(data)
- }
-
- return data, nil
- }
-}
-
-// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned.
-// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages.
-func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc {
- cached := make([]func(from reflect.Value, to reflect.Value) (interface{}, error), 0, len(ff))
- for _, f := range ff {
- cached = append(cached, cachedDecodeHook(f))
- }
- return func(a, b reflect.Value) (interface{}, error) {
- var allErrs string
- var out interface{}
- var err error
-
- for _, c := range cached {
- out, err = c(a, b)
- if err != nil {
- allErrs += err.Error() + "\n"
- continue
- }
-
- return out, nil
- }
-
- return nil, errors.New(allErrs)
- }
-}
-
-// StringToSliceHookFunc returns a DecodeHookFunc that converts
-// string to []string by splitting on the given sep.
-func StringToSliceHookFunc(sep string) DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.SliceOf(f) {
- return data, nil
- }
-
- raw := data.(string)
- if raw == "" {
- return []string{}, nil
- }
-
- return strings.Split(raw, sep), nil
- }
-}
-
-// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts
-// strings to time.Duration.
-func StringToTimeDurationHookFunc() DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(time.Duration(5)) {
- return data, nil
- }
-
- // Convert it by parsing
- return time.ParseDuration(data.(string))
- }
-}
-
-// StringToURLHookFunc returns a DecodeHookFunc that converts
-// strings to *url.URL.
-func StringToURLHookFunc() DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(&url.URL{}) {
- return data, nil
- }
-
- // Convert it by parsing
- return url.Parse(data.(string))
- }
-}
-
-// StringToIPHookFunc returns a DecodeHookFunc that converts
-// strings to net.IP
-func StringToIPHookFunc() DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(net.IP{}) {
- return data, nil
- }
-
- // Convert it by parsing
- ip := net.ParseIP(data.(string))
- if ip == nil {
- return net.IP{}, fmt.Errorf("failed parsing ip %v", data)
- }
-
- return ip, nil
- }
-}
-
-// StringToIPNetHookFunc returns a DecodeHookFunc that converts
-// strings to net.IPNet
-func StringToIPNetHookFunc() DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(net.IPNet{}) {
- return data, nil
- }
-
- // Convert it by parsing
- _, net, err := net.ParseCIDR(data.(string))
- return net, err
- }
-}
-
-// StringToTimeHookFunc returns a DecodeHookFunc that converts
-// strings to time.Time.
-func StringToTimeHookFunc(layout string) DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(time.Time{}) {
- return data, nil
- }
-
- // Convert it by parsing
- return time.Parse(layout, data.(string))
- }
-}
-
-// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to
-// the decoder.
-//
-// Note that this is significantly different from the WeaklyTypedInput option
-// of the DecoderConfig.
-func WeaklyTypedHook(
- f reflect.Kind,
- t reflect.Kind,
- data interface{},
-) (interface{}, error) {
- dataVal := reflect.ValueOf(data)
- switch t {
- case reflect.String:
- switch f {
- case reflect.Bool:
- if dataVal.Bool() {
- return "1", nil
- }
- return "0", nil
- case reflect.Float32:
- return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil
- case reflect.Int:
- return strconv.FormatInt(dataVal.Int(), 10), nil
- case reflect.Slice:
- dataType := dataVal.Type()
- elemKind := dataType.Elem().Kind()
- if elemKind == reflect.Uint8 {
- return string(dataVal.Interface().([]uint8)), nil
- }
- case reflect.Uint:
- return strconv.FormatUint(dataVal.Uint(), 10), nil
- }
- }
-
- return data, nil
-}
-
-func RecursiveStructToMapHookFunc() DecodeHookFunc {
- return func(f reflect.Value, t reflect.Value) (interface{}, error) {
- if f.Kind() != reflect.Struct {
- return f.Interface(), nil
- }
-
- var i interface{} = struct{}{}
- if t.Type() != reflect.TypeOf(&i).Elem() {
- return f.Interface(), nil
- }
-
- m := make(map[string]interface{})
- t.Set(reflect.ValueOf(m))
-
- return f.Interface(), nil
- }
-}
-
-// TextUnmarshallerHookFunc returns a DecodeHookFunc that applies
-// strings to the UnmarshalText function, when the target type
-// implements the encoding.TextUnmarshaler interface
-func TextUnmarshallerHookFunc() DecodeHookFuncType {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- result := reflect.New(t).Interface()
- unmarshaller, ok := result.(encoding.TextUnmarshaler)
- if !ok {
- return data, nil
- }
- str, ok := data.(string)
- if !ok {
- str = reflect.Indirect(reflect.ValueOf(&data)).Elem().String()
- }
- if err := unmarshaller.UnmarshalText([]byte(str)); err != nil {
- return nil, err
- }
- return result, nil
- }
-}
-
-// StringToNetIPAddrHookFunc returns a DecodeHookFunc that converts
-// strings to netip.Addr.
-func StringToNetIPAddrHookFunc() DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(netip.Addr{}) {
- return data, nil
- }
-
- // Convert it by parsing
- return netip.ParseAddr(data.(string))
- }
-}
-
-// StringToNetIPAddrPortHookFunc returns a DecodeHookFunc that converts
-// strings to netip.AddrPort.
-func StringToNetIPAddrPortHookFunc() DecodeHookFunc {
- return func(
- f reflect.Type,
- t reflect.Type,
- data interface{},
- ) (interface{}, error) {
- if f.Kind() != reflect.String {
- return data, nil
- }
- if t != reflect.TypeOf(netip.AddrPort{}) {
- return data, nil
- }
-
- // Convert it by parsing
- return netip.ParseAddrPort(data.(string))
- }
-}
-
-// StringToBasicTypeHookFunc returns a DecodeHookFunc that converts
-// strings to basic types.
-// int8, uint8, int16, uint16, int32, uint32, int64, uint64, int, uint, float32, float64, bool, byte, rune, complex64, complex128
-func StringToBasicTypeHookFunc() DecodeHookFunc {
- return ComposeDecodeHookFunc(
- StringToInt8HookFunc(),
- StringToUint8HookFunc(),
- StringToInt16HookFunc(),
- StringToUint16HookFunc(),
- StringToInt32HookFunc(),
- StringToUint32HookFunc(),
- StringToInt64HookFunc(),
- StringToUint64HookFunc(),
- StringToIntHookFunc(),
- StringToUintHookFunc(),
- StringToFloat32HookFunc(),
- StringToFloat64HookFunc(),
- StringToBoolHookFunc(),
- // byte and rune are aliases for uint8 and int32 respectively
- // StringToByteHookFunc(),
- // StringToRuneHookFunc(),
- StringToComplex64HookFunc(),
- StringToComplex128HookFunc(),
- )
-}
-
-// StringToInt8HookFunc returns a DecodeHookFunc that converts
-// strings to int8.
-func StringToInt8HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Int8 {
- return data, nil
- }
-
- // Convert it by parsing
- i64, err := strconv.ParseInt(data.(string), 0, 8)
- return int8(i64), err
- }
-}
-
-// StringToUint8HookFunc returns a DecodeHookFunc that converts
-// strings to uint8.
-func StringToUint8HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Uint8 {
- return data, nil
- }
-
- // Convert it by parsing
- u64, err := strconv.ParseUint(data.(string), 0, 8)
- return uint8(u64), err
- }
-}
-
-// StringToInt16HookFunc returns a DecodeHookFunc that converts
-// strings to int16.
-func StringToInt16HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Int16 {
- return data, nil
- }
-
- // Convert it by parsing
- i64, err := strconv.ParseInt(data.(string), 0, 16)
- return int16(i64), err
- }
-}
-
-// StringToUint16HookFunc returns a DecodeHookFunc that converts
-// strings to uint16.
-func StringToUint16HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Uint16 {
- return data, nil
- }
-
- // Convert it by parsing
- u64, err := strconv.ParseUint(data.(string), 0, 16)
- return uint16(u64), err
- }
-}
-
-// StringToInt32HookFunc returns a DecodeHookFunc that converts
-// strings to int32.
-func StringToInt32HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Int32 {
- return data, nil
- }
-
- // Convert it by parsing
- i64, err := strconv.ParseInt(data.(string), 0, 32)
- return int32(i64), err
- }
-}
-
-// StringToUint32HookFunc returns a DecodeHookFunc that converts
-// strings to uint32.
-func StringToUint32HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Uint32 {
- return data, nil
- }
-
- // Convert it by parsing
- u64, err := strconv.ParseUint(data.(string), 0, 32)
- return uint32(u64), err
- }
-}
-
-// StringToInt64HookFunc returns a DecodeHookFunc that converts
-// strings to int64.
-func StringToInt64HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Int64 {
- return data, nil
- }
-
- // Convert it by parsing
- return strconv.ParseInt(data.(string), 0, 64)
- }
-}
-
-// StringToUint64HookFunc returns a DecodeHookFunc that converts
-// strings to uint64.
-func StringToUint64HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Uint64 {
- return data, nil
- }
-
- // Convert it by parsing
- return strconv.ParseUint(data.(string), 0, 64)
- }
-}
-
-// StringToIntHookFunc returns a DecodeHookFunc that converts
-// strings to int.
-func StringToIntHookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Int {
- return data, nil
- }
-
- // Convert it by parsing
- i64, err := strconv.ParseInt(data.(string), 0, 0)
- return int(i64), err
- }
-}
-
-// StringToUintHookFunc returns a DecodeHookFunc that converts
-// strings to uint.
-func StringToUintHookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Uint {
- return data, nil
- }
-
- // Convert it by parsing
- u64, err := strconv.ParseUint(data.(string), 0, 0)
- return uint(u64), err
- }
-}
-
-// StringToFloat32HookFunc returns a DecodeHookFunc that converts
-// strings to float32.
-func StringToFloat32HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Float32 {
- return data, nil
- }
-
- // Convert it by parsing
- f64, err := strconv.ParseFloat(data.(string), 32)
- return float32(f64), err
- }
-}
-
-// StringToFloat64HookFunc returns a DecodeHookFunc that converts
-// strings to float64.
-func StringToFloat64HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Float64 {
- return data, nil
- }
-
- // Convert it by parsing
- return strconv.ParseFloat(data.(string), 64)
- }
-}
-
-// StringToBoolHookFunc returns a DecodeHookFunc that converts
-// strings to bool.
-func StringToBoolHookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Bool {
- return data, nil
- }
-
- // Convert it by parsing
- return strconv.ParseBool(data.(string))
- }
-}
-
-// StringToByteHookFunc returns a DecodeHookFunc that converts
-// strings to byte.
-func StringToByteHookFunc() DecodeHookFunc {
- return StringToUint8HookFunc()
-}
-
-// StringToRuneHookFunc returns a DecodeHookFunc that converts
-// strings to rune.
-func StringToRuneHookFunc() DecodeHookFunc {
- return StringToInt32HookFunc()
-}
-
-// StringToComplex64HookFunc returns a DecodeHookFunc that converts
-// strings to complex64.
-func StringToComplex64HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Complex64 {
- return data, nil
- }
-
- // Convert it by parsing
- c128, err := strconv.ParseComplex(data.(string), 64)
- return complex64(c128), err
- }
-}
-
-// StringToComplex128HookFunc returns a DecodeHookFunc that converts
-// strings to complex128.
-func StringToComplex128HookFunc() DecodeHookFunc {
- return func(f reflect.Type, t reflect.Type, data interface{}) (interface{}, error) {
- if f.Kind() != reflect.String || t.Kind() != reflect.Complex128 {
- return data, nil
- }
-
- // Convert it by parsing
- return strconv.ParseComplex(data.(string), 128)
- }
-}
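
decode_hooks.go, deleted above, holds the hook helpers wired into DecoderConfig.DecodeHook. A small, hedged sketch of how they are usually composed (the config struct, field names, and input values here are invented):

```go
package main

import (
	"fmt"
	"time"

	"github.com/go-viper/mapstructure/v2"
)

// Config is a hypothetical target; the hooks below let string inputs decode
// into time.Duration and []string fields.
type Config struct {
	Timeout time.Duration `mapstructure:"timeout"`
	Tags    []string      `mapstructure:"tags"`
}

func main() {
	var cfg Config
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Result: &cfg,
		DecodeHook: mapstructure.ComposeDecodeHookFunc(
			mapstructure.StringToTimeDurationHookFunc(),
			mapstructure.StringToSliceHookFunc(","),
		),
	})
	if err != nil {
		panic(err)
	}

	if err := dec.Decode(map[string]interface{}{
		"timeout": "1m30s",
		"tags":    "a,b,c",
	}); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // {Timeout:1m30s Tags:[a b c]}
}
```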
diff --git a/vendor/github.com/go-viper/mapstructure/v2/flake.lock b/vendor/github.com/go-viper/mapstructure/v2/flake.lock
deleted file mode 100644
index 4bea8154e0..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/flake.lock
+++ /dev/null
@@ -1,472 +0,0 @@
-{
- "nodes": {
- "cachix": {
- "inputs": {
- "devenv": "devenv_2",
- "flake-compat": [
- "devenv",
- "flake-compat"
- ],
- "nixpkgs": [
- "devenv",
- "nixpkgs"
- ],
- "pre-commit-hooks": [
- "devenv",
- "pre-commit-hooks"
- ]
- },
- "locked": {
- "lastModified": 1712055811,
- "narHash": "sha256-7FcfMm5A/f02yyzuavJe06zLa9hcMHsagE28ADcmQvk=",
- "owner": "cachix",
- "repo": "cachix",
- "rev": "02e38da89851ec7fec3356a5c04bc8349cae0e30",
- "type": "github"
- },
- "original": {
- "owner": "cachix",
- "repo": "cachix",
- "type": "github"
- }
- },
- "devenv": {
- "inputs": {
- "cachix": "cachix",
- "flake-compat": "flake-compat_2",
- "nix": "nix_2",
- "nixpkgs": "nixpkgs_2",
- "pre-commit-hooks": "pre-commit-hooks"
- },
- "locked": {
- "lastModified": 1717245169,
- "narHash": "sha256-+mW3rTBjGU8p1THJN0lX/Dd/8FbnF+3dB+mJuSaxewE=",
- "owner": "cachix",
- "repo": "devenv",
- "rev": "c3f9f053c077c6f88a3de5276d9178c62baa3fc3",
- "type": "github"
- },
- "original": {
- "owner": "cachix",
- "repo": "devenv",
- "type": "github"
- }
- },
- "devenv_2": {
- "inputs": {
- "flake-compat": [
- "devenv",
- "cachix",
- "flake-compat"
- ],
- "nix": "nix",
- "nixpkgs": "nixpkgs",
- "poetry2nix": "poetry2nix",
- "pre-commit-hooks": [
- "devenv",
- "cachix",
- "pre-commit-hooks"
- ]
- },
- "locked": {
- "lastModified": 1708704632,
- "narHash": "sha256-w+dOIW60FKMaHI1q5714CSibk99JfYxm0CzTinYWr+Q=",
- "owner": "cachix",
- "repo": "devenv",
- "rev": "2ee4450b0f4b95a1b90f2eb5ffea98b90e48c196",
- "type": "github"
- },
- "original": {
- "owner": "cachix",
- "ref": "python-rewrite",
- "repo": "devenv",
- "type": "github"
- }
- },
- "flake-compat": {
- "flake": false,
- "locked": {
- "lastModified": 1673956053,
- "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
- "owner": "edolstra",
- "repo": "flake-compat",
- "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
- "type": "github"
- },
- "original": {
- "owner": "edolstra",
- "repo": "flake-compat",
- "type": "github"
- }
- },
- "flake-compat_2": {
- "flake": false,
- "locked": {
- "lastModified": 1696426674,
- "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
- "owner": "edolstra",
- "repo": "flake-compat",
- "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
- "type": "github"
- },
- "original": {
- "owner": "edolstra",
- "repo": "flake-compat",
- "type": "github"
- }
- },
- "flake-parts": {
- "inputs": {
- "nixpkgs-lib": "nixpkgs-lib"
- },
- "locked": {
- "lastModified": 1717285511,
- "narHash": "sha256-iKzJcpdXih14qYVcZ9QC9XuZYnPc6T8YImb6dX166kw=",
- "owner": "hercules-ci",
- "repo": "flake-parts",
- "rev": "2a55567fcf15b1b1c7ed712a2c6fadaec7412ea8",
- "type": "github"
- },
- "original": {
- "owner": "hercules-ci",
- "repo": "flake-parts",
- "type": "github"
- }
- },
- "flake-utils": {
- "inputs": {
- "systems": "systems"
- },
- "locked": {
- "lastModified": 1689068808,
- "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
- "owner": "numtide",
- "repo": "flake-utils",
- "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
- "type": "github"
- },
- "original": {
- "owner": "numtide",
- "repo": "flake-utils",
- "type": "github"
- }
- },
- "flake-utils_2": {
- "inputs": {
- "systems": "systems_2"
- },
- "locked": {
- "lastModified": 1710146030,
- "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
- "owner": "numtide",
- "repo": "flake-utils",
- "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
- "type": "github"
- },
- "original": {
- "owner": "numtide",
- "repo": "flake-utils",
- "type": "github"
- }
- },
- "gitignore": {
- "inputs": {
- "nixpkgs": [
- "devenv",
- "pre-commit-hooks",
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1709087332,
- "narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
- "owner": "hercules-ci",
- "repo": "gitignore.nix",
- "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
- "type": "github"
- },
- "original": {
- "owner": "hercules-ci",
- "repo": "gitignore.nix",
- "type": "github"
- }
- },
- "nix": {
- "inputs": {
- "flake-compat": "flake-compat",
- "nixpkgs": [
- "devenv",
- "cachix",
- "devenv",
- "nixpkgs"
- ],
- "nixpkgs-regression": "nixpkgs-regression"
- },
- "locked": {
- "lastModified": 1712911606,
- "narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=",
- "owner": "domenkozar",
- "repo": "nix",
- "rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12",
- "type": "github"
- },
- "original": {
- "owner": "domenkozar",
- "ref": "devenv-2.21",
- "repo": "nix",
- "type": "github"
- }
- },
- "nix-github-actions": {
- "inputs": {
- "nixpkgs": [
- "devenv",
- "cachix",
- "devenv",
- "poetry2nix",
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1688870561,
- "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
- "owner": "nix-community",
- "repo": "nix-github-actions",
- "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
- "type": "github"
- },
- "original": {
- "owner": "nix-community",
- "repo": "nix-github-actions",
- "type": "github"
- }
- },
- "nix_2": {
- "inputs": {
- "flake-compat": [
- "devenv",
- "flake-compat"
- ],
- "nixpkgs": [
- "devenv",
- "nixpkgs"
- ],
- "nixpkgs-regression": "nixpkgs-regression_2"
- },
- "locked": {
- "lastModified": 1712911606,
- "narHash": "sha256-BGvBhepCufsjcUkXnEEXhEVjwdJAwPglCC2+bInc794=",
- "owner": "domenkozar",
- "repo": "nix",
- "rev": "b24a9318ea3f3600c1e24b4a00691ee912d4de12",
- "type": "github"
- },
- "original": {
- "owner": "domenkozar",
- "ref": "devenv-2.21",
- "repo": "nix",
- "type": "github"
- }
- },
- "nixpkgs": {
- "locked": {
- "lastModified": 1692808169,
- "narHash": "sha256-x9Opq06rIiwdwGeK2Ykj69dNc2IvUH1fY55Wm7atwrE=",
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "9201b5ff357e781bf014d0330d18555695df7ba8",
- "type": "github"
- },
- "original": {
- "owner": "NixOS",
- "ref": "nixpkgs-unstable",
- "repo": "nixpkgs",
- "type": "github"
- }
- },
- "nixpkgs-lib": {
- "locked": {
- "lastModified": 1717284937,
- "narHash": "sha256-lIbdfCsf8LMFloheeE6N31+BMIeixqyQWbSr2vk79EQ=",
- "type": "tarball",
- "url": "https://github.com/NixOS/nixpkgs/archive/eb9ceca17df2ea50a250b6b27f7bf6ab0186f198.tar.gz"
- },
- "original": {
- "type": "tarball",
- "url": "https://github.com/NixOS/nixpkgs/archive/eb9ceca17df2ea50a250b6b27f7bf6ab0186f198.tar.gz"
- }
- },
- "nixpkgs-regression": {
- "locked": {
- "lastModified": 1643052045,
- "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
- "type": "github"
- },
- "original": {
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
- "type": "github"
- }
- },
- "nixpkgs-regression_2": {
- "locked": {
- "lastModified": 1643052045,
- "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
- "type": "github"
- },
- "original": {
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
- "type": "github"
- }
- },
- "nixpkgs-stable": {
- "locked": {
- "lastModified": 1710695816,
- "narHash": "sha256-3Eh7fhEID17pv9ZxrPwCLfqXnYP006RKzSs0JptsN84=",
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "614b4613980a522ba49f0d194531beddbb7220d3",
- "type": "github"
- },
- "original": {
- "owner": "NixOS",
- "ref": "nixos-23.11",
- "repo": "nixpkgs",
- "type": "github"
- }
- },
- "nixpkgs_2": {
- "locked": {
- "lastModified": 1713361204,
- "narHash": "sha256-TA6EDunWTkc5FvDCqU3W2T3SFn0gRZqh6D/hJnM02MM=",
- "owner": "cachix",
- "repo": "devenv-nixpkgs",
- "rev": "285676e87ad9f0ca23d8714a6ab61e7e027020c6",
- "type": "github"
- },
- "original": {
- "owner": "cachix",
- "ref": "rolling",
- "repo": "devenv-nixpkgs",
- "type": "github"
- }
- },
- "nixpkgs_3": {
- "locked": {
- "lastModified": 1717112898,
- "narHash": "sha256-7R2ZvOnvd9h8fDd65p0JnB7wXfUvreox3xFdYWd1BnY=",
- "owner": "NixOS",
- "repo": "nixpkgs",
- "rev": "6132b0f6e344ce2fe34fc051b72fb46e34f668e0",
- "type": "github"
- },
- "original": {
- "owner": "NixOS",
- "ref": "nixpkgs-unstable",
- "repo": "nixpkgs",
- "type": "github"
- }
- },
- "poetry2nix": {
- "inputs": {
- "flake-utils": "flake-utils",
- "nix-github-actions": "nix-github-actions",
- "nixpkgs": [
- "devenv",
- "cachix",
- "devenv",
- "nixpkgs"
- ]
- },
- "locked": {
- "lastModified": 1692876271,
- "narHash": "sha256-IXfZEkI0Mal5y1jr6IRWMqK8GW2/f28xJenZIPQqkY0=",
- "owner": "nix-community",
- "repo": "poetry2nix",
- "rev": "d5006be9c2c2417dafb2e2e5034d83fabd207ee3",
- "type": "github"
- },
- "original": {
- "owner": "nix-community",
- "repo": "poetry2nix",
- "type": "github"
- }
- },
- "pre-commit-hooks": {
- "inputs": {
- "flake-compat": [
- "devenv",
- "flake-compat"
- ],
- "flake-utils": "flake-utils_2",
- "gitignore": "gitignore",
- "nixpkgs": [
- "devenv",
- "nixpkgs"
- ],
- "nixpkgs-stable": "nixpkgs-stable"
- },
- "locked": {
- "lastModified": 1713775815,
- "narHash": "sha256-Wu9cdYTnGQQwtT20QQMg7jzkANKQjwBD9iccfGKkfls=",
- "owner": "cachix",
- "repo": "pre-commit-hooks.nix",
- "rev": "2ac4dcbf55ed43f3be0bae15e181f08a57af24a4",
- "type": "github"
- },
- "original": {
- "owner": "cachix",
- "repo": "pre-commit-hooks.nix",
- "type": "github"
- }
- },
- "root": {
- "inputs": {
- "devenv": "devenv",
- "flake-parts": "flake-parts",
- "nixpkgs": "nixpkgs_3"
- }
- },
- "systems": {
- "locked": {
- "lastModified": 1681028828,
- "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
- "owner": "nix-systems",
- "repo": "default",
- "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
- "type": "github"
- },
- "original": {
- "owner": "nix-systems",
- "repo": "default",
- "type": "github"
- }
- },
- "systems_2": {
- "locked": {
- "lastModified": 1681028828,
- "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
- "owner": "nix-systems",
- "repo": "default",
- "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
- "type": "github"
- },
- "original": {
- "owner": "nix-systems",
- "repo": "default",
- "type": "github"
- }
- }
- },
- "root": "root",
- "version": 7
-}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/flake.nix b/vendor/github.com/go-viper/mapstructure/v2/flake.nix
deleted file mode 100644
index 4ed0f53311..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/flake.nix
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- inputs = {
- nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
- flake-parts.url = "github:hercules-ci/flake-parts";
- devenv.url = "github:cachix/devenv";
- };
-
- outputs = inputs@{ flake-parts, ... }:
- flake-parts.lib.mkFlake { inherit inputs; } {
- imports = [
- inputs.devenv.flakeModule
- ];
-
- systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ];
-
- perSystem = { config, self', inputs', pkgs, system, ... }: rec {
- devenv.shells = {
- default = {
- languages = {
- go.enable = true;
- };
-
- pre-commit.hooks = {
- nixpkgs-fmt.enable = true;
- };
-
- packages = with pkgs; [
- golangci-lint
- ];
-
- # https://github.com/cachix/devenv/issues/528#issuecomment-1556108767
- containers = pkgs.lib.mkForce { };
- };
-
- ci = devenv.shells.default;
- };
- };
- };
-}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/internal/errors/errors.go b/vendor/github.com/go-viper/mapstructure/v2/internal/errors/errors.go
deleted file mode 100644
index d1c15e474f..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/internal/errors/errors.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package errors
-
-import "errors"
-
-func New(text string) error {
- return errors.New(text)
-}
-
-func As(err error, target interface{}) bool {
- return errors.As(err, target)
-}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/internal/errors/join.go b/vendor/github.com/go-viper/mapstructure/v2/internal/errors/join.go
deleted file mode 100644
index d74e3a0b5a..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/internal/errors/join.go
+++ /dev/null
@@ -1,9 +0,0 @@
-//go:build go1.20
-
-package errors
-
-import "errors"
-
-func Join(errs ...error) error {
- return errors.Join(errs...)
-}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/internal/errors/join_go1_19.go b/vendor/github.com/go-viper/mapstructure/v2/internal/errors/join_go1_19.go
deleted file mode 100644
index 700b40229c..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/internal/errors/join_go1_19.go
+++ /dev/null
@@ -1,61 +0,0 @@
-//go:build !go1.20
-
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package errors
-
-// Join returns an error that wraps the given errors.
-// Any nil error values are discarded.
-// Join returns nil if every value in errs is nil.
-// The error formats as the concatenation of the strings obtained
-// by calling the Error method of each element of errs, with a newline
-// between each string.
-//
-// A non-nil error returned by Join implements the Unwrap() []error method.
-func Join(errs ...error) error {
- n := 0
- for _, err := range errs {
- if err != nil {
- n++
- }
- }
- if n == 0 {
- return nil
- }
- e := &joinError{
- errs: make([]error, 0, n),
- }
- for _, err := range errs {
- if err != nil {
- e.errs = append(e.errs, err)
- }
- }
- return e
-}
-
-type joinError struct {
- errs []error
-}
-
-func (e *joinError) Error() string {
- // Since Join returns nil if every value in errs is nil,
- // e.errs cannot be empty.
- if len(e.errs) == 1 {
- return e.errs[0].Error()
- }
-
- b := []byte(e.errs[0].Error())
- for _, err := range e.errs[1:] {
- b = append(b, '\n')
- b = append(b, err.Error()...)
- }
- // At this point, b has at least one byte '\n'.
- // return unsafe.String(&b[0], len(b))
- return string(b)
-}
-
-func (e *joinError) Unwrap() []error {
- return e.errs
-}
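
The file above is a pre-Go-1.20 backport of errors.Join; the behaviour it reproduces can be observed with the standard library on newer toolchains:

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	errA := errors.New("first failure")
	errB := errors.New("second failure")

	// Nil values are discarded; the remaining messages are joined with newlines.
	joined := errors.Join(errA, nil, errB)
	fmt.Println(joined)

	// The joined error still unwraps to its parts.
	fmt.Println(errors.Is(joined, errB)) // true
}
```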
diff --git a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
deleted file mode 100644
index e77e63ba38..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
+++ /dev/null
@@ -1,1620 +0,0 @@
-// Package mapstructure exposes functionality to convert one arbitrary
-// Go type into another, typically to convert a map[string]interface{}
-// into a native Go structure.
-//
-// The Go structure can be arbitrarily complex, containing slices,
-// other structs, etc. and the decoder will properly decode nested
-// maps and so on into the proper structures in the native Go struct.
-// See the examples to see what the decoder is capable of.
-//
-// The simplest function to start with is Decode.
-//
-// # Field Tags
-//
-// When decoding to a struct, mapstructure will use the field name by
-// default to perform the mapping. For example, if a struct has a field
-// "Username" then mapstructure will look for a key in the source value
-// of "username" (case insensitive).
-//
-// type User struct {
-// Username string
-// }
-//
-// You can change the behavior of mapstructure by using struct tags.
-// The default struct tag that mapstructure looks for is "mapstructure"
-// but you can customize it using DecoderConfig.
-//
-// # Renaming Fields
-//
-// To rename the key that mapstructure looks for, use the "mapstructure"
-// tag and set a value directly. For example, to change the "username" example
-// above to "user":
-//
-// type User struct {
-// Username string `mapstructure:"user"`
-// }
-//
-// # Embedded Structs and Squashing
-//
-// Embedded structs are treated as if they're another field with that name.
-// By default, the two structs below are equivalent when decoding with
-// mapstructure:
-//
-// type Person struct {
-// Name string
-// }
-//
-// type Friend struct {
-// Person
-// }
-//
-// type Friend struct {
-// Person Person
-// }
-//
-// This would require an input that looks like below:
-//
-// map[string]interface{}{
-// "person": map[string]interface{}{"name": "alice"},
-// }
-//
-// If your "person" value is NOT nested, then you can append ",squash" to
-// your tag value and mapstructure will treat it as if the embedded struct
-// were part of the struct directly. Example:
-//
-// type Friend struct {
-// Person `mapstructure:",squash"`
-// }
-//
-// Now the following input would be accepted:
-//
-// map[string]interface{}{
-// "name": "alice",
-// }
-//
-// When decoding from a struct to a map, the squash tag squashes the struct
-// fields into a single map. Using the example structs from above:
-//
-// Friend{Person: Person{Name: "alice"}}
-//
-// Will be decoded into a map:
-//
-// map[string]interface{}{
-// "name": "alice",
-// }
-//
-// DecoderConfig has a field that changes the behavior of mapstructure
-// to always squash embedded structs.
-//
-// # Remainder Values
-//
-// If there are any unmapped keys in the source value, mapstructure by
-// default will silently ignore them. You can error by setting ErrorUnused
-// in DecoderConfig. If you're using Metadata you can also maintain a slice
-// of the unused keys.
-//
-// You can also use the ",remain" suffix on your tag to collect all unused
-// values in a map. The field with this tag MUST be a map type and should
-// probably be a "map[string]interface{}" or "map[interface{}]interface{}".
-// See example below:
-//
-// type Friend struct {
-// Name string
-// Other map[string]interface{} `mapstructure:",remain"`
-// }
-//
-// Given the input below, Other would be populated with the other
-// values that weren't used (everything but "name"):
-//
-// map[string]interface{}{
-// "name": "bob",
-// "address": "123 Maple St.",
-// }
-//
-// # Omit Empty Values
-//
-// When decoding from a struct to any other value, you may use the
-// ",omitempty" suffix on your tag to omit that value if it equates to
-// the zero value. The zero value of all types is specified in the Go
-// specification.
-//
-// For example, the zero value of a numeric type is zero ("0"). If the struct
-// field value is zero and a numeric type, the field is empty, and it won't
-// be encoded into the destination type.
-//
-// type Source struct {
-// Age int `mapstructure:",omitempty"`
-// }
-//
-// # Unexported fields
-//
-// Since unexported (private) struct fields cannot be set outside the package
-// where they are defined, the decoder will simply skip them.
-//
-// For this output type definition:
-//
-// type Exported struct {
-// private string // this unexported field will be skipped
-// Public string
-// }
-//
-// Using this map as input:
-//
-// map[string]interface{}{
-// "private": "I will be ignored",
-// "Public": "I made it through!",
-// }
-//
-// The following struct will be decoded:
-//
-// type Exported struct {
-// private: "" // field is left with an empty string (zero value)
-// Public: "I made it through!"
-// }
-//
-// # Other Configuration
-//
-// mapstructure is highly configurable. See the DecoderConfig struct
-// for other features and options that are supported.
-package mapstructure
-
-import (
- "encoding/json"
- "fmt"
- "reflect"
- "sort"
- "strconv"
- "strings"
-
- "github.com/go-viper/mapstructure/v2/internal/errors"
-)
-
-// DecodeHookFunc is the callback function that can be used for
-// data transformations. See "DecodeHook" in the DecoderConfig
-// struct.
-//
-// The type must be one of DecodeHookFuncType, DecodeHookFuncKind, or
-// DecodeHookFuncValue.
-// Values are a superset of Types (Values can return types), and Types are a
-// superset of Kinds (Types can return Kinds) and are generally a richer thing
-// to use, but Kinds are simpler if you only need those.
-//
-// The reason DecodeHookFunc is multi-typed is for backwards compatibility:
-// we started with Kinds and then realized Types were the better solution,
-// but have a promise to not break backwards compat so we now support
-// both.
-type DecodeHookFunc interface{}
-
-// DecodeHookFuncType is a DecodeHookFunc which has complete information about
-// the source and target types.
-type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error)
-
-// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the
-// source and target types.
-type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error)
-
-// DecodeHookFuncValue is a DecodeHookFunc which has complete access to both the source and target
-// values.
-type DecodeHookFuncValue func(from reflect.Value, to reflect.Value) (interface{}, error)
-
-// DecoderConfig is the configuration that is used to create a new decoder
-// and allows customization of various aspects of decoding.
-type DecoderConfig struct {
- // DecodeHook, if set, will be called before any decoding and any
- // type conversion (if WeaklyTypedInput is on). This lets you modify
- // the values before they're set down onto the resulting struct. The
- // DecodeHook is called for every map and value in the input. This means
- // that if a struct has embedded fields with squash tags the decode hook
- // is called only once with all of the input data, not once for each
- // embedded struct.
- //
- // If an error is returned, the entire decode will fail with that error.
- DecodeHook DecodeHookFunc
-
- // If ErrorUnused is true, then it is an error for there to exist
- // keys in the original map that were unused in the decoding process
- // (extra keys).
- ErrorUnused bool
-
- // If ErrorUnset is true, then it is an error for there to exist
- // fields in the result that were not set in the decoding process
- // (extra fields). This only applies to decoding to a struct. This
- // will affect all nested structs as well.
- ErrorUnset bool
-
- // ZeroFields, if set to true, will zero fields before writing them.
- // For example, a map will be emptied before decoded values are put in
- // it. If this is false, a map will be merged.
- ZeroFields bool
-
- // If WeaklyTypedInput is true, the decoder will make the following
- // "weak" conversions:
- //
- // - bools to string (true = "1", false = "0")
- // - numbers to string (base 10)
- // - bools to int/uint (true = 1, false = 0)
- // - strings to int/uint (base implied by prefix)
- // - int to bool (true if value != 0)
- // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F,
- // FALSE, false, False. Anything else is an error)
- // - empty array = empty map and vice versa
- // - negative numbers to overflowed uint values (base 10)
- // - slice of maps to a merged map
- // - single values are converted to slices if required. Each
- // element is weakly decoded. For example: "4" can become []int{4}
- // if the target type is an int slice.
- //
- WeaklyTypedInput bool
-
- // Squash will squash embedded structs. A squash tag may also be
- // added to an individual struct field using a tag. For example:
- //
- // type Parent struct {
- // Child `mapstructure:",squash"`
- // }
- Squash bool
-
- // Metadata is the struct that will contain extra metadata about
- // the decoding. If this is nil, then no metadata will be tracked.
- Metadata *Metadata
-
- // Result is a pointer to the struct that will contain the decoded
- // value.
- Result interface{}
-
- // The tag name that mapstructure reads for field names. This
- // defaults to "mapstructure"
- TagName string
-
-	// The tag option that indicates a field should be squashed.
-	// This defaults to "squash".
- SquashTagOption string
-
-	// IgnoreUntaggedFields, if set to true, ignores all struct fields without
-	// an explicit tag, treating them as if they were tagged `mapstructure:"-"`.
- IgnoreUntaggedFields bool
-
- // MatchName is the function used to match the map key to the struct
- // field name or tag. Defaults to `strings.EqualFold`. This can be used
- // to implement case-sensitive tag values, support snake casing, etc.
- MatchName func(mapKey, fieldName string) bool
-
- // DecodeNil, if set to true, will cause the DecodeHook (if present) to run
- // even if the input is nil. This can be used to provide default values.
- DecodeNil bool
-}
-
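For the options above, a rough usage sketch follows; the config struct and input values are invented, and the behavior assumed is exactly what the field comments describe.

	package main

	import (
		"fmt"

		"github.com/go-viper/mapstructure/v2"
	)

	func main() {
		// Hypothetical target struct for this illustration only.
		var cfg struct {
			Port    int  `mapstructure:"port"`
			Verbose bool `mapstructure:"verbose"`
		}

		dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
			Result:           &cfg,
			WeaklyTypedInput: true, // "8080" -> 8080, "true" -> true
			ErrorUnused:      true, // unknown input keys become an error
		})
		if err != nil {
			panic(err)
		}

		// "port" and "verbose" arrive as strings; the weak conversions above apply.
		err = dec.Decode(map[string]interface{}{"port": "8080", "verbose": "true"})
		fmt.Println(cfg.Port, cfg.Verbose, err) // 8080 true <nil>
	}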
-// A Decoder takes a raw interface value and turns it into structured
-// data, keeping track of rich error information along the way in case
-// anything goes wrong. Unlike the basic top-level Decode method, you can
-// more finely control how the Decoder behaves using the DecoderConfig
-// structure. The top-level Decode method is just a convenience that sets
-// up the most basic Decoder.
-type Decoder struct {
- config *DecoderConfig
- cachedDecodeHook func(from reflect.Value, to reflect.Value) (interface{}, error)
-}
-
-// Metadata contains information about decoding a structure that
-// is tedious or difficult to get otherwise.
-type Metadata struct {
- // Keys are the keys of the structure which were successfully decoded
- Keys []string
-
- // Unused is a slice of keys that were found in the raw value but
- // weren't decoded since there was no matching field in the result interface
- Unused []string
-
- // Unset is a slice of field names that were found in the result interface
- // but weren't set in the decoding process since there was no matching value
- // in the input
- Unset []string
-}
-
-// Decode takes an input structure and uses reflection to translate it to
-// the output structure. output must be a pointer to a map or struct.
-func Decode(input interface{}, output interface{}) error {
- config := &DecoderConfig{
- Metadata: nil,
- Result: output,
- }
-
- decoder, err := NewDecoder(config)
- if err != nil {
- return err
- }
-
- return decoder.Decode(input)
-}
-
-// WeakDecode is the same as Decode but is shorthand to enable
-// WeaklyTypedInput. See DecoderConfig for more info.
-func WeakDecode(input, output interface{}) error {
- config := &DecoderConfig{
- Metadata: nil,
- Result: output,
- WeaklyTypedInput: true,
- }
-
- decoder, err := NewDecoder(config)
- if err != nil {
- return err
- }
-
- return decoder.Decode(input)
-}
-
-// DecodeMetadata is the same as Decode, but is shorthand to
-// enable metadata collection. See DecoderConfig for more info.
-func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error {
- config := &DecoderConfig{
- Metadata: metadata,
- Result: output,
- }
-
- decoder, err := NewDecoder(config)
- if err != nil {
- return err
- }
-
- return decoder.Decode(input)
-}
-
-// WeakDecodeMetadata is the same as Decode, but is shorthand to
-// enable both WeaklyTypedInput and metadata collection. See
-// DecoderConfig for more info.
-func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error {
- config := &DecoderConfig{
- Metadata: metadata,
- Result: output,
- WeaklyTypedInput: true,
- }
-
- decoder, err := NewDecoder(config)
- if err != nil {
- return err
- }
-
- return decoder.Decode(input)
-}
-
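A short sketch of the convenience wrappers and Metadata in use; the input map and target types are made up for illustration.

	package main

	import (
		"fmt"

		"github.com/go-viper/mapstructure/v2"
	)

	func main() {
		// Target struct and input map are invented for this illustration.
		var out struct {
			Name string `mapstructure:"name"`
		}

		// DecodeMetadata records which input keys were used and which were not.
		var md mapstructure.Metadata
		input := map[string]interface{}{"name": "bob", "extra": 1}
		if err := mapstructure.DecodeMetadata(input, &out, &md); err != nil {
			panic(err)
		}
		fmt.Println(out.Name, md.Keys, md.Unused) // bob [name] [extra]

		// WeakDecode additionally enables the weak conversions listed in DecoderConfig.
		var n int
		if err := mapstructure.WeakDecode("42", &n); err != nil {
			panic(err)
		}
		fmt.Println(n) // 42
	}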
-// NewDecoder returns a new decoder for the given configuration. Once
-// a decoder has been returned, the same configuration must not be used
-// again.
-func NewDecoder(config *DecoderConfig) (*Decoder, error) {
- val := reflect.ValueOf(config.Result)
- if val.Kind() != reflect.Ptr {
- return nil, errors.New("result must be a pointer")
- }
-
- val = val.Elem()
- if !val.CanAddr() {
- return nil, errors.New("result must be addressable (a pointer)")
- }
-
- if config.Metadata != nil {
- if config.Metadata.Keys == nil {
- config.Metadata.Keys = make([]string, 0)
- }
-
- if config.Metadata.Unused == nil {
- config.Metadata.Unused = make([]string, 0)
- }
-
- if config.Metadata.Unset == nil {
- config.Metadata.Unset = make([]string, 0)
- }
- }
-
- if config.TagName == "" {
- config.TagName = "mapstructure"
- }
-
- if config.SquashTagOption == "" {
- config.SquashTagOption = "squash"
- }
-
- if config.MatchName == nil {
- config.MatchName = strings.EqualFold
- }
-
- result := &Decoder{
- config: config,
- }
- if config.DecodeHook != nil {
- result.cachedDecodeHook = cachedDecodeHook(config.DecodeHook)
- }
-
- return result, nil
-}
-
-// Decode decodes the given raw interface to the target pointer specified
-// by the configuration.
-func (d *Decoder) Decode(input interface{}) error {
- err := d.decode("", input, reflect.ValueOf(d.config.Result).Elem())
-
-	// Retain some of the original behavior when multiple errors occur
- var joinedErr interface{ Unwrap() []error }
- if errors.As(err, &joinedErr) {
- return fmt.Errorf("decoding failed due to the following error(s):\n\n%w", err)
- }
-
- return err
-}
-
-// isNil returns true if the input is nil or a typed nil pointer.
-func isNil(input interface{}) bool {
- if input == nil {
- return true
- }
- val := reflect.ValueOf(input)
- return val.Kind() == reflect.Ptr && val.IsNil()
-}
-
-// Decodes an unknown data type into a specific reflection value.
-func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error {
- var (
- inputVal = reflect.ValueOf(input)
- outputKind = getKind(outVal)
- decodeNil = d.config.DecodeNil && d.cachedDecodeHook != nil
- )
- if isNil(input) {
- // Typed nils won't match the "input == nil" below, so reset input.
- input = nil
- }
- if input == nil {
- // If the data is nil, then we don't set anything, unless ZeroFields is set
- // to true.
- if d.config.ZeroFields {
- outVal.Set(reflect.Zero(outVal.Type()))
-
- if d.config.Metadata != nil && name != "" {
- d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
- }
- }
- if !decodeNil {
- return nil
- }
- }
- if !inputVal.IsValid() {
- if !decodeNil {
- // If the input value is invalid, then we just set the value
- // to be the zero value.
- outVal.Set(reflect.Zero(outVal.Type()))
- if d.config.Metadata != nil && name != "" {
- d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
- }
- return nil
- }
- // Hooks need a valid inputVal, so reset it to zero value of outVal type.
- switch outputKind {
- case reflect.Struct, reflect.Map:
- var mapVal map[string]interface{}
-			inputVal = reflect.ValueOf(mapVal) // typed nil map value
- case reflect.Slice, reflect.Array:
- var sliceVal []interface{}
-			inputVal = reflect.ValueOf(sliceVal) // typed nil slice value
- default:
- inputVal = reflect.Zero(outVal.Type())
- }
- }
-
- if d.cachedDecodeHook != nil {
- // We have a DecodeHook, so let's pre-process the input.
- var err error
- input, err = d.cachedDecodeHook(inputVal, outVal)
- if err != nil {
- return fmt.Errorf("error decoding '%s': %w", name, err)
- }
- }
- if isNil(input) {
- return nil
- }
-
- var err error
- addMetaKey := true
- switch outputKind {
- case reflect.Bool:
- err = d.decodeBool(name, input, outVal)
- case reflect.Interface:
- err = d.decodeBasic(name, input, outVal)
- case reflect.String:
- err = d.decodeString(name, input, outVal)
- case reflect.Int:
- err = d.decodeInt(name, input, outVal)
- case reflect.Uint:
- err = d.decodeUint(name, input, outVal)
- case reflect.Float32:
- err = d.decodeFloat(name, input, outVal)
- case reflect.Complex64:
- err = d.decodeComplex(name, input, outVal)
- case reflect.Struct:
- err = d.decodeStruct(name, input, outVal)
- case reflect.Map:
- err = d.decodeMap(name, input, outVal)
- case reflect.Ptr:
- addMetaKey, err = d.decodePtr(name, input, outVal)
- case reflect.Slice:
- err = d.decodeSlice(name, input, outVal)
- case reflect.Array:
- err = d.decodeArray(name, input, outVal)
- case reflect.Func:
- err = d.decodeFunc(name, input, outVal)
- default:
- // If we reached this point then we weren't able to decode it
- return fmt.Errorf("%s: unsupported type: %s", name, outputKind)
- }
-
-	// If we reached here, then we successfully decoded SOMETHING, so
-	// mark the key as used if we're tracking metadata.
- if addMetaKey && d.config.Metadata != nil && name != "" {
- d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
- }
-
- return err
-}
-
-// This decodes a basic type (bool, int, string, etc.) and sets the
-// value to "data" of that type.
-func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error {
- if val.IsValid() && val.Elem().IsValid() {
- elem := val.Elem()
-
-		// If we can't address this element, then it's not writable. Instead,
- // we make a copy of the value (which is a pointer and therefore
- // writable), decode into that, and replace the whole value.
- copied := false
- if !elem.CanAddr() {
- copied = true
-
- // Make *T
- copy := reflect.New(elem.Type())
-
- // *T = elem
- copy.Elem().Set(elem)
-
- // Set elem so we decode into it
- elem = copy
- }
-
- // Decode. If we have an error then return. We also return right
- // away if we're not a copy because that means we decoded directly.
- if err := d.decode(name, data, elem); err != nil || !copied {
- return err
- }
-
-		// If we're a copy, we need to set the final result
- val.Set(elem.Elem())
- return nil
- }
-
- dataVal := reflect.ValueOf(data)
-
- // If the input data is a pointer, and the assigned type is the dereference
- // of that exact pointer, then indirect it so that we can assign it.
- // Example: *string to string
- if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() {
- dataVal = reflect.Indirect(dataVal)
- }
-
- if !dataVal.IsValid() {
- dataVal = reflect.Zero(val.Type())
- }
-
- dataValType := dataVal.Type()
- if !dataValType.AssignableTo(val.Type()) {
- return fmt.Errorf(
- "'%s' expected type '%s', got '%s'",
- name, val.Type(), dataValType)
- }
-
- val.Set(dataVal)
- return nil
-}
-
-func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataKind := getKind(dataVal)
-
- converted := true
- switch {
- case dataKind == reflect.String:
- val.SetString(dataVal.String())
- case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
- if dataVal.Bool() {
- val.SetString("1")
- } else {
- val.SetString("0")
- }
- case dataKind == reflect.Int && d.config.WeaklyTypedInput:
- val.SetString(strconv.FormatInt(dataVal.Int(), 10))
- case dataKind == reflect.Uint && d.config.WeaklyTypedInput:
- val.SetString(strconv.FormatUint(dataVal.Uint(), 10))
- case dataKind == reflect.Float32 && d.config.WeaklyTypedInput:
- val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64))
- case dataKind == reflect.Slice && d.config.WeaklyTypedInput,
- dataKind == reflect.Array && d.config.WeaklyTypedInput:
- dataType := dataVal.Type()
- elemKind := dataType.Elem().Kind()
- switch elemKind {
- case reflect.Uint8:
- var uints []uint8
- if dataKind == reflect.Array {
-				uints = make([]uint8, dataVal.Len())
- for i := range uints {
- uints[i] = dataVal.Index(i).Interface().(uint8)
- }
- } else {
- uints = dataVal.Interface().([]uint8)
- }
- val.SetString(string(uints))
- default:
- converted = false
- }
- default:
- converted = false
- }
-
- if !converted {
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
- }
-
- return nil
-}
-
-func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataKind := getKind(dataVal)
- dataType := dataVal.Type()
-
- switch {
- case dataKind == reflect.Int:
- val.SetInt(dataVal.Int())
- case dataKind == reflect.Uint:
- val.SetInt(int64(dataVal.Uint()))
- case dataKind == reflect.Float32:
- val.SetInt(int64(dataVal.Float()))
- case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
- if dataVal.Bool() {
- val.SetInt(1)
- } else {
- val.SetInt(0)
- }
- case dataKind == reflect.String && d.config.WeaklyTypedInput:
- str := dataVal.String()
- if str == "" {
- str = "0"
- }
-
- i, err := strconv.ParseInt(str, 0, val.Type().Bits())
- if err == nil {
- val.SetInt(i)
- } else {
- return fmt.Errorf("cannot parse '%s' as int: %s", name, err)
- }
- case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
- jn := data.(json.Number)
- i, err := jn.Int64()
- if err != nil {
- return fmt.Errorf(
- "error decoding json.Number into %s: %s", name, err)
- }
- val.SetInt(i)
- default:
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
- }
-
- return nil
-}
-
-func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataKind := getKind(dataVal)
- dataType := dataVal.Type()
-
- switch {
- case dataKind == reflect.Int:
- i := dataVal.Int()
- if i < 0 && !d.config.WeaklyTypedInput {
- return fmt.Errorf("cannot parse '%s', %d overflows uint",
- name, i)
- }
- val.SetUint(uint64(i))
- case dataKind == reflect.Uint:
- val.SetUint(dataVal.Uint())
- case dataKind == reflect.Float32:
- f := dataVal.Float()
- if f < 0 && !d.config.WeaklyTypedInput {
- return fmt.Errorf("cannot parse '%s', %f overflows uint",
- name, f)
- }
- val.SetUint(uint64(f))
- case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
- if dataVal.Bool() {
- val.SetUint(1)
- } else {
- val.SetUint(0)
- }
- case dataKind == reflect.String && d.config.WeaklyTypedInput:
- str := dataVal.String()
- if str == "" {
- str = "0"
- }
-
- i, err := strconv.ParseUint(str, 0, val.Type().Bits())
- if err == nil {
- val.SetUint(i)
- } else {
- return fmt.Errorf("cannot parse '%s' as uint: %s", name, err)
- }
- case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
- jn := data.(json.Number)
- i, err := strconv.ParseUint(string(jn), 0, 64)
- if err != nil {
- return fmt.Errorf(
- "error decoding json.Number into %s: %s", name, err)
- }
- val.SetUint(i)
- default:
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
- }
-
- return nil
-}
-
-func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataKind := getKind(dataVal)
-
- switch {
- case dataKind == reflect.Bool:
- val.SetBool(dataVal.Bool())
- case dataKind == reflect.Int && d.config.WeaklyTypedInput:
- val.SetBool(dataVal.Int() != 0)
- case dataKind == reflect.Uint && d.config.WeaklyTypedInput:
- val.SetBool(dataVal.Uint() != 0)
- case dataKind == reflect.Float32 && d.config.WeaklyTypedInput:
- val.SetBool(dataVal.Float() != 0)
- case dataKind == reflect.String && d.config.WeaklyTypedInput:
- b, err := strconv.ParseBool(dataVal.String())
- if err == nil {
- val.SetBool(b)
- } else if dataVal.String() == "" {
- val.SetBool(false)
- } else {
- return fmt.Errorf("cannot parse '%s' as bool: %s", name, err)
- }
- default:
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%#v', value: '%#v'",
- name, val, dataVal, data)
- }
-
- return nil
-}
-
-func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataKind := getKind(dataVal)
- dataType := dataVal.Type()
-
- switch {
- case dataKind == reflect.Int:
- val.SetFloat(float64(dataVal.Int()))
- case dataKind == reflect.Uint:
- val.SetFloat(float64(dataVal.Uint()))
- case dataKind == reflect.Float32:
- val.SetFloat(dataVal.Float())
- case dataKind == reflect.Bool && d.config.WeaklyTypedInput:
- if dataVal.Bool() {
- val.SetFloat(1)
- } else {
- val.SetFloat(0)
- }
- case dataKind == reflect.String && d.config.WeaklyTypedInput:
- str := dataVal.String()
- if str == "" {
- str = "0"
- }
-
- f, err := strconv.ParseFloat(str, val.Type().Bits())
- if err == nil {
- val.SetFloat(f)
- } else {
- return fmt.Errorf("cannot parse '%s' as float: %s", name, err)
- }
- case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
- jn := data.(json.Number)
- i, err := jn.Float64()
- if err != nil {
- return fmt.Errorf(
- "error decoding json.Number into %s: %s", name, err)
- }
- val.SetFloat(i)
- default:
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
- }
-
- return nil
-}
-
-func (d *Decoder) decodeComplex(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataKind := getKind(dataVal)
-
- switch {
- case dataKind == reflect.Complex64:
- val.SetComplex(dataVal.Complex())
- default:
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
- }
-
- return nil
-}
-
-func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error {
- valType := val.Type()
- valKeyType := valType.Key()
- valElemType := valType.Elem()
-
- // By default we overwrite keys in the current map
- valMap := val
-
- // If the map is nil or we're purposely zeroing fields, make a new map
- if valMap.IsNil() || d.config.ZeroFields {
- // Make a new map to hold our result
- mapType := reflect.MapOf(valKeyType, valElemType)
- valMap = reflect.MakeMap(mapType)
- }
-
- dataVal := reflect.ValueOf(data)
-
- // Resolve any levels of indirection
- for dataVal.Kind() == reflect.Pointer {
- dataVal = reflect.Indirect(dataVal)
- }
-
- // Check input type and based on the input type jump to the proper func
- switch dataVal.Kind() {
- case reflect.Map:
- return d.decodeMapFromMap(name, dataVal, val, valMap)
-
- case reflect.Struct:
- return d.decodeMapFromStruct(name, dataVal, val, valMap)
-
- case reflect.Array, reflect.Slice:
- if d.config.WeaklyTypedInput {
- return d.decodeMapFromSlice(name, dataVal, val, valMap)
- }
-
- fallthrough
-
- default:
- return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
- }
-}
-
-func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
-	// Special case for backwards-compatibility reasons (covered by tests)
- if dataVal.Len() == 0 {
- val.Set(valMap)
- return nil
- }
-
- for i := 0; i < dataVal.Len(); i++ {
- err := d.decode(
- name+"["+strconv.Itoa(i)+"]",
- dataVal.Index(i).Interface(), val)
- if err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
- valType := val.Type()
- valKeyType := valType.Key()
- valElemType := valType.Elem()
-
- // Accumulate errors
- var errs []error
-
- // If the input data is empty, then we just match what the input data is.
- if dataVal.Len() == 0 {
- if dataVal.IsNil() {
- if !val.IsNil() {
- val.Set(dataVal)
- }
- } else {
- // Set to empty allocated value
- val.Set(valMap)
- }
-
- return nil
- }
-
- for _, k := range dataVal.MapKeys() {
- fieldName := name + "[" + k.String() + "]"
-
- // First decode the key into the proper type
- currentKey := reflect.Indirect(reflect.New(valKeyType))
- if err := d.decode(fieldName, k.Interface(), currentKey); err != nil {
- errs = append(errs, err)
- continue
- }
-
- // Next decode the data into the proper type
- v := dataVal.MapIndex(k).Interface()
- currentVal := reflect.Indirect(reflect.New(valElemType))
- if err := d.decode(fieldName, v, currentVal); err != nil {
- errs = append(errs, err)
- continue
- }
-
- valMap.SetMapIndex(currentKey, currentVal)
- }
-
- // Set the built up map to the value
- val.Set(valMap)
-
- return errors.Join(errs...)
-}
-
-func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error {
- typ := dataVal.Type()
- for i := 0; i < typ.NumField(); i++ {
- // Get the StructField first since this is a cheap operation. If the
- // field is unexported, then ignore it.
- f := typ.Field(i)
- if f.PkgPath != "" {
- continue
- }
-
- // Next get the actual value of this field and verify it is assignable
- // to the map value.
- v := dataVal.Field(i)
- if !v.Type().AssignableTo(valMap.Type().Elem()) {
- return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem())
- }
-
- tagValue := f.Tag.Get(d.config.TagName)
- keyName := f.Name
-
- if tagValue == "" && d.config.IgnoreUntaggedFields {
- continue
- }
-
- // If Squash is set in the config, we squash the field down.
- squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous
-
- v = dereferencePtrToStructIfNeeded(v, d.config.TagName)
-
- // Determine the name of the key in the map
- if index := strings.Index(tagValue, ","); index != -1 {
- if tagValue[:index] == "-" {
- continue
- }
- // If "omitempty" is specified in the tag, it ignores empty values.
-			if strings.Contains(tagValue[index+1:], "omitempty") && isEmptyValue(v) {
- continue
- }
-
- // If "squash" is specified in the tag, we squash the field down.
- squash = squash || strings.Contains(tagValue[index+1:], d.config.SquashTagOption)
- if squash {
- // When squashing, the embedded type can be a pointer to a struct.
- if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct {
- v = v.Elem()
- }
-
- // The final type must be a struct
- if v.Kind() != reflect.Struct {
- return fmt.Errorf("cannot squash non-struct type '%s'", v.Type())
- }
- } else {
-				if strings.Contains(tagValue[index+1:], "remain") {
- if v.Kind() != reflect.Map {
- return fmt.Errorf("error remain-tag field with invalid type: '%s'", v.Type())
- }
-
- ptr := v.MapRange()
- for ptr.Next() {
- valMap.SetMapIndex(ptr.Key(), ptr.Value())
- }
- continue
- }
- }
- if keyNameTagValue := tagValue[:index]; keyNameTagValue != "" {
- keyName = keyNameTagValue
- }
- } else if len(tagValue) > 0 {
- if tagValue == "-" {
- continue
- }
- keyName = tagValue
- }
-
- switch v.Kind() {
- // this is an embedded struct, so handle it differently
- case reflect.Struct:
- x := reflect.New(v.Type())
- x.Elem().Set(v)
-
- vType := valMap.Type()
- vKeyType := vType.Key()
- vElemType := vType.Elem()
- mType := reflect.MapOf(vKeyType, vElemType)
- vMap := reflect.MakeMap(mType)
-
- // Creating a pointer to a map so that other methods can completely
- // overwrite the map if need be (looking at you decodeMapFromMap). The
- // indirection allows the underlying map to be settable (CanSet() == true)
-			// whereas reflect.MakeMap returns an unsettable map.
- addrVal := reflect.New(vMap.Type())
- reflect.Indirect(addrVal).Set(vMap)
-
- err := d.decode(keyName, x.Interface(), reflect.Indirect(addrVal))
- if err != nil {
- return err
- }
-
- // the underlying map may have been completely overwritten so pull
- // it indirectly out of the enclosing value.
- vMap = reflect.Indirect(addrVal)
-
- if squash {
- for _, k := range vMap.MapKeys() {
- valMap.SetMapIndex(k, vMap.MapIndex(k))
- }
- } else {
- valMap.SetMapIndex(reflect.ValueOf(keyName), vMap)
- }
-
- default:
- valMap.SetMapIndex(reflect.ValueOf(keyName), v)
- }
- }
-
- if val.CanAddr() {
- val.Set(valMap)
- }
-
- return nil
-}
-
-func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) (bool, error) {
- // If the input data is nil, then we want to just set the output
- // pointer to be nil as well.
- isNil := data == nil
- if !isNil {
- switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() {
- case reflect.Chan,
- reflect.Func,
- reflect.Interface,
- reflect.Map,
- reflect.Ptr,
- reflect.Slice:
- isNil = v.IsNil()
- }
- }
- if isNil {
- if !val.IsNil() && val.CanSet() {
- nilValue := reflect.New(val.Type()).Elem()
- val.Set(nilValue)
- }
-
- return true, nil
- }
-
- // Create an element of the concrete (non pointer) type and decode
- // into that. Then set the value of the pointer to this type.
- valType := val.Type()
- valElemType := valType.Elem()
- if val.CanSet() {
- realVal := val
- if realVal.IsNil() || d.config.ZeroFields {
- realVal = reflect.New(valElemType)
- }
-
- if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil {
- return false, err
- }
-
- val.Set(realVal)
- } else {
- if err := d.decode(name, data, reflect.Indirect(val)); err != nil {
- return false, err
- }
- }
- return false, nil
-}
-
-func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error {
-	// A function value can only be decoded from a value of exactly the same
-	// function type; no conversion is attempted.
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- if val.Type() != dataVal.Type() {
- return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
- }
- val.Set(dataVal)
- return nil
-}
-
-func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataValKind := dataVal.Kind()
- valType := val.Type()
- valElemType := valType.Elem()
- sliceType := reflect.SliceOf(valElemType)
-
- // If we have a non array/slice type then we first attempt to convert.
- if dataValKind != reflect.Array && dataValKind != reflect.Slice {
- if d.config.WeaklyTypedInput {
- switch {
- // Slice and array we use the normal logic
- case dataValKind == reflect.Slice, dataValKind == reflect.Array:
- break
-
- // Empty maps turn into empty slices
- case dataValKind == reflect.Map:
- if dataVal.Len() == 0 {
- val.Set(reflect.MakeSlice(sliceType, 0, 0))
- return nil
- }
- // Create slice of maps of other sizes
- return d.decodeSlice(name, []interface{}{data}, val)
-
- case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8:
- return d.decodeSlice(name, []byte(dataVal.String()), val)
-
- // All other types we try to convert to the slice type
- // and "lift" it into it. i.e. a string becomes a string slice.
- default:
- // Just re-try this function with data as a slice.
- return d.decodeSlice(name, []interface{}{data}, val)
- }
- }
-
- return fmt.Errorf(
- "'%s': source data must be an array or slice, got %s", name, dataValKind)
- }
-
- // If the input value is nil, then don't allocate since empty != nil
- if dataValKind != reflect.Array && dataVal.IsNil() {
- return nil
- }
-
- valSlice := val
- if valSlice.IsNil() || d.config.ZeroFields {
- // Make a new slice to hold our result, same size as the original data.
- valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len())
- } else if valSlice.Len() > dataVal.Len() {
- valSlice = valSlice.Slice(0, dataVal.Len())
- }
-
- // Accumulate any errors
- var errs []error
-
- for i := 0; i < dataVal.Len(); i++ {
- currentData := dataVal.Index(i).Interface()
- for valSlice.Len() <= i {
- valSlice = reflect.Append(valSlice, reflect.Zero(valElemType))
- }
- currentField := valSlice.Index(i)
-
- fieldName := name + "[" + strconv.Itoa(i) + "]"
- if err := d.decode(fieldName, currentData, currentField); err != nil {
- errs = append(errs, err)
- }
- }
-
- // Finally, set the value to the slice we built up
- val.Set(valSlice)
-
- return errors.Join(errs...)
-}
-
-func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
- dataValKind := dataVal.Kind()
- valType := val.Type()
- valElemType := valType.Elem()
- arrayType := reflect.ArrayOf(valType.Len(), valElemType)
-
- valArray := val
-
- if isComparable(valArray) && valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields {
- // Check input type
- if dataValKind != reflect.Array && dataValKind != reflect.Slice {
- if d.config.WeaklyTypedInput {
- switch {
- // Empty maps turn into empty arrays
- case dataValKind == reflect.Map:
- if dataVal.Len() == 0 {
- val.Set(reflect.Zero(arrayType))
- return nil
- }
-
- // All other types we try to convert to the array type
- // and "lift" it into it. i.e. a string becomes a string array.
- default:
- // Just re-try this function with data as a slice.
- return d.decodeArray(name, []interface{}{data}, val)
- }
- }
-
- return fmt.Errorf(
- "'%s': source data must be an array or slice, got %s", name, dataValKind)
-
- }
- if dataVal.Len() > arrayType.Len() {
- return fmt.Errorf(
- "'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len())
- }
-
- // Make a new array to hold our result, same size as the original data.
- valArray = reflect.New(arrayType).Elem()
- }
-
- // Accumulate any errors
- var errs []error
-
- for i := 0; i < dataVal.Len(); i++ {
- currentData := dataVal.Index(i).Interface()
- currentField := valArray.Index(i)
-
- fieldName := name + "[" + strconv.Itoa(i) + "]"
- if err := d.decode(fieldName, currentData, currentField); err != nil {
- errs = append(errs, err)
- }
- }
-
- // Finally, set the value to the array we built up
- val.Set(valArray)
-
- return errors.Join(errs...)
-}
-
-func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error {
- dataVal := reflect.Indirect(reflect.ValueOf(data))
-
- // If the type of the value to write to and the data match directly,
- // then we just set it directly instead of recursing into the structure.
- if dataVal.Type() == val.Type() {
- val.Set(dataVal)
- return nil
- }
-
- dataValKind := dataVal.Kind()
- switch dataValKind {
- case reflect.Map:
- return d.decodeStructFromMap(name, dataVal, val)
-
- case reflect.Struct:
- // Not the most efficient way to do this but we can optimize later if
- // we want to. To convert from struct to struct we go to map first
- // as an intermediary.
-
- // Make a new map to hold our result
- mapType := reflect.TypeOf((map[string]interface{})(nil))
- mval := reflect.MakeMap(mapType)
-
- // Creating a pointer to a map so that other methods can completely
- // overwrite the map if need be (looking at you decodeMapFromMap). The
- // indirection allows the underlying map to be settable (CanSet() == true)
-		// whereas reflect.MakeMap returns an unsettable map.
- addrVal := reflect.New(mval.Type())
-
- reflect.Indirect(addrVal).Set(mval)
- if err := d.decodeMapFromStruct(name, dataVal, reflect.Indirect(addrVal), mval); err != nil {
- return err
- }
-
- result := d.decodeStructFromMap(name, reflect.Indirect(addrVal), val)
- return result
-
- default:
- return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
- }
-}
-
-func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error {
- dataValType := dataVal.Type()
- if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface {
- return fmt.Errorf(
- "'%s' needs a map with string keys, has '%s' keys",
- name, dataValType.Key().Kind())
- }
-
- dataValKeys := make(map[reflect.Value]struct{})
- dataValKeysUnused := make(map[interface{}]struct{})
- for _, dataValKey := range dataVal.MapKeys() {
- dataValKeys[dataValKey] = struct{}{}
- dataValKeysUnused[dataValKey.Interface()] = struct{}{}
- }
-
- targetValKeysUnused := make(map[interface{}]struct{})
-
- var errs []error
-
- // This slice will keep track of all the structs we'll be decoding.
- // There can be more than one struct if there are embedded structs
- // that are squashed.
- structs := make([]reflect.Value, 1, 5)
- structs[0] = val
-
- // Compile the list of all the fields that we're going to be decoding
- // from all the structs.
- type field struct {
- field reflect.StructField
- val reflect.Value
- }
-
- // remainField is set to a valid field set with the "remain" tag if
- // we are keeping track of remaining values.
- var remainField *field
-
- fields := []field{}
- for len(structs) > 0 {
- structVal := structs[0]
- structs = structs[1:]
-
- structType := structVal.Type()
-
- for i := 0; i < structType.NumField(); i++ {
- fieldType := structType.Field(i)
- fieldVal := structVal.Field(i)
- if fieldVal.Kind() == reflect.Ptr && fieldVal.Elem().Kind() == reflect.Struct {
- // Handle embedded struct pointers as embedded structs.
- fieldVal = fieldVal.Elem()
- }
-
- // If "squash" is specified in the tag, we squash the field down.
- squash := d.config.Squash && fieldVal.Kind() == reflect.Struct && fieldType.Anonymous
- remain := false
-
-			// We always parse the tags because we're looking for other tags too
- tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",")
- for _, tag := range tagParts[1:] {
- if tag == d.config.SquashTagOption {
- squash = true
- break
- }
-
- if tag == "remain" {
- remain = true
- break
- }
- }
-
- if squash {
- switch fieldVal.Kind() {
- case reflect.Struct:
- structs = append(structs, fieldVal)
- case reflect.Interface:
- if !fieldVal.IsNil() {
- structs = append(structs, fieldVal.Elem().Elem())
- }
- default:
- errs = append(errs, fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind()))
- }
- continue
- }
-
- // Build our field
- if remain {
- remainField = &field{fieldType, fieldVal}
- } else {
- // Normal struct field, store it away
- fields = append(fields, field{fieldType, fieldVal})
- }
- }
- }
-
- // for fieldType, field := range fields {
- for _, f := range fields {
- field, fieldValue := f.field, f.val
- fieldName := field.Name
-
- tagValue := field.Tag.Get(d.config.TagName)
- if tagValue == "" && d.config.IgnoreUntaggedFields {
- continue
- }
- tagValue = strings.SplitN(tagValue, ",", 2)[0]
- if tagValue != "" {
- fieldName = tagValue
- }
-
- rawMapKey := reflect.ValueOf(fieldName)
- rawMapVal := dataVal.MapIndex(rawMapKey)
- if !rawMapVal.IsValid() {
- // Do a slower search by iterating over each key and
- // doing case-insensitive search.
- for dataValKey := range dataValKeys {
- mK, ok := dataValKey.Interface().(string)
- if !ok {
- // Not a string key
- continue
- }
-
- if d.config.MatchName(mK, fieldName) {
- rawMapKey = dataValKey
- rawMapVal = dataVal.MapIndex(dataValKey)
- break
- }
- }
-
- if !rawMapVal.IsValid() {
- // There was no matching key in the map for the value in
- // the struct. Remember it for potential errors and metadata.
- targetValKeysUnused[fieldName] = struct{}{}
- continue
- }
- }
-
- if !fieldValue.IsValid() {
- // This should never happen
- panic("field is not valid")
- }
-
- // If we can't set the field, then it is unexported or something,
- // and we just continue onwards.
- if !fieldValue.CanSet() {
- continue
- }
-
- // Delete the key we're using from the unused map so we stop tracking
- delete(dataValKeysUnused, rawMapKey.Interface())
-
- // If the name is empty string, then we're at the root, and we
- // don't dot-join the fields.
- if name != "" {
- fieldName = name + "." + fieldName
- }
-
- if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil {
- errs = append(errs, err)
- }
- }
-
- // If we have a "remain"-tagged field and we have unused keys then
- // we put the unused keys directly into the remain field.
- if remainField != nil && len(dataValKeysUnused) > 0 {
- // Build a map of only the unused values
- remain := map[interface{}]interface{}{}
- for key := range dataValKeysUnused {
- remain[key] = dataVal.MapIndex(reflect.ValueOf(key)).Interface()
- }
-
- // Decode it as-if we were just decoding this map onto our map.
- if err := d.decodeMap(name, remain, remainField.val); err != nil {
- errs = append(errs, err)
- }
-
-		// Clear the unused-key set so the ErrorUnused check below does not
-		// report keys that the "remain" field has already absorbed.
- dataValKeysUnused = nil
- }
-
- if d.config.ErrorUnused && len(dataValKeysUnused) > 0 {
- keys := make([]string, 0, len(dataValKeysUnused))
- for rawKey := range dataValKeysUnused {
- keys = append(keys, rawKey.(string))
- }
- sort.Strings(keys)
-
- err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", "))
- errs = append(errs, err)
- }
-
- if d.config.ErrorUnset && len(targetValKeysUnused) > 0 {
- keys := make([]string, 0, len(targetValKeysUnused))
- for rawKey := range targetValKeysUnused {
- keys = append(keys, rawKey.(string))
- }
- sort.Strings(keys)
-
- err := fmt.Errorf("'%s' has unset fields: %s", name, strings.Join(keys, ", "))
- errs = append(errs, err)
- }
-
- if err := errors.Join(errs...); err != nil {
- return err
- }
-
- // Add the unused keys to the list of unused keys if we're tracking metadata
- if d.config.Metadata != nil {
- for rawKey := range dataValKeysUnused {
- key := rawKey.(string)
- if name != "" {
- key = name + "." + key
- }
-
- d.config.Metadata.Unused = append(d.config.Metadata.Unused, key)
- }
- for rawKey := range targetValKeysUnused {
- key := rawKey.(string)
- if name != "" {
- key = name + "." + key
- }
-
- d.config.Metadata.Unset = append(d.config.Metadata.Unset, key)
- }
- }
-
- return nil
-}
-
-func isEmptyValue(v reflect.Value) bool {
- switch getKind(v) {
- case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
- return v.Len() == 0
- case reflect.Bool:
- return !v.Bool()
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return v.Int() == 0
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return v.Uint() == 0
- case reflect.Float32, reflect.Float64:
- return v.Float() == 0
- case reflect.Interface, reflect.Ptr:
- return v.IsNil()
- }
- return false
-}
-
-func getKind(val reflect.Value) reflect.Kind {
- kind := val.Kind()
-
- switch {
- case kind >= reflect.Int && kind <= reflect.Int64:
- return reflect.Int
- case kind >= reflect.Uint && kind <= reflect.Uint64:
- return reflect.Uint
- case kind >= reflect.Float32 && kind <= reflect.Float64:
- return reflect.Float32
- case kind >= reflect.Complex64 && kind <= reflect.Complex128:
- return reflect.Complex64
- default:
- return kind
- }
-}
-
-func isStructTypeConvertibleToMap(typ reflect.Type, checkMapstructureTags bool, tagName string) bool {
- for i := 0; i < typ.NumField(); i++ {
- f := typ.Field(i)
- if f.PkgPath == "" && !checkMapstructureTags { // check for unexported fields
- return true
- }
- if checkMapstructureTags && f.Tag.Get(tagName) != "" { // check for mapstructure tags inside
- return true
- }
- }
- return false
-}
-
-func dereferencePtrToStructIfNeeded(v reflect.Value, tagName string) reflect.Value {
- if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
- return v
- }
- deref := v.Elem()
- derefT := deref.Type()
- if isStructTypeConvertibleToMap(derefT, true, tagName) {
- return deref
- }
- return v
-}
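Tying together the omitempty and squash handling in decodeMapFromStruct above, a rough struct-to-map sketch; the Base and Record types are invented for this example.

	package main

	import (
		"fmt"

		"github.com/go-viper/mapstructure/v2"
	)

	// Base and Record are invented types used only for this illustration.
	type Base struct {
		ID string `mapstructure:"id"`
	}

	type Record struct {
		Base  `mapstructure:",squash"` // embedded fields are flattened into the map
		Name  string `mapstructure:"name"`
		Count int    `mapstructure:",omitempty"` // zero values are dropped
	}

	func main() {
		in := Record{Base: Base{ID: "r1"}, Name: "alpha"} // Count left at its zero value
		out := map[string]interface{}{}
		if err := mapstructure.Decode(in, &out); err != nil {
			panic(err)
		}
		fmt.Println(out) // map[id:r1 name:alpha]
	}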
diff --git a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go
deleted file mode 100644
index d0913fff6c..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go
+++ /dev/null
@@ -1,44 +0,0 @@
-//go:build !go1.20
-
-package mapstructure
-
-import "reflect"
-
-func isComparable(v reflect.Value) bool {
- k := v.Kind()
- switch k {
- case reflect.Invalid:
- return false
-
- case reflect.Array:
- switch v.Type().Elem().Kind() {
- case reflect.Interface, reflect.Array, reflect.Struct:
- for i := 0; i < v.Type().Len(); i++ {
- // if !v.Index(i).Comparable() {
- if !isComparable(v.Index(i)) {
- return false
- }
- }
- return true
- }
- return v.Type().Comparable()
-
- case reflect.Interface:
- // return v.Elem().Comparable()
- return isComparable(v.Elem())
-
- case reflect.Struct:
- for i := 0; i < v.NumField(); i++ {
- return false
-
- // if !v.Field(i).Comparable() {
- if !isComparable(v.Field(i)) {
- return false
- }
- }
- return true
-
- default:
- return v.Type().Comparable()
- }
-}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go b/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go
deleted file mode 100644
index f8255a1b17..0000000000
--- a/vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go
+++ /dev/null
@@ -1,10 +0,0 @@
-//go:build go1.20
-
-package mapstructure
-
-import "reflect"
-
-// TODO: remove once we drop support for Go <1.20
-func isComparable(v reflect.Value) bool {
- return v.Comparable()
-}
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/.go-version b/vendor/github.com/hashicorp/go-retryablehttp/.go-version
index 6fee2fedb0..a1b6e17d61 100644
--- a/vendor/github.com/hashicorp/go-retryablehttp/.go-version
+++ b/vendor/github.com/hashicorp/go-retryablehttp/.go-version
@@ -1 +1 @@
-1.22.2
+1.23
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/.golangci.yml b/vendor/github.com/hashicorp/go-retryablehttp/.golangci.yml
new file mode 100644
index 0000000000..4ff1a93b08
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/.golangci.yml
@@ -0,0 +1,11 @@
+# Copyright (c) HashiCorp, Inc.
+# SPDX-License-Identifier: MPL-2.0
+
+linters:
+ disable-all: true
+ enable:
+ - errcheck
+ - staticcheck
+ - gosimple
+ - govet
+output_format: colored-line-number
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS b/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS
index d6dd78a2dd..85b44a129e 100644
--- a/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS
+++ b/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS
@@ -1 +1,13 @@
-* @hashicorp/go-retryablehttp-maintainers
+# Each line is a file pattern followed by one or more owners.
+# More on CODEOWNERS files: https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners
+
+# Default owner
+* @hashicorp/team-ip-compliance @hashicorp/go-retryablehttp-maintainers
+
+# Add override rules below. Each line is a file/folder pattern followed by one or more owners.
+# Being an owner means those groups or individuals will be added as reviewers to PRs affecting
+# those areas of the code.
+# Examples:
+# /docs/ @docs-team
+# *.js @js-team
+# *.go @go-team
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/Makefile b/vendor/github.com/hashicorp/go-retryablehttp/Makefile
index 5255241961..07b85a1326 100644
--- a/vendor/github.com/hashicorp/go-retryablehttp/Makefile
+++ b/vendor/github.com/hashicorp/go-retryablehttp/Makefile
@@ -2,7 +2,7 @@ default: test
test:
go vet ./...
- go test -v -race ./...
+ go test -v -race ./... -coverprofile=coverage.out
updatedeps:
go get -f -t -u ./...
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/client.go b/vendor/github.com/hashicorp/go-retryablehttp/client.go
index efee53c400..91059f7dec 100644
--- a/vendor/github.com/hashicorp/go-retryablehttp/client.go
+++ b/vendor/github.com/hashicorp/go-retryablehttp/client.go
@@ -638,6 +638,23 @@ func LinearJitterBackoff(min, max time.Duration, attemptNum int, resp *http.Resp
return time.Duration(jitterMin * int64(attemptNum))
}
+// RateLimitLinearJitterBackoff wraps the retryablehttp.LinearJitterBackoff.
+// It first checks if the response status code is http.StatusTooManyRequests
+// (HTTP Code 429) or http.StatusServiceUnavailable (HTTP Code 503). If it is
+// and the response contains a Retry-After response header, it will wait the
+// amount of time specified by the header. Otherwise, this calls
+// LinearJitterBackoff.
+func RateLimitLinearJitterBackoff(min, max time.Duration, attemptNum int, resp *http.Response) time.Duration {
+ if resp != nil {
+ if resp.StatusCode == http.StatusTooManyRequests || resp.StatusCode == http.StatusServiceUnavailable {
+ if sleep, ok := parseRetryAfterHeader(resp.Header["Retry-After"]); ok {
+ return sleep
+ }
+ }
+ }
+ return LinearJitterBackoff(min, max, attemptNum, resp)
+}
+
// PassthroughErrorHandler is an ErrorHandler that directly passes through the
// values from the net/http library for the final request. The body is not
// closed.
@@ -870,11 +887,13 @@ func (c *Client) Head(url string) (*http.Response, error) {
}
// Post is a shortcut for doing a POST request without making a new client.
+// The bodyType parameter sets the "Content-Type" header of the request.
func Post(url, bodyType string, body interface{}) (*http.Response, error) {
return defaultClient.Post(url, bodyType, body)
}
// Post is a convenience method for doing simple POST requests.
+// The bodyType parameter sets the "Content-Type" header of the request.
func (c *Client) Post(url, bodyType string, body interface{}) (*http.Response, error) {
req, err := NewRequest("POST", url, body)
if err != nil {
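For context, a minimal sketch of wiring the new RateLimitLinearJitterBackoff into a client; the endpoint URL is a placeholder and the retry settings are arbitrary.

	package main

	import (
		"log"
		"time"

		retryablehttp "github.com/hashicorp/go-retryablehttp"
	)

	func main() {
		client := retryablehttp.NewClient()
		client.RetryMax = 5
		client.RetryWaitMin = 500 * time.Millisecond
		client.RetryWaitMax = 10 * time.Second
		// Honor Retry-After on 429/503 responses; otherwise fall back to linear jitter.
		client.Backoff = retryablehttp.RateLimitLinearJitterBackoff

		resp, err := client.Get("https://example.invalid/api") // placeholder URL
		if err != nil {
			log.Fatal(err)
		}
		defer resp.Body.Close()
		log.Println(resp.Status)
	}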
diff --git a/vendor/github.com/josharian/intern/README.md b/vendor/github.com/josharian/intern/README.md
deleted file mode 100644
index ffc44b219b..0000000000
--- a/vendor/github.com/josharian/intern/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-Docs: https://godoc.org/github.com/josharian/intern
-
-See also [Go issue 5160](https://golang.org/issue/5160).
-
-License: MIT
diff --git a/vendor/github.com/josharian/intern/intern.go b/vendor/github.com/josharian/intern/intern.go
deleted file mode 100644
index 7acb1fe90a..0000000000
--- a/vendor/github.com/josharian/intern/intern.go
+++ /dev/null
@@ -1,44 +0,0 @@
-// Package intern interns strings.
-// Interning is best effort only.
-// Interned strings may be removed automatically
-// at any time without notification.
-// All functions may be called concurrently
-// with themselves and each other.
-package intern
-
-import "sync"
-
-var (
- pool sync.Pool = sync.Pool{
- New: func() interface{} {
- return make(map[string]string)
- },
- }
-)
-
-// String returns s, interned.
-func String(s string) string {
- m := pool.Get().(map[string]string)
- c, ok := m[s]
- if ok {
- pool.Put(m)
- return c
- }
- m[s] = s
- pool.Put(m)
- return s
-}
-
-// Bytes returns b converted to a string, interned.
-func Bytes(b []byte) string {
- m := pool.Get().(map[string]string)
- c, ok := m[string(b)]
- if ok {
- pool.Put(m)
- return c
- }
- s := string(b)
- m[s] = s
- pool.Put(m)
- return s
-}
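For the record, a tiny sketch of how the dropped intern package was typically used; the interned value here is invented, and easyjson's lexer (removed below) was the actual consumer in this tree.

	package main

	import (
		"fmt"

		"github.com/josharian/intern"
	)

	func main() {
		// Interning is best effort: equal strings built from different inputs
		// may end up sharing one backing allocation, which can cut allocations
		// in hot parse paths.
		a := intern.Bytes([]byte("content-type"))
		b := intern.String("content-type")
		fmt.Println(a == b) // true (value equality either way)
	}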
diff --git a/vendor/github.com/josharian/intern/license.md b/vendor/github.com/josharian/intern/license.md
deleted file mode 100644
index 353d3055f0..0000000000
--- a/vendor/github.com/josharian/intern/license.md
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2019 Josh Bleecher Snyder
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/vendor/github.com/mailru/easyjson/LICENSE b/vendor/github.com/mailru/easyjson/LICENSE
deleted file mode 100644
index fbff658f70..0000000000
--- a/vendor/github.com/mailru/easyjson/LICENSE
+++ /dev/null
@@ -1,7 +0,0 @@
-Copyright (c) 2016 Mail.Ru Group
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/mailru/easyjson/buffer/pool.go b/vendor/github.com/mailru/easyjson/buffer/pool.go
deleted file mode 100644
index 598a54af9d..0000000000
--- a/vendor/github.com/mailru/easyjson/buffer/pool.go
+++ /dev/null
@@ -1,278 +0,0 @@
-// Package buffer implements a buffer for serialization, consisting of a chain of []byte-s to
-// reduce copying and to allow reuse of individual chunks.
-package buffer
-
-import (
- "io"
- "net"
- "sync"
-)
-
-// PoolConfig contains configuration for the allocation and reuse strategy.
-type PoolConfig struct {
- StartSize int // Minimum chunk size that is allocated.
- PooledSize int // Minimum chunk size that is reused, reusing chunks too small will result in overhead.
- MaxSize int // Maximum chunk size that will be allocated.
-}
-
-var config = PoolConfig{
- StartSize: 128,
- PooledSize: 512,
- MaxSize: 32768,
-}
-
-// Reuse pool: chunk size -> pool.
-var buffers = map[int]*sync.Pool{}
-
-func initBuffers() {
- for l := config.PooledSize; l <= config.MaxSize; l *= 2 {
- buffers[l] = new(sync.Pool)
- }
-}
-
-func init() {
- initBuffers()
-}
-
-// Init sets up a non-default pooling and allocation strategy. Should be run before serialization is done.
-func Init(cfg PoolConfig) {
- config = cfg
- initBuffers()
-}
-
-// putBuf puts a chunk to reuse pool if it can be reused.
-func putBuf(buf []byte) {
- size := cap(buf)
- if size < config.PooledSize {
- return
- }
- if c := buffers[size]; c != nil {
- c.Put(buf[:0])
- }
-}
-
-// getBuf gets a chunk from reuse pool or creates a new one if reuse failed.
-func getBuf(size int) []byte {
- if size >= config.PooledSize {
- if c := buffers[size]; c != nil {
- v := c.Get()
- if v != nil {
- return v.([]byte)
- }
- }
- }
- return make([]byte, 0, size)
-}
-
-// Buffer is a buffer optimized for serialization without extra copying.
-type Buffer struct {
-
- // Buf is the current chunk that can be used for serialization.
- Buf []byte
-
- toPool []byte
- bufs [][]byte
-}
-
-// EnsureSpace makes sure that the current chunk contains at least s free bytes,
-// possibly creating a new chunk.
-func (b *Buffer) EnsureSpace(s int) {
- if cap(b.Buf)-len(b.Buf) < s {
- b.ensureSpaceSlow(s)
- }
-}
-
-func (b *Buffer) ensureSpaceSlow(s int) {
- l := len(b.Buf)
- if l > 0 {
- if cap(b.toPool) != cap(b.Buf) {
- // Chunk was reallocated, toPool can be pooled.
- putBuf(b.toPool)
- }
- if cap(b.bufs) == 0 {
- b.bufs = make([][]byte, 0, 8)
- }
- b.bufs = append(b.bufs, b.Buf)
- l = cap(b.toPool) * 2
- } else {
- l = config.StartSize
- }
-
- if l > config.MaxSize {
- l = config.MaxSize
- }
- b.Buf = getBuf(l)
- b.toPool = b.Buf
-}
-
-// AppendByte appends a single byte to buffer.
-func (b *Buffer) AppendByte(data byte) {
- b.EnsureSpace(1)
- b.Buf = append(b.Buf, data)
-}
-
-// AppendBytes appends a byte slice to buffer.
-func (b *Buffer) AppendBytes(data []byte) {
- if len(data) <= cap(b.Buf)-len(b.Buf) {
- b.Buf = append(b.Buf, data...) // fast path
- } else {
- b.appendBytesSlow(data)
- }
-}
-
-func (b *Buffer) appendBytesSlow(data []byte) {
- for len(data) > 0 {
- b.EnsureSpace(1)
-
- sz := cap(b.Buf) - len(b.Buf)
- if sz > len(data) {
- sz = len(data)
- }
-
- b.Buf = append(b.Buf, data[:sz]...)
- data = data[sz:]
- }
-}
-
-// AppendString appends a string to buffer.
-func (b *Buffer) AppendString(data string) {
- if len(data) <= cap(b.Buf)-len(b.Buf) {
- b.Buf = append(b.Buf, data...) // fast path
- } else {
- b.appendStringSlow(data)
- }
-}
-
-func (b *Buffer) appendStringSlow(data string) {
- for len(data) > 0 {
- b.EnsureSpace(1)
-
- sz := cap(b.Buf) - len(b.Buf)
- if sz > len(data) {
- sz = len(data)
- }
-
- b.Buf = append(b.Buf, data[:sz]...)
- data = data[sz:]
- }
-}
-
-// Size computes the size of a buffer by adding sizes of every chunk.
-func (b *Buffer) Size() int {
- size := len(b.Buf)
- for _, buf := range b.bufs {
- size += len(buf)
- }
- return size
-}
-
-// DumpTo outputs the contents of a buffer to a writer and resets the buffer.
-func (b *Buffer) DumpTo(w io.Writer) (written int, err error) {
- bufs := net.Buffers(b.bufs)
- if len(b.Buf) > 0 {
- bufs = append(bufs, b.Buf)
- }
- n, err := bufs.WriteTo(w)
-
- for _, buf := range b.bufs {
- putBuf(buf)
- }
- putBuf(b.toPool)
-
- b.bufs = nil
- b.Buf = nil
- b.toPool = nil
-
- return int(n), err
-}
-
-// BuildBytes creates a single byte slice with all the contents of the buffer. Data is
-// copied if it does not fit in a single chunk. You can optionally provide one byte
-// slice as argument that it will try to reuse.
-func (b *Buffer) BuildBytes(reuse ...[]byte) []byte {
- if len(b.bufs) == 0 {
- ret := b.Buf
- b.toPool = nil
- b.Buf = nil
- return ret
- }
-
- var ret []byte
- size := b.Size()
-
- // If we got a buffer as argument and it is big enough, reuse it.
- if len(reuse) == 1 && cap(reuse[0]) >= size {
- ret = reuse[0][:0]
- } else {
- ret = make([]byte, 0, size)
- }
- for _, buf := range b.bufs {
- ret = append(ret, buf...)
- putBuf(buf)
- }
-
- ret = append(ret, b.Buf...)
- putBuf(b.toPool)
-
- b.bufs = nil
- b.toPool = nil
- b.Buf = nil
-
- return ret
-}
-
-type readCloser struct {
- offset int
- bufs [][]byte
-}
-
-func (r *readCloser) Read(p []byte) (n int, err error) {
- for _, buf := range r.bufs {
- // Copy as much as we can.
- x := copy(p[n:], buf[r.offset:])
- n += x // Increment how much we filled.
-
- // Did we empty the whole buffer?
- if r.offset+x == len(buf) {
- // On to the next buffer.
- r.offset = 0
- r.bufs = r.bufs[1:]
-
- // We can release this buffer.
- putBuf(buf)
- } else {
- r.offset += x
- }
-
- if n == len(p) {
- break
- }
- }
- // No buffers left or nothing read?
- if len(r.bufs) == 0 {
- err = io.EOF
- }
- return
-}
-
-func (r *readCloser) Close() error {
- // Release all remaining buffers.
- for _, buf := range r.bufs {
- putBuf(buf)
- }
- // In case Close gets called multiple times.
- r.bufs = nil
-
- return nil
-}
-
-// ReadCloser creates an io.ReadCloser with all the contents of the buffer.
-func (b *Buffer) ReadCloser() io.ReadCloser {
- ret := &readCloser{0, append(b.bufs, b.Buf)}
-
- b.bufs = nil
- b.toPool = nil
- b.Buf = nil
-
- return ret
-}
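To document what is being removed, a small usage sketch of the chunked Buffer above; the writer and payload are illustrative only.

	package main

	import (
		"fmt"
		"os"

		"github.com/mailru/easyjson/buffer"
	)

	func main() {
		var b buffer.Buffer

		// Appends fill the current chunk; new chunks are pulled from the pool as needed.
		b.AppendString(`{"name":`)
		b.AppendString(`"bob"}`)
		b.AppendByte('\n')

		fmt.Println("size:", b.Size()) // total bytes across all chunks

		// DumpTo writes every chunk to the writer, returns the chunks to the
		// pool, and resets the buffer.
		if _, err := b.DumpTo(os.Stdout); err != nil {
			panic(err)
		}
	}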
diff --git a/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go b/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
deleted file mode 100644
index e68108f868..0000000000
--- a/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// This file is only included in the build if neither the
-// easyjson_nounsafe nor the appengine build tag is set. See README notes
-// for more details.
-
-//+build !easyjson_nounsafe
-//+build !appengine
-
-package jlexer
-
-import (
- "unsafe"
-)
-
-// bytesToStr creates a string pointing at the slice to avoid copying.
-//
-// Warning: the string returned by the function should be used with care, as the whole input data
-// chunk may be kept from being freed by the GC because of a single string, or buffer.Data may be
-// garbage-collected even while the string still exists.
-func bytesToStr(data []byte) string {
- return *(*string)(unsafe.Pointer(&data))
-}
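
The unsafe conversion above is compiled in by default; the nounsafe variant in the next file is selected via build tags. As a rough example, opting out would look like:

```shell
go build -tags easyjson_nounsafe ./...
```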
diff --git a/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go b/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
deleted file mode 100644
index 864d1be676..0000000000
--- a/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// This file is included in the build if any of the build tags below
-// are defined. Refer to README notes for more details.
-
-//+build easyjson_nounsafe appengine
-
-package jlexer
-
-// bytesToStr creates a string normally from []byte
-//
-// Note that this method is roughly 1.5x slower than using the 'unsafe' method.
-func bytesToStr(data []byte) string {
- return string(data)
-}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/error.go b/vendor/github.com/mailru/easyjson/jlexer/error.go
deleted file mode 100644
index e90ec40d05..0000000000
--- a/vendor/github.com/mailru/easyjson/jlexer/error.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package jlexer
-
-import "fmt"
-
-// LexerError implements the error interface and represents all possible errors that can be
-// generated during parsing the JSON data.
-type LexerError struct {
- Reason string
- Offset int
- Data string
-}
-
-func (l *LexerError) Error() string {
- return fmt.Sprintf("parse error: %s near offset %d of '%s'", l.Reason, l.Offset, l.Data)
-}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/lexer.go b/vendor/github.com/mailru/easyjson/jlexer/lexer.go
deleted file mode 100644
index a27705b12b..0000000000
--- a/vendor/github.com/mailru/easyjson/jlexer/lexer.go
+++ /dev/null
@@ -1,1257 +0,0 @@
-// Package jlexer contains a JSON lexer implementation.
-//
-// It is expected that it is mostly used with generated parser code, so the interface is tuned
-// for a parser that knows what kind of data is expected.
-package jlexer
-
-import (
- "bytes"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
- "io"
- "strconv"
- "unicode"
- "unicode/utf16"
- "unicode/utf8"
-
- "github.com/josharian/intern"
-)
-
-// TokenKind determines type of a token.
-type TokenKind byte
-
-const (
- TokenUndef TokenKind = iota // No token.
- TokenDelim // Delimiter: one of '{', '}', '[' or ']'.
- TokenString // A string literal, e.g. "abc\u1234"
- TokenNumber // Number literal, e.g. 1.5e5
- TokenBool // Boolean literal: true or false.
- TokenNull // null keyword.
-)
-
-// token describes a single token: type, position in the input and value.
-type token struct {
- kind TokenKind // Type of a token.
-
- boolValue bool // Value if a boolean literal token.
- byteValueCloned bool // true if byteValue was allocated and does not refer to original json body
- byteValue []byte // Raw value of a token.
- delimValue byte
-}
-
-// Lexer is a JSON lexer: it iterates over JSON tokens in a byte slice.
-type Lexer struct {
- Data []byte // Input data given to the lexer.
-
- start int // Start of the current token.
- pos int // Current unscanned position in the input stream.
- token token // Last scanned token, if token.kind != TokenUndef.
-
- firstElement bool // Whether the current element is the first in an array or object.
- wantSep byte // A comma or colon character that needs to occur before the next token.
-
- UseMultipleErrors bool // If we want to use multiple errors.
- fatalError error // Fatal error occurred during lexing. It is usually a syntax error.
- multipleErrors []*LexerError // Semantic errors that occurred during lexing. Parsing continues after these errors are found.
-}
-
-// FetchToken scans the input for the next token.
-func (r *Lexer) FetchToken() {
- r.token.kind = TokenUndef
- r.start = r.pos
-
- // Check that r.Data still has an element at r.pos.
- // If it doesn't, the input data is corrupted.
- if len(r.Data) < r.pos {
- r.errParse("Unexpected end of data")
- return
- }
- // Determine the type of a token by skipping whitespace and reading the
- // first character.
- for _, c := range r.Data[r.pos:] {
- switch c {
- case ':', ',':
- if r.wantSep == c {
- r.pos++
- r.start++
- r.wantSep = 0
- } else {
- r.errSyntax()
- }
-
- case ' ', '\t', '\r', '\n':
- r.pos++
- r.start++
-
- case '"':
- if r.wantSep != 0 {
- r.errSyntax()
- }
-
- r.token.kind = TokenString
- r.fetchString()
- return
-
- case '{', '[':
- if r.wantSep != 0 {
- r.errSyntax()
- }
- r.firstElement = true
- r.token.kind = TokenDelim
- r.token.delimValue = r.Data[r.pos]
- r.pos++
- return
-
- case '}', ']':
- if !r.firstElement && (r.wantSep != ',') {
- r.errSyntax()
- }
- r.wantSep = 0
- r.token.kind = TokenDelim
- r.token.delimValue = r.Data[r.pos]
- r.pos++
- return
-
- case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-':
- if r.wantSep != 0 {
- r.errSyntax()
- }
- r.token.kind = TokenNumber
- r.fetchNumber()
- return
-
- case 'n':
- if r.wantSep != 0 {
- r.errSyntax()
- }
-
- r.token.kind = TokenNull
- r.fetchNull()
- return
-
- case 't':
- if r.wantSep != 0 {
- r.errSyntax()
- }
-
- r.token.kind = TokenBool
- r.token.boolValue = true
- r.fetchTrue()
- return
-
- case 'f':
- if r.wantSep != 0 {
- r.errSyntax()
- }
-
- r.token.kind = TokenBool
- r.token.boolValue = false
- r.fetchFalse()
- return
-
- default:
- r.errSyntax()
- return
- }
- }
- r.fatalError = io.EOF
- return
-}
-
-// isTokenEnd returns true if the char can follow a non-delimiter token
-func isTokenEnd(c byte) bool {
- return c == ' ' || c == '\t' || c == '\r' || c == '\n' || c == '[' || c == ']' || c == '{' || c == '}' || c == ',' || c == ':'
-}
-
-// fetchNull fetches and checks remaining bytes of null keyword.
-func (r *Lexer) fetchNull() {
- r.pos += 4
- if r.pos > len(r.Data) ||
- r.Data[r.pos-3] != 'u' ||
- r.Data[r.pos-2] != 'l' ||
- r.Data[r.pos-1] != 'l' ||
- (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
-
- r.pos -= 4
- r.errSyntax()
- }
-}
-
-// fetchTrue fetches and checks remaining bytes of true keyword.
-func (r *Lexer) fetchTrue() {
- r.pos += 4
- if r.pos > len(r.Data) ||
- r.Data[r.pos-3] != 'r' ||
- r.Data[r.pos-2] != 'u' ||
- r.Data[r.pos-1] != 'e' ||
- (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
-
- r.pos -= 4
- r.errSyntax()
- }
-}
-
-// fetchFalse fetches and checks remaining bytes of false keyword.
-func (r *Lexer) fetchFalse() {
- r.pos += 5
- if r.pos > len(r.Data) ||
- r.Data[r.pos-4] != 'a' ||
- r.Data[r.pos-3] != 'l' ||
- r.Data[r.pos-2] != 's' ||
- r.Data[r.pos-1] != 'e' ||
- (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
-
- r.pos -= 5
- r.errSyntax()
- }
-}
-
-// fetchNumber scans a number literal token.
-func (r *Lexer) fetchNumber() {
- hasE := false
- afterE := false
- hasDot := false
-
- r.pos++
- for i, c := range r.Data[r.pos:] {
- switch {
- case c >= '0' && c <= '9':
- afterE = false
- case c == '.' && !hasDot:
- hasDot = true
- case (c == 'e' || c == 'E') && !hasE:
- hasE = true
- hasDot = true
- afterE = true
- case (c == '+' || c == '-') && afterE:
- afterE = false
- default:
- r.pos += i
- if !isTokenEnd(c) {
- r.errSyntax()
- } else {
- r.token.byteValue = r.Data[r.start:r.pos]
- }
- return
- }
- }
-
- r.pos = len(r.Data)
- r.token.byteValue = r.Data[r.start:]
-}
-
-// findStringLen scans the string literal for the ending quote character to determine the required size.
-// The size is exact if no escapes are present and may be inexact if there are escaped chars.
-func findStringLen(data []byte) (isValid bool, length int) {
- for {
- idx := bytes.IndexByte(data, '"')
- if idx == -1 {
- return false, len(data)
- }
- if idx == 0 || (idx > 0 && data[idx-1] != '\\') {
- return true, length + idx
- }
-
- // Count the run of preceding backslashes: an even number means the quote is not actually escaped.
- cnt := 1
- for idx-cnt-1 >= 0 && data[idx-cnt-1] == '\\' {
- cnt++
- }
- if cnt%2 == 0 {
- return true, length + idx
- }
-
- length += idx + 1
- data = data[idx+1:]
- }
-}
-
-// unescapeStringToken unescapes a string token.
-// If no escaping is needed, the original bytes are kept; otherwise a new slice is allocated.
-func (r *Lexer) unescapeStringToken() (err error) {
- data := r.token.byteValue
- var unescapedData []byte
-
- for {
- i := bytes.IndexByte(data, '\\')
- if i == -1 {
- break
- }
-
- escapedRune, escapedBytes, err := decodeEscape(data[i:])
- if err != nil {
- r.errParse(err.Error())
- return err
- }
-
- if unescapedData == nil {
- unescapedData = make([]byte, 0, len(r.token.byteValue))
- }
-
- var d [4]byte
- s := utf8.EncodeRune(d[:], escapedRune)
- unescapedData = append(unescapedData, data[:i]...)
- unescapedData = append(unescapedData, d[:s]...)
-
- data = data[i+escapedBytes:]
- }
-
- if unescapedData != nil {
- r.token.byteValue = append(unescapedData, data...)
- r.token.byteValueCloned = true
- }
- return
-}
-
-// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
-// or it returns -1.
-func getu4(s []byte) rune {
- if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
- return -1
- }
- var val rune
- for i := 2; i < len(s) && i < 6; i++ {
- var v byte
- c := s[i]
- switch c {
- case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
- v = c - '0'
- case 'a', 'b', 'c', 'd', 'e', 'f':
- v = c - 'a' + 10
- case 'A', 'B', 'C', 'D', 'E', 'F':
- v = c - 'A' + 10
- default:
- return -1
- }
-
- val <<= 4
- val |= rune(v)
- }
- return val
-}
-
-// decodeEscape processes a single escape sequence and returns number of bytes processed.
-func decodeEscape(data []byte) (decoded rune, bytesProcessed int, err error) {
- if len(data) < 2 {
- return 0, 0, errors.New("incorrect escape symbol \\ at the end of token")
- }
-
- c := data[1]
- switch c {
- case '"', '/', '\\':
- return rune(c), 2, nil
- case 'b':
- return '\b', 2, nil
- case 'f':
- return '\f', 2, nil
- case 'n':
- return '\n', 2, nil
- case 'r':
- return '\r', 2, nil
- case 't':
- return '\t', 2, nil
- case 'u':
- rr := getu4(data)
- if rr < 0 {
- return 0, 0, errors.New("incorrectly escaped \\uXXXX sequence")
- }
-
- read := 6
- if utf16.IsSurrogate(rr) {
- rr1 := getu4(data[read:])
- if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
- read += 6
- rr = dec
- } else {
- rr = unicode.ReplacementChar
- }
- }
- return rr, read, nil
- }
-
- return 0, 0, errors.New("incorrectly escaped bytes")
-}
-
-// fetchString scans a string literal token.
-func (r *Lexer) fetchString() {
- r.pos++
- data := r.Data[r.pos:]
-
- isValid, length := findStringLen(data)
- if !isValid {
- r.pos += length
- r.errParse("unterminated string literal")
- return
- }
- r.token.byteValue = data[:length]
- r.pos += length + 1 // skip closing '"' as well
-}
-
-// scanToken scans the next token if no token is currently available in the lexer.
-func (r *Lexer) scanToken() {
- if r.token.kind != TokenUndef || r.fatalError != nil {
- return
- }
-
- r.FetchToken()
-}
-
-// consume resets the current token to allow scanning the next one.
-func (r *Lexer) consume() {
- r.token.kind = TokenUndef
- r.token.byteValueCloned = false
- r.token.delimValue = 0
-}
-
-// Ok returns true if no error (including io.EOF) was encountered during scanning.
-func (r *Lexer) Ok() bool {
- return r.fatalError == nil
-}
-
-const maxErrorContextLen = 13
-
-func (r *Lexer) errParse(what string) {
- if r.fatalError == nil {
- var str string
- if len(r.Data)-r.pos <= maxErrorContextLen {
- str = string(r.Data)
- } else {
- str = string(r.Data[r.pos:r.pos+maxErrorContextLen-3]) + "..."
- }
- r.fatalError = &LexerError{
- Reason: what,
- Offset: r.pos,
- Data: str,
- }
- }
-}
-
-func (r *Lexer) errSyntax() {
- r.errParse("syntax error")
-}
-
-func (r *Lexer) errInvalidToken(expected string) {
- if r.fatalError != nil {
- return
- }
- if r.UseMultipleErrors {
- r.pos = r.start
- r.consume()
- r.SkipRecursive()
- switch expected {
- case "[":
- r.token.delimValue = ']'
- r.token.kind = TokenDelim
- case "{":
- r.token.delimValue = '}'
- r.token.kind = TokenDelim
- }
- r.addNonfatalError(&LexerError{
- Reason: fmt.Sprintf("expected %s", expected),
- Offset: r.start,
- Data: string(r.Data[r.start:r.pos]),
- })
- return
- }
-
- var str string
- if len(r.token.byteValue) <= maxErrorContextLen {
- str = string(r.token.byteValue)
- } else {
- str = string(r.token.byteValue[:maxErrorContextLen-3]) + "..."
- }
- r.fatalError = &LexerError{
- Reason: fmt.Sprintf("expected %s", expected),
- Offset: r.pos,
- Data: str,
- }
-}
-
-func (r *Lexer) GetPos() int {
- return r.pos
-}
-
-// Delim consumes a token and verifies that it is the given delimiter.
-func (r *Lexer) Delim(c byte) {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
-
- if !r.Ok() || r.token.delimValue != c {
- r.consume() // errInvalidToken can change token if UseMultipleErrors is enabled.
- r.errInvalidToken(string([]byte{c}))
- } else {
- r.consume()
- }
-}
-
-// IsDelim returns true if there was no scanning error and next token is the given delimiter.
-func (r *Lexer) IsDelim(c byte) bool {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- return !r.Ok() || r.token.delimValue == c
-}
-
-// Null verifies that the next token is null and consumes it.
-func (r *Lexer) Null() {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenNull {
- r.errInvalidToken("null")
- }
- r.consume()
-}
-
-// IsNull returns true if the next token is a null keyword.
-func (r *Lexer) IsNull() bool {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- return r.Ok() && r.token.kind == TokenNull
-}
-
-// Skip skips a single token.
-func (r *Lexer) Skip() {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- r.consume()
-}
-
-// SkipRecursive skips the next array or object completely, or just skips a single token if it is not
-// an array/object.
-//
-// Note: no syntax validation is performed on the skipped data.
-func (r *Lexer) SkipRecursive() {
- r.scanToken()
- var start, end byte
- startPos := r.start
-
- switch r.token.delimValue {
- case '{':
- start, end = '{', '}'
- case '[':
- start, end = '[', ']'
- default:
- r.consume()
- return
- }
-
- r.consume()
-
- level := 1
- inQuotes := false
- wasEscape := false
-
- for i, c := range r.Data[r.pos:] {
- switch {
- case c == start && !inQuotes:
- level++
- case c == end && !inQuotes:
- level--
- if level == 0 {
- r.pos += i + 1
- if !json.Valid(r.Data[startPos:r.pos]) {
- r.pos = len(r.Data)
- r.fatalError = &LexerError{
- Reason: "skipped array/object json value is invalid",
- Offset: r.pos,
- Data: string(r.Data[r.pos:]),
- }
- }
- return
- }
- case c == '\\' && inQuotes:
- wasEscape = !wasEscape
- continue
- case c == '"' && inQuotes:
- inQuotes = wasEscape
- case c == '"':
- inQuotes = true
- }
- wasEscape = false
- }
- r.pos = len(r.Data)
- r.fatalError = &LexerError{
- Reason: "EOF reached while skipping array/object or token",
- Offset: r.pos,
- Data: string(r.Data[r.pos:]),
- }
-}
-
-// Raw fetches the next item recursively as a data slice
-func (r *Lexer) Raw() []byte {
- r.SkipRecursive()
- if !r.Ok() {
- return nil
- }
- return r.Data[r.start:r.pos]
-}
-
-// IsStart returns whether the lexer is positioned at the start
-// of an input string.
-func (r *Lexer) IsStart() bool {
- return r.pos == 0
-}
-
-// Consumed reads all remaining bytes from the input, publishing an error if
-// there is anything but whitespace remaining.
-func (r *Lexer) Consumed() {
- if r.pos > len(r.Data) || !r.Ok() {
- return
- }
-
- for _, c := range r.Data[r.pos:] {
- if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
- r.AddError(&LexerError{
- Reason: "invalid character '" + string(c) + "' after top-level value",
- Offset: r.pos,
- Data: string(r.Data[r.pos:]),
- })
- return
- }
-
- r.pos++
- r.start++
- }
-}
-
-func (r *Lexer) unsafeString(skipUnescape bool) (string, []byte) {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenString {
- r.errInvalidToken("string")
- return "", nil
- }
- if !skipUnescape {
- if err := r.unescapeStringToken(); err != nil {
- r.errInvalidToken("string")
- return "", nil
- }
- }
-
- bytes := r.token.byteValue
- ret := bytesToStr(r.token.byteValue)
- r.consume()
- return ret, bytes
-}
-
-// UnsafeString returns the string value if the token is a string literal.
-//
-// Warning: returned string may point to the input buffer, so the string should not outlive
-// the input buffer. Intended pattern of usage is as an argument to a switch statement.
-func (r *Lexer) UnsafeString() string {
- ret, _ := r.unsafeString(false)
- return ret
-}
-
-// UnsafeBytes returns the byte slice if the token is a string literal.
-func (r *Lexer) UnsafeBytes() []byte {
- _, ret := r.unsafeString(false)
- return ret
-}
-
-// UnsafeFieldName returns current member name string token
-func (r *Lexer) UnsafeFieldName(skipUnescape bool) string {
- ret, _ := r.unsafeString(skipUnescape)
- return ret
-}
-
-// String reads a string literal.
-func (r *Lexer) String() string {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenString {
- r.errInvalidToken("string")
- return ""
- }
- if err := r.unescapeStringToken(); err != nil {
- r.errInvalidToken("string")
- return ""
- }
- var ret string
- if r.token.byteValueCloned {
- ret = bytesToStr(r.token.byteValue)
- } else {
- ret = string(r.token.byteValue)
- }
- r.consume()
- return ret
-}
-
-// StringIntern reads a string literal, and performs string interning on it.
-func (r *Lexer) StringIntern() string {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenString {
- r.errInvalidToken("string")
- return ""
- }
- if err := r.unescapeStringToken(); err != nil {
- r.errInvalidToken("string")
- return ""
- }
- ret := intern.Bytes(r.token.byteValue)
- r.consume()
- return ret
-}
-
-// Bytes reads a string literal and base64 decodes it into a byte slice.
-func (r *Lexer) Bytes() []byte {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenString {
- r.errInvalidToken("string")
- return nil
- }
- if err := r.unescapeStringToken(); err != nil {
- r.errInvalidToken("string")
- return nil
- }
- ret := make([]byte, base64.StdEncoding.DecodedLen(len(r.token.byteValue)))
- n, err := base64.StdEncoding.Decode(ret, r.token.byteValue)
- if err != nil {
- r.fatalError = &LexerError{
- Reason: err.Error(),
- }
- return nil
- }
-
- r.consume()
- return ret[:n]
-}
-
-// Bool reads a true or false boolean keyword.
-func (r *Lexer) Bool() bool {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenBool {
- r.errInvalidToken("bool")
- return false
- }
- ret := r.token.boolValue
- r.consume()
- return ret
-}
-
-func (r *Lexer) number() string {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() || r.token.kind != TokenNumber {
- r.errInvalidToken("number")
- return ""
- }
- ret := bytesToStr(r.token.byteValue)
- r.consume()
- return ret
-}
-
-func (r *Lexer) Uint8() uint8 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 8)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return uint8(n)
-}
-
-func (r *Lexer) Uint16() uint16 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 16)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return uint16(n)
-}
-
-func (r *Lexer) Uint32() uint32 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 32)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return uint32(n)
-}
-
-func (r *Lexer) Uint64() uint64 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 64)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return n
-}
-
-func (r *Lexer) Uint() uint {
- return uint(r.Uint64())
-}
-
-func (r *Lexer) Int8() int8 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 8)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return int8(n)
-}
-
-func (r *Lexer) Int16() int16 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 16)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return int16(n)
-}
-
-func (r *Lexer) Int32() int32 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 32)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return int32(n)
-}
-
-func (r *Lexer) Int64() int64 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 64)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return n
-}
-
-func (r *Lexer) Int() int {
- return int(r.Int64())
-}
-
-func (r *Lexer) Uint8Str() uint8 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 8)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return uint8(n)
-}
-
-func (r *Lexer) Uint16Str() uint16 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 16)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return uint16(n)
-}
-
-func (r *Lexer) Uint32Str() uint32 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 32)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return uint32(n)
-}
-
-func (r *Lexer) Uint64Str() uint64 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseUint(s, 10, 64)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return n
-}
-
-func (r *Lexer) UintStr() uint {
- return uint(r.Uint64Str())
-}
-
-func (r *Lexer) UintptrStr() uintptr {
- return uintptr(r.Uint64Str())
-}
-
-func (r *Lexer) Int8Str() int8 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 8)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return int8(n)
-}
-
-func (r *Lexer) Int16Str() int16 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 16)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return int16(n)
-}
-
-func (r *Lexer) Int32Str() int32 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 32)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return int32(n)
-}
-
-func (r *Lexer) Int64Str() int64 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseInt(s, 10, 64)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return n
-}
-
-func (r *Lexer) IntStr() int {
- return int(r.Int64Str())
-}
-
-func (r *Lexer) Float32() float32 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseFloat(s, 32)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return float32(n)
-}
-
-func (r *Lexer) Float32Str() float32 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
- n, err := strconv.ParseFloat(s, 32)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return float32(n)
-}
-
-func (r *Lexer) Float64() float64 {
- s := r.number()
- if !r.Ok() {
- return 0
- }
-
- n, err := strconv.ParseFloat(s, 64)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: s,
- })
- }
- return n
-}
-
-func (r *Lexer) Float64Str() float64 {
- s, b := r.unsafeString(false)
- if !r.Ok() {
- return 0
- }
- n, err := strconv.ParseFloat(s, 64)
- if err != nil {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Reason: err.Error(),
- Data: string(b),
- })
- }
- return n
-}
-
-func (r *Lexer) Error() error {
- return r.fatalError
-}
-
-func (r *Lexer) AddError(e error) {
- if r.fatalError == nil {
- r.fatalError = e
- }
-}
-
-func (r *Lexer) AddNonFatalError(e error) {
- r.addNonfatalError(&LexerError{
- Offset: r.start,
- Data: string(r.Data[r.start:r.pos]),
- Reason: e.Error(),
- })
-}
-
-func (r *Lexer) addNonfatalError(err *LexerError) {
- if r.UseMultipleErrors {
- // We don't want to add errors with the same offset.
- if len(r.multipleErrors) != 0 && r.multipleErrors[len(r.multipleErrors)-1].Offset == err.Offset {
- return
- }
- r.multipleErrors = append(r.multipleErrors, err)
- return
- }
- r.fatalError = err
-}
-
-func (r *Lexer) GetNonFatalErrors() []*LexerError {
- return r.multipleErrors
-}
-
-// JsonNumber fetches a json.Number from the 'encoding/json' package.
-// Ints, floats, and strings containing them are all valid values.
-func (r *Lexer) JsonNumber() json.Number {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
- if !r.Ok() {
- r.errInvalidToken("json.Number")
- return json.Number("")
- }
-
- switch r.token.kind {
- case TokenString:
- return json.Number(r.String())
- case TokenNumber:
- return json.Number(r.Raw())
- case TokenNull:
- r.Null()
- return json.Number("")
- default:
- r.errSyntax()
- return json.Number("")
- }
-}
-
-// Interface fetches an interface{} analogous to the 'encoding/json' package.
-func (r *Lexer) Interface() interface{} {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
-
- if !r.Ok() {
- return nil
- }
- switch r.token.kind {
- case TokenString:
- return r.String()
- case TokenNumber:
- return r.Float64()
- case TokenBool:
- return r.Bool()
- case TokenNull:
- r.Null()
- return nil
- }
-
- if r.token.delimValue == '{' {
- r.consume()
-
- ret := map[string]interface{}{}
- for !r.IsDelim('}') {
- key := r.String()
- r.WantColon()
- ret[key] = r.Interface()
- r.WantComma()
- }
- r.Delim('}')
-
- if r.Ok() {
- return ret
- } else {
- return nil
- }
- } else if r.token.delimValue == '[' {
- r.consume()
-
- ret := []interface{}{}
- for !r.IsDelim(']') {
- ret = append(ret, r.Interface())
- r.WantComma()
- }
- r.Delim(']')
-
- if r.Ok() {
- return ret
- } else {
- return nil
- }
- }
- r.errSyntax()
- return nil
-}
-
-// WantComma requires a comma to be present before fetching next token.
-func (r *Lexer) WantComma() {
- r.wantSep = ','
- r.firstElement = false
-}
-
-// WantColon requires a colon to be present before fetching next token.
-func (r *Lexer) WantColon() {
- r.wantSep = ':'
- r.firstElement = false
-}
-
-// CurrentToken returns current token kind if there were no errors and TokenUndef otherwise
-func (r *Lexer) CurrentToken() TokenKind {
- if r.token.kind == TokenUndef && r.Ok() {
- r.FetchToken()
- }
-
- if !r.Ok() {
- return TokenUndef
- }
-
- return r.token.kind
-}
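
To make the removed API concrete, here is a small hand-written sketch of the lexer-driven decoding loop that easyjson's generated parsers follow; the decodePoint helper and its "x"/"y" field names are purely illustrative.

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jlexer"
)

// decodePoint shows the hand-rolled decoding style the generated
// parsers use: a Delim/field-switch loop driven by the lexer.
func decodePoint(data []byte) (x, y int, err error) {
	l := jlexer.Lexer{Data: data}
	l.Delim('{')
	for !l.IsDelim('}') {
		key := l.UnsafeFieldName(false)
		l.WantColon()
		switch key {
		case "x":
			x = l.Int()
		case "y":
			y = l.Int()
		default:
			l.SkipRecursive() // ignore unknown fields
		}
		l.WantComma()
	}
	l.Delim('}')
	l.Consumed() // only whitespace may remain
	return x, y, l.Error()
}

func main() {
	x, y, err := decodePoint([]byte(`{"x": 1, "y": 2}`))
	fmt.Println(x, y, err)
}
```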
diff --git a/vendor/github.com/mailru/easyjson/jwriter/writer.go b/vendor/github.com/mailru/easyjson/jwriter/writer.go
deleted file mode 100644
index 34b0ade468..0000000000
--- a/vendor/github.com/mailru/easyjson/jwriter/writer.go
+++ /dev/null
@@ -1,417 +0,0 @@
-// Package jwriter contains a JSON writer.
-package jwriter
-
-import (
- "io"
- "strconv"
- "unicode/utf8"
-
- "github.com/mailru/easyjson/buffer"
-)
-
-// Flags describe various encoding options. The behavior may actually be implemented in the encoder, but
-// the Flags field in Writer is used to set and pass them around.
-type Flags int
-
-const (
- NilMapAsEmpty Flags = 1 << iota // Encode nil map as '{}' rather than 'null'.
- NilSliceAsEmpty // Encode nil slice as '[]' rather than 'null'.
-)
-
-// Writer is a JSON writer.
-type Writer struct {
- Flags Flags
-
- Error error
- Buffer buffer.Buffer
- NoEscapeHTML bool
-}
-
-// Size returns the size of the data that was written out.
-func (w *Writer) Size() int {
- return w.Buffer.Size()
-}
-
-// DumpTo outputs the data to given io.Writer, resetting the buffer.
-func (w *Writer) DumpTo(out io.Writer) (written int, err error) {
- return w.Buffer.DumpTo(out)
-}
-
-// BuildBytes returns writer data as a single byte slice. You can optionally provide one byte slice
-// as argument that it will try to reuse.
-func (w *Writer) BuildBytes(reuse ...[]byte) ([]byte, error) {
- if w.Error != nil {
- return nil, w.Error
- }
-
- return w.Buffer.BuildBytes(reuse...), nil
-}
-
-// ReadCloser returns an io.ReadCloser that can be used to read the data.
-// ReadCloser also resets the buffer.
-func (w *Writer) ReadCloser() (io.ReadCloser, error) {
- if w.Error != nil {
- return nil, w.Error
- }
-
- return w.Buffer.ReadCloser(), nil
-}
-
-// RawByte appends raw binary data to the buffer.
-func (w *Writer) RawByte(c byte) {
- w.Buffer.AppendByte(c)
-}
-
-// RawString appends a raw string to the buffer.
-func (w *Writer) RawString(s string) {
- w.Buffer.AppendString(s)
-}
-
-// RawBytesString appends the byte slice as a JSON string to the buffer, or records the error.
-func (w *Writer) RawBytesString(data []byte, err error) {
- switch {
- case w.Error != nil:
- return
- case err != nil:
- w.Error = err
- default:
- w.String(string(data))
- }
-}
-
-// Raw appends raw binary data to the buffer or sets the error if it is given. Useful for
-// calling with results of MarshalJSON-like functions.
-func (w *Writer) Raw(data []byte, err error) {
- switch {
- case w.Error != nil:
- return
- case err != nil:
- w.Error = err
- case len(data) > 0:
- w.Buffer.AppendBytes(data)
- default:
- w.RawString("null")
- }
-}
-
-// RawText encloses raw binary data in quotes and appends it to the buffer.
-// Useful for calling with results of MarshalText-like functions.
-func (w *Writer) RawText(data []byte, err error) {
- switch {
- case w.Error != nil:
- return
- case err != nil:
- w.Error = err
- case len(data) > 0:
- w.String(string(data))
- default:
- w.RawString("null")
- }
-}
-
-// Base64Bytes appends data to the buffer after base64 encoding it
-func (w *Writer) Base64Bytes(data []byte) {
- if data == nil {
- w.Buffer.AppendString("null")
- return
- }
- w.Buffer.AppendByte('"')
- w.base64(data)
- w.Buffer.AppendByte('"')
-}
-
-func (w *Writer) Uint8(n uint8) {
- w.Buffer.EnsureSpace(3)
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
-}
-
-func (w *Writer) Uint16(n uint16) {
- w.Buffer.EnsureSpace(5)
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
-}
-
-func (w *Writer) Uint32(n uint32) {
- w.Buffer.EnsureSpace(10)
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
-}
-
-func (w *Writer) Uint(n uint) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
-}
-
-func (w *Writer) Uint64(n uint64) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, n, 10)
-}
-
-func (w *Writer) Int8(n int8) {
- w.Buffer.EnsureSpace(4)
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
-}
-
-func (w *Writer) Int16(n int16) {
- w.Buffer.EnsureSpace(6)
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
-}
-
-func (w *Writer) Int32(n int32) {
- w.Buffer.EnsureSpace(11)
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
-}
-
-func (w *Writer) Int(n int) {
- w.Buffer.EnsureSpace(21)
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
-}
-
-func (w *Writer) Int64(n int64) {
- w.Buffer.EnsureSpace(21)
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, n, 10)
-}
-
-func (w *Writer) Uint8Str(n uint8) {
- w.Buffer.EnsureSpace(3)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Uint16Str(n uint16) {
- w.Buffer.EnsureSpace(5)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Uint32Str(n uint32) {
- w.Buffer.EnsureSpace(10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) UintStr(n uint) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Uint64Str(n uint64) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, n, 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) UintptrStr(n uintptr) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Int8Str(n int8) {
- w.Buffer.EnsureSpace(4)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Int16Str(n int16) {
- w.Buffer.EnsureSpace(6)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Int32Str(n int32) {
- w.Buffer.EnsureSpace(11)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) IntStr(n int) {
- w.Buffer.EnsureSpace(21)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Int64Str(n int64) {
- w.Buffer.EnsureSpace(21)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, n, 10)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Float32(n float32) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 32)
-}
-
-func (w *Writer) Float32Str(n float32) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 32)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Float64(n float64) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, n, 'g', -1, 64)
-}
-
-func (w *Writer) Float64Str(n float64) {
- w.Buffer.EnsureSpace(20)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
- w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 64)
- w.Buffer.Buf = append(w.Buffer.Buf, '"')
-}
-
-func (w *Writer) Bool(v bool) {
- w.Buffer.EnsureSpace(5)
- if v {
- w.Buffer.Buf = append(w.Buffer.Buf, "true"...)
- } else {
- w.Buffer.Buf = append(w.Buffer.Buf, "false"...)
- }
-}
-
-const chars = "0123456789abcdef"
-
-func getTable(falseValues ...int) [128]bool {
- table := [128]bool{}
-
- for i := 0; i < 128; i++ {
- table[i] = true
- }
-
- for _, v := range falseValues {
- table[v] = false
- }
-
- return table
-}
-
-var (
- htmlEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '&', '<', '>', '\\')
- htmlNoEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '\\')
-)
-
-func (w *Writer) String(s string) {
- w.Buffer.AppendByte('"')
-
- // Portions of the string that contain no escapes are appended as
- // byte slices.
-
- p := 0 // last non-escape symbol
-
- escapeTable := &htmlEscapeTable
- if w.NoEscapeHTML {
- escapeTable = &htmlNoEscapeTable
- }
-
- for i := 0; i < len(s); {
- c := s[i]
-
- if c < utf8.RuneSelf {
- if escapeTable[c] {
- // single-width character, no escaping is required
- i++
- continue
- }
-
- w.Buffer.AppendString(s[p:i])
- switch c {
- case '\t':
- w.Buffer.AppendString(`\t`)
- case '\r':
- w.Buffer.AppendString(`\r`)
- case '\n':
- w.Buffer.AppendString(`\n`)
- case '\\':
- w.Buffer.AppendString(`\\`)
- case '"':
- w.Buffer.AppendString(`\"`)
- default:
- w.Buffer.AppendString(`\u00`)
- w.Buffer.AppendByte(chars[c>>4])
- w.Buffer.AppendByte(chars[c&0xf])
- }
-
- i++
- p = i
- continue
- }
-
- // broken utf
- runeValue, runeWidth := utf8.DecodeRuneInString(s[i:])
- if runeValue == utf8.RuneError && runeWidth == 1 {
- w.Buffer.AppendString(s[p:i])
- w.Buffer.AppendString(`\ufffd`)
- i++
- p = i
- continue
- }
-
- // JSONP-unsafe code points: line separator (U+2028) and paragraph separator (U+2029)
- if runeValue == '\u2028' || runeValue == '\u2029' {
- w.Buffer.AppendString(s[p:i])
- w.Buffer.AppendString(`\u202`)
- w.Buffer.AppendByte(chars[runeValue&0xf])
- i += runeWidth
- p = i
- continue
- }
- i += runeWidth
- }
- w.Buffer.AppendString(s[p:])
- w.Buffer.AppendByte('"')
-}
-
-const encode = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
-const padChar = '='
-
-func (w *Writer) base64(in []byte) {
-
- if len(in) == 0 {
- return
- }
-
- w.Buffer.EnsureSpace(((len(in)-1)/3 + 1) * 4)
-
- si := 0
- n := (len(in) / 3) * 3
-
- for si < n {
- // Convert 3x 8bit source bytes into 4 bytes
- val := uint(in[si+0])<<16 | uint(in[si+1])<<8 | uint(in[si+2])
-
- w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>18&0x3F], encode[val>>12&0x3F], encode[val>>6&0x3F], encode[val&0x3F])
-
- si += 3
- }
-
- remain := len(in) - si
- if remain == 0 {
- return
- }
-
- // Add the remaining small block
- val := uint(in[si+0]) << 16
- if remain == 2 {
- val |= uint(in[si+1]) << 8
- }
-
- w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>18&0x3F], encode[val>>12&0x3F])
-
- switch remain {
- case 2:
- w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>6&0x3F], byte(padChar))
- case 1:
- w.Buffer.Buf = append(w.Buffer.Buf, byte(padChar), byte(padChar))
- }
-}
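
And the mirror image on the encoding side: a minimal sketch of driving the removed jwriter.Writer by hand. The "name"/"count" keys and their values are illustrative only.

```go
package main

import (
	"fmt"

	"github.com/mailru/easyjson/jwriter"
)

func main() {
	// A zero-value Writer accumulates output in its chunked Buffer;
	// errors are latched in w.Error and surfaced by BuildBytes.
	w := jwriter.Writer{NoEscapeHTML: true}
	w.RawByte('{')
	w.String("name")
	w.RawByte(':')
	w.String("easy<json>") // '<' and '>' kept literal because of NoEscapeHTML
	w.RawByte(',')
	w.String("count")
	w.RawByte(':')
	w.Int(42)
	w.RawByte('}')

	out, err := w.BuildBytes()
	fmt.Println(string(out), err) // {"name":"easy<json>","count":42} <nil>
}
```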
diff --git a/vendor/github.com/oklog/ulid/.gitignore b/vendor/github.com/oklog/ulid/.gitignore
deleted file mode 100644
index c92c4d5608..0000000000
--- a/vendor/github.com/oklog/ulid/.gitignore
+++ /dev/null
@@ -1,29 +0,0 @@
-#### joe made this: http://goel.io/joe
-
-#####=== Go ===#####
-
-# Compiled Object files, Static and Dynamic libs (Shared Objects)
-*.o
-*.a
-*.so
-
-# Folders
-_obj
-_test
-
-# Architecture specific extensions/prefixes
-*.[568vq]
-[568vq].out
-
-*.cgo1.go
-*.cgo2.c
-_cgo_defun.c
-_cgo_gotypes.go
-_cgo_export.*
-
-_testmain.go
-
-*.exe
-*.test
-*.prof
-
diff --git a/vendor/github.com/oklog/ulid/.travis.yml b/vendor/github.com/oklog/ulid/.travis.yml
deleted file mode 100644
index 43eb762fa3..0000000000
--- a/vendor/github.com/oklog/ulid/.travis.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-language: go
-sudo: false
-go:
- - 1.10.x
-install:
- - go get -v github.com/golang/lint/golint
- - go get golang.org/x/tools/cmd/cover
- - go get github.com/mattn/goveralls
- - go get -d -t -v ./...
- - go build -v ./...
-script:
- - go vet ./...
- - $HOME/gopath/bin/golint .
- - go test -v -race ./...
- - go test -v -covermode=count -coverprofile=cov.out
- - $HOME/gopath/bin/goveralls -coverprofile=cov.out -service=travis-ci -repotoken "$COVERALLS_TOKEN" || true
diff --git a/vendor/github.com/oklog/ulid/AUTHORS.md b/vendor/github.com/oklog/ulid/AUTHORS.md
deleted file mode 100644
index 95581c78b0..0000000000
--- a/vendor/github.com/oklog/ulid/AUTHORS.md
+++ /dev/null
@@ -1,2 +0,0 @@
-- Peter Bourgon (@peterbourgon)
-- Tomás Senart (@tsenart)
diff --git a/vendor/github.com/oklog/ulid/CHANGELOG.md b/vendor/github.com/oklog/ulid/CHANGELOG.md
deleted file mode 100644
index 8da38c6b00..0000000000
--- a/vendor/github.com/oklog/ulid/CHANGELOG.md
+++ /dev/null
@@ -1,33 +0,0 @@
-## 1.3.1 / 2018-10-02
-
-* Use underlying entropy source for random increments in Monotonic (#32)
-
-## 1.3.0 / 2018-09-29
-
-* Monotonic entropy support (#31)
-
-## 1.2.0 / 2018-09-09
-
-* Add a function to convert Unix time in milliseconds back to time.Time (#30)
-
-## 1.1.0 / 2018-08-15
-
-* Ensure random part is always read from the entropy reader in full (#28)
-
-## 1.0.0 / 2018-07-29
-
-* Add ParseStrict and MustParseStrict functions (#26)
-* Enforce overflow checking when parsing (#20)
-
-## 0.3.0 / 2017-01-03
-
-* Implement ULID.Compare method
-
-## 0.2.0 / 2016-12-13
-
-* Remove year 2262 Timestamp bug. (#1)
-* Gracefully handle invalid encodings when parsing.
-
-## 0.1.0 / 2016-12-06
-
-* First ULID release
diff --git a/vendor/github.com/oklog/ulid/CONTRIBUTING.md b/vendor/github.com/oklog/ulid/CONTRIBUTING.md
deleted file mode 100644
index 68f03f26eb..0000000000
--- a/vendor/github.com/oklog/ulid/CONTRIBUTING.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# Contributing
-
-We use GitHub to manage reviews of pull requests.
-
-* If you have a trivial fix or improvement, go ahead and create a pull
- request, addressing (with `@...`) one or more of the maintainers
- (see [AUTHORS.md](AUTHORS.md)) in the description of the pull request.
-
-* If you plan to do something more involved, first propose your ideas
- in a Github issue. This will avoid unnecessary work and surely give
- you and us a good deal of inspiration.
-
-* Relevant coding style guidelines are the [Go Code Review
- Comments](https://code.google.com/p/go-wiki/wiki/CodeReviewComments)
- and the _Formatting and style_ section of Peter Bourgon's [Go: Best
- Practices for Production
- Environments](http://peter.bourgon.org/go-in-production/#formatting-and-style).
diff --git a/vendor/github.com/oklog/ulid/Gopkg.lock b/vendor/github.com/oklog/ulid/Gopkg.lock
deleted file mode 100644
index 349b449a6e..0000000000
--- a/vendor/github.com/oklog/ulid/Gopkg.lock
+++ /dev/null
@@ -1,15 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
- branch = "master"
- name = "github.com/pborman/getopt"
- packages = ["v2"]
- revision = "7148bc3a4c3008adfcab60cbebfd0576018f330b"
-
-[solve-meta]
- analyzer-name = "dep"
- analyzer-version = 1
- inputs-digest = "6779b05abd5cd429c5393641d2453005a3cb74a400d161b2b5c5d0ca2e10e116"
- solver-name = "gps-cdcl"
- solver-version = 1
diff --git a/vendor/github.com/oklog/ulid/Gopkg.toml b/vendor/github.com/oklog/ulid/Gopkg.toml
deleted file mode 100644
index 624a7a019c..0000000000
--- a/vendor/github.com/oklog/ulid/Gopkg.toml
+++ /dev/null
@@ -1,26 +0,0 @@
-
-# Gopkg.toml example
-#
-# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
-# for detailed Gopkg.toml documentation.
-#
-# required = ["github.com/user/thing/cmd/thing"]
-# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
-#
-# [[constraint]]
-# name = "github.com/user/project"
-# version = "1.0.0"
-#
-# [[constraint]]
-# name = "github.com/user/project2"
-# branch = "dev"
-# source = "github.com/myfork/project2"
-#
-# [[override]]
-# name = "github.com/x/y"
-# version = "2.4.0"
-
-
-[[constraint]]
- branch = "master"
- name = "github.com/pborman/getopt"
diff --git a/vendor/github.com/oklog/ulid/LICENSE b/vendor/github.com/oklog/ulid/LICENSE
deleted file mode 100644
index 261eeb9e9f..0000000000
--- a/vendor/github.com/oklog/ulid/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/oklog/ulid/README.md b/vendor/github.com/oklog/ulid/README.md
deleted file mode 100644
index 0a3d2f82b2..0000000000
--- a/vendor/github.com/oklog/ulid/README.md
+++ /dev/null
@@ -1,150 +0,0 @@
-# Universally Unique Lexicographically Sortable Identifier
-
-
-[Build Status](http://travis-ci.org/oklog/ulid)
-[Go Report Card](https://goreportcard.com/report/oklog/ulid)
-[Coverage](https://coveralls.io/github/oklog/ulid?branch=master)
-[GoDoc](https://godoc.org/github.com/oklog/ulid)
-[Apache 2 License](https://raw.githubusercontent.com/oklog/ulid/master/LICENSE)
-
-A Go port of [alizain/ulid](https://github.com/alizain/ulid) with binary format implemented.
-
-## Background
-
-A GUID/UUID can be suboptimal for many use-cases because:
-
-- It isn't the most character efficient way of encoding 128 bits
-- UUID v1/v2 is impractical in many environments, as it requires access to a unique, stable MAC address
-- UUID v3/v5 requires a unique seed and produces randomly distributed IDs, which can cause fragmentation in many data structures
-- UUID v4 provides no other information than randomness which can cause fragmentation in many data structures
-
-A ULID however:
-
-- Is compatible with UUIDs/GUIDs
-- 1.21e+24 unique ULIDs per millisecond (1,208,925,819,614,629,174,706,176 to be exact)
-- Lexicographically sortable
-- Canonically encoded as a 26 character string, as opposed to the 36 character UUID
-- Uses Crockford's base32 for better efficiency and readability (5 bits per character)
-- Case insensitive
-- No special characters (URL safe)
-- Monotonic sort order (correctly detects and handles the same millisecond)
-
-## Install
-
-```shell
-go get github.com/oklog/ulid
-```
-
-## Usage
-
-An ULID is constructed with a `time.Time` and an `io.Reader` entropy source.
-This design allows for greater flexibility in choosing your trade-offs.
-
-Please note that `rand.Rand` from the `math` package is *not* safe for concurrent use.
-Instantiate one per long-living goroutine, or use a `sync.Pool` if you want to avoid the potential contention of a locked `rand.Source`, as has frequently been observed with the package-level functions.
-
-
-```go
-func ExampleULID() {
- t := time.Unix(1000000, 0)
- entropy := ulid.Monotonic(rand.New(rand.NewSource(t.UnixNano())), 0)
- fmt.Println(ulid.MustNew(ulid.Timestamp(t), entropy))
- // Output: 0000XSNJG0MQJHBF4QX1EFD6Y3
-}
-
-```
-
-## Specification
-
-Below is the current specification of ULID as implemented in this repository.
-
-### Components
-
-**Timestamp**
-- 48 bits
-- UNIX-time in milliseconds
-- Won't run out of space till the year 10895 AD
-
-**Entropy**
-- 80 bits
-- User defined entropy source.
-- Monotonicity within the same millisecond with [`ulid.Monotonic`](https://godoc.org/github.com/oklog/ulid#Monotonic)
-
-### Encoding
-
-[Crockford's Base32](http://www.crockford.com/wrmg/base32.html) is used as shown.
-This alphabet excludes the letters I, L, O, and U to avoid confusion and abuse.
-
-```
-0123456789ABCDEFGHJKMNPQRSTVWXYZ
-```
-
-### Binary Layout and Byte Order
-
-The components are encoded as 16 octets. Each component is encoded with the Most Significant Byte first (network byte order).
-
-```
-0                   1                   2                   3
- 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-|                      32_bit_uint_time_high                    |
-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-|     16_bit_uint_time_low      |       16_bit_uint_random      |
-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-|                       32_bit_uint_random                      |
-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-|                       32_bit_uint_random                      |
-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-```
-
-### String Representation
-
-```
- 01AN4Z07BY 79KA1307SR9X4MV3
-|----------| |----------------|
- Timestamp Entropy
- 10 chars 16 chars
- 48bits 80bits
- base32 base32
-```
-
-## Test
-
-```shell
-go test ./...
-```
-
-## Benchmarks
-
-On a Intel Core i7 Ivy Bridge 2.7 GHz, MacOS 10.12.1 and Go 1.8.0beta1
-
-```
-BenchmarkNew/WithCryptoEntropy-8 2000000 771 ns/op 20.73 MB/s 16 B/op 1 allocs/op
-BenchmarkNew/WithEntropy-8 20000000 65.8 ns/op 243.01 MB/s 16 B/op 1 allocs/op
-BenchmarkNew/WithoutEntropy-8 50000000 30.0 ns/op 534.06 MB/s 16 B/op 1 allocs/op
-BenchmarkMustNew/WithCryptoEntropy-8 2000000 781 ns/op 20.48 MB/s 16 B/op 1 allocs/op
-BenchmarkMustNew/WithEntropy-8 20000000 70.0 ns/op 228.51 MB/s 16 B/op 1 allocs/op
-BenchmarkMustNew/WithoutEntropy-8 50000000 34.6 ns/op 462.98 MB/s 16 B/op 1 allocs/op
-BenchmarkParse-8 50000000 30.0 ns/op 866.16 MB/s 0 B/op 0 allocs/op
-BenchmarkMustParse-8 50000000 35.2 ns/op 738.94 MB/s 0 B/op 0 allocs/op
-BenchmarkString-8 20000000 64.9 ns/op 246.40 MB/s 32 B/op 1 allocs/op
-BenchmarkMarshal/Text-8 20000000 55.8 ns/op 286.84 MB/s 32 B/op 1 allocs/op
-BenchmarkMarshal/TextTo-8 100000000 22.4 ns/op 714.91 MB/s 0 B/op 0 allocs/op
-BenchmarkMarshal/Binary-8 300000000 4.02 ns/op 3981.77 MB/s 0 B/op 0 allocs/op
-BenchmarkMarshal/BinaryTo-8 2000000000 1.18 ns/op 13551.75 MB/s 0 B/op 0 allocs/op
-BenchmarkUnmarshal/Text-8 100000000 20.5 ns/op 1265.27 MB/s 0 B/op 0 allocs/op
-BenchmarkUnmarshal/Binary-8 300000000 4.94 ns/op 3240.01 MB/s 0 B/op 0 allocs/op
-BenchmarkNow-8 100000000 15.1 ns/op 528.09 MB/s 0 B/op 0 allocs/op
-BenchmarkTimestamp-8 2000000000 0.29 ns/op 27271.59 MB/s 0 B/op 0 allocs/op
-BenchmarkTime-8 2000000000 0.58 ns/op 13717.80 MB/s 0 B/op 0 allocs/op
-BenchmarkSetTime-8 2000000000 0.89 ns/op 9023.95 MB/s 0 B/op 0 allocs/op
-BenchmarkEntropy-8 200000000 7.62 ns/op 1311.66 MB/s 0 B/op 0 allocs/op
-BenchmarkSetEntropy-8 2000000000 0.88 ns/op 11376.54 MB/s 0 B/op 0 allocs/op
-BenchmarkCompare-8 200000000 7.34 ns/op 4359.23 MB/s 0 B/op 0 allocs/op
-```
-
-## Prior Art
-
-- [alizain/ulid](https://github.com/alizain/ulid)
-- [RobThree/NUlid](https://github.com/RobThree/NUlid)
-- [imdario/go-ulid](https://github.com/imdario/go-ulid)
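
For context on what this removal drops, here is a minimal sketch, not taken from this repository, of how callers typically used the oklog/ulid API described in the README above:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"

	"github.com/oklog/ulid"
)

func main() {
	// Seeded entropy source; ulid.Monotonic keeps IDs strictly increasing
	// when several ULIDs are generated within the same millisecond.
	t := time.Now()
	entropy := ulid.Monotonic(rand.New(rand.NewSource(t.UnixNano())), 0)

	id := ulid.MustNew(ulid.Timestamp(t), entropy)
	fmt.Println(id)                   // 26-character Crockford base32 string
	fmt.Println(ulid.Time(id.Time())) // 48-bit timestamp back as a time.Time

	// Strict parsing also validates that every character is in the alphabet.
	parsed := ulid.MustParseStrict(id.String())
	fmt.Println(parsed.Compare(id) == 0) // true
}
```

Passing 0 as the increment selects the documented secure default of `math.MaxUint32` for the monotonic step.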
diff --git a/vendor/github.com/oklog/ulid/ulid.go b/vendor/github.com/oklog/ulid/ulid.go
deleted file mode 100644
index c5d0d66fd2..0000000000
--- a/vendor/github.com/oklog/ulid/ulid.go
+++ /dev/null
@@ -1,614 +0,0 @@
-// Copyright 2016 The Oklog Authors
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ulid
-
-import (
- "bufio"
- "bytes"
- "database/sql/driver"
- "encoding/binary"
- "errors"
- "io"
- "math"
- "math/bits"
- "math/rand"
- "time"
-)
-
-/*
-An ULID is a 16 byte Universally Unique Lexicographically Sortable Identifier
-
- The components are encoded as 16 octets.
- Each component is encoded with the MSB first (network byte order).
-
-	0                   1                   2                   3
-	 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
-	+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-	|                      32_bit_uint_time_high                    |
-	+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-	|     16_bit_uint_time_low      |       16_bit_uint_random      |
-	+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-	|                       32_bit_uint_random                      |
-	+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-	|                       32_bit_uint_random                      |
-	+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-*/
-type ULID [16]byte
-
-var (
- // ErrDataSize is returned when parsing or unmarshaling ULIDs with the wrong
- // data size.
- ErrDataSize = errors.New("ulid: bad data size when unmarshaling")
-
- // ErrInvalidCharacters is returned when parsing or unmarshaling ULIDs with
- // invalid Base32 encodings.
- ErrInvalidCharacters = errors.New("ulid: bad data characters when unmarshaling")
-
- // ErrBufferSize is returned when marshalling ULIDs to a buffer of insufficient
- // size.
- ErrBufferSize = errors.New("ulid: bad buffer size when marshaling")
-
- // ErrBigTime is returned when constructing an ULID with a time that is larger
- // than MaxTime.
- ErrBigTime = errors.New("ulid: time too big")
-
- // ErrOverflow is returned when unmarshaling a ULID whose first character is
- // larger than 7, thereby exceeding the valid bit depth of 128.
- ErrOverflow = errors.New("ulid: overflow when unmarshaling")
-
- // ErrMonotonicOverflow is returned by a Monotonic entropy source when
- // incrementing the previous ULID's entropy bytes would result in overflow.
- ErrMonotonicOverflow = errors.New("ulid: monotonic entropy overflow")
-
- // ErrScanValue is returned when the value passed to scan cannot be unmarshaled
- // into the ULID.
- ErrScanValue = errors.New("ulid: source value must be a string or byte slice")
-)
-
-// New returns an ULID with the given Unix milliseconds timestamp and an
-// optional entropy source. Use the Timestamp function to convert
-// a time.Time to Unix milliseconds.
-//
-// ErrBigTime is returned when passing a timestamp bigger than MaxTime.
-// Reading from the entropy source may also return an error.
-func New(ms uint64, entropy io.Reader) (id ULID, err error) {
- if err = id.SetTime(ms); err != nil {
- return id, err
- }
-
- switch e := entropy.(type) {
- case nil:
- return id, err
- case *monotonic:
- err = e.MonotonicRead(ms, id[6:])
- default:
- _, err = io.ReadFull(e, id[6:])
- }
-
- return id, err
-}
-
-// MustNew is a convenience function equivalent to New that panics on failure
-// instead of returning an error.
-func MustNew(ms uint64, entropy io.Reader) ULID {
- id, err := New(ms, entropy)
- if err != nil {
- panic(err)
- }
- return id
-}
-
-// Parse parses an encoded ULID, returning an error in case of failure.
-//
-// ErrDataSize is returned if the len(ulid) is different from an encoded
-// ULID's length. Invalid encodings produce undefined ULIDs. For a version that
-// returns an error instead, see ParseStrict.
-func Parse(ulid string) (id ULID, err error) {
- return id, parse([]byte(ulid), false, &id)
-}
-
-// ParseStrict parses an encoded ULID, returning an error in case of failure.
-//
-// It is like Parse, but additionally validates that the parsed ULID consists
-// only of valid base32 characters. It is slightly slower than Parse.
-//
-// ErrDataSize is returned if the len(ulid) is different from an encoded
-// ULID's length. Invalid encodings return ErrInvalidCharacters.
-func ParseStrict(ulid string) (id ULID, err error) {
- return id, parse([]byte(ulid), true, &id)
-}
-
-func parse(v []byte, strict bool, id *ULID) error {
- // Check if a base32 encoded ULID is the right length.
- if len(v) != EncodedSize {
- return ErrDataSize
- }
-
- // Check if all the characters in a base32 encoded ULID are part of the
- // expected base32 character set.
- if strict &&
- (dec[v[0]] == 0xFF ||
- dec[v[1]] == 0xFF ||
- dec[v[2]] == 0xFF ||
- dec[v[3]] == 0xFF ||
- dec[v[4]] == 0xFF ||
- dec[v[5]] == 0xFF ||
- dec[v[6]] == 0xFF ||
- dec[v[7]] == 0xFF ||
- dec[v[8]] == 0xFF ||
- dec[v[9]] == 0xFF ||
- dec[v[10]] == 0xFF ||
- dec[v[11]] == 0xFF ||
- dec[v[12]] == 0xFF ||
- dec[v[13]] == 0xFF ||
- dec[v[14]] == 0xFF ||
- dec[v[15]] == 0xFF ||
- dec[v[16]] == 0xFF ||
- dec[v[17]] == 0xFF ||
- dec[v[18]] == 0xFF ||
- dec[v[19]] == 0xFF ||
- dec[v[20]] == 0xFF ||
- dec[v[21]] == 0xFF ||
- dec[v[22]] == 0xFF ||
- dec[v[23]] == 0xFF ||
- dec[v[24]] == 0xFF ||
- dec[v[25]] == 0xFF) {
- return ErrInvalidCharacters
- }
-
- // Check if the first character in a base32 encoded ULID will overflow. This
- // happens because the base32 representation encodes 130 bits, while the
- // ULID is only 128 bits.
- //
- // See https://github.com/oklog/ulid/issues/9 for details.
- if v[0] > '7' {
- return ErrOverflow
- }
-
- // Use an optimized unrolled loop (from https://github.com/RobThree/NUlid)
- // to decode a base32 ULID.
-
- // 6 bytes timestamp (48 bits)
- (*id)[0] = ((dec[v[0]] << 5) | dec[v[1]])
- (*id)[1] = ((dec[v[2]] << 3) | (dec[v[3]] >> 2))
- (*id)[2] = ((dec[v[3]] << 6) | (dec[v[4]] << 1) | (dec[v[5]] >> 4))
- (*id)[3] = ((dec[v[5]] << 4) | (dec[v[6]] >> 1))
- (*id)[4] = ((dec[v[6]] << 7) | (dec[v[7]] << 2) | (dec[v[8]] >> 3))
- (*id)[5] = ((dec[v[8]] << 5) | dec[v[9]])
-
- // 10 bytes of entropy (80 bits)
- (*id)[6] = ((dec[v[10]] << 3) | (dec[v[11]] >> 2))
- (*id)[7] = ((dec[v[11]] << 6) | (dec[v[12]] << 1) | (dec[v[13]] >> 4))
- (*id)[8] = ((dec[v[13]] << 4) | (dec[v[14]] >> 1))
- (*id)[9] = ((dec[v[14]] << 7) | (dec[v[15]] << 2) | (dec[v[16]] >> 3))
- (*id)[10] = ((dec[v[16]] << 5) | dec[v[17]])
- (*id)[11] = ((dec[v[18]] << 3) | dec[v[19]]>>2)
- (*id)[12] = ((dec[v[19]] << 6) | (dec[v[20]] << 1) | (dec[v[21]] >> 4))
- (*id)[13] = ((dec[v[21]] << 4) | (dec[v[22]] >> 1))
- (*id)[14] = ((dec[v[22]] << 7) | (dec[v[23]] << 2) | (dec[v[24]] >> 3))
- (*id)[15] = ((dec[v[24]] << 5) | dec[v[25]])
-
- return nil
-}
-
-// MustParse is a convenience function equivalent to Parse that panics on failure
-// instead of returning an error.
-func MustParse(ulid string) ULID {
- id, err := Parse(ulid)
- if err != nil {
- panic(err)
- }
- return id
-}
-
-// MustParseStrict is a convenience function equivalent to ParseStrict that
-// panics on failure instead of returning an error.
-func MustParseStrict(ulid string) ULID {
- id, err := ParseStrict(ulid)
- if err != nil {
- panic(err)
- }
- return id
-}
-
-// String returns a lexicographically sortable string encoded ULID
-// (26 characters, non-standard base 32) e.g. 01AN4Z07BY79KA1307SR9X4MV3
-// Format: tttttttttteeeeeeeeeeeeeeee where t is time and e is entropy
-func (id ULID) String() string {
- ulid := make([]byte, EncodedSize)
- _ = id.MarshalTextTo(ulid)
- return string(ulid)
-}
-
-// MarshalBinary implements the encoding.BinaryMarshaler interface by
-// returning the ULID as a byte slice.
-func (id ULID) MarshalBinary() ([]byte, error) {
- ulid := make([]byte, len(id))
- return ulid, id.MarshalBinaryTo(ulid)
-}
-
-// MarshalBinaryTo writes the binary encoding of the ULID to the given buffer.
-// ErrBufferSize is returned when the len(dst) != 16.
-func (id ULID) MarshalBinaryTo(dst []byte) error {
- if len(dst) != len(id) {
- return ErrBufferSize
- }
-
- copy(dst, id[:])
- return nil
-}
-
-// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface by
-// copying the passed data and converting it to an ULID. ErrDataSize is
-// returned if the data length is different from ULID length.
-func (id *ULID) UnmarshalBinary(data []byte) error {
- if len(data) != len(*id) {
- return ErrDataSize
- }
-
- copy((*id)[:], data)
- return nil
-}
-
-// Encoding is the base 32 encoding alphabet used in ULID strings.
-const Encoding = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"
-
-// MarshalText implements the encoding.TextMarshaler interface by
-// returning the string encoded ULID.
-func (id ULID) MarshalText() ([]byte, error) {
- ulid := make([]byte, EncodedSize)
- return ulid, id.MarshalTextTo(ulid)
-}
-
-// MarshalTextTo writes the ULID as a string to the given buffer.
-// ErrBufferSize is returned when the len(dst) != 26.
-func (id ULID) MarshalTextTo(dst []byte) error {
- // Optimized unrolled loop ahead.
- // From https://github.com/RobThree/NUlid
-
- if len(dst) != EncodedSize {
- return ErrBufferSize
- }
-
- // 10 byte timestamp
- dst[0] = Encoding[(id[0]&224)>>5]
- dst[1] = Encoding[id[0]&31]
- dst[2] = Encoding[(id[1]&248)>>3]
- dst[3] = Encoding[((id[1]&7)<<2)|((id[2]&192)>>6)]
- dst[4] = Encoding[(id[2]&62)>>1]
- dst[5] = Encoding[((id[2]&1)<<4)|((id[3]&240)>>4)]
- dst[6] = Encoding[((id[3]&15)<<1)|((id[4]&128)>>7)]
- dst[7] = Encoding[(id[4]&124)>>2]
- dst[8] = Encoding[((id[4]&3)<<3)|((id[5]&224)>>5)]
- dst[9] = Encoding[id[5]&31]
-
- // 16 bytes of entropy
- dst[10] = Encoding[(id[6]&248)>>3]
- dst[11] = Encoding[((id[6]&7)<<2)|((id[7]&192)>>6)]
- dst[12] = Encoding[(id[7]&62)>>1]
- dst[13] = Encoding[((id[7]&1)<<4)|((id[8]&240)>>4)]
- dst[14] = Encoding[((id[8]&15)<<1)|((id[9]&128)>>7)]
- dst[15] = Encoding[(id[9]&124)>>2]
- dst[16] = Encoding[((id[9]&3)<<3)|((id[10]&224)>>5)]
- dst[17] = Encoding[id[10]&31]
- dst[18] = Encoding[(id[11]&248)>>3]
- dst[19] = Encoding[((id[11]&7)<<2)|((id[12]&192)>>6)]
- dst[20] = Encoding[(id[12]&62)>>1]
- dst[21] = Encoding[((id[12]&1)<<4)|((id[13]&240)>>4)]
- dst[22] = Encoding[((id[13]&15)<<1)|((id[14]&128)>>7)]
- dst[23] = Encoding[(id[14]&124)>>2]
- dst[24] = Encoding[((id[14]&3)<<3)|((id[15]&224)>>5)]
- dst[25] = Encoding[id[15]&31]
-
- return nil
-}
-
-// Byte to index table for O(1) lookups when unmarshaling.
-// We use 0xFF as sentinel value for invalid indexes.
-var dec = [...]byte{
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x01,
- 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
- 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14, 0x15, 0xFF,
- 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C, 0x1D, 0x1E,
- 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C,
- 0x0D, 0x0E, 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14,
- 0x15, 0xFF, 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C,
- 0x1D, 0x1E, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
- 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
-}
-
-// EncodedSize is the length of a text encoded ULID.
-const EncodedSize = 26
-
-// UnmarshalText implements the encoding.TextUnmarshaler interface by
-// parsing the data as string encoded ULID.
-//
-// ErrDataSize is returned if the len(v) is different from an encoded
-// ULID's length. Invalid encodings produce undefined ULIDs.
-func (id *ULID) UnmarshalText(v []byte) error {
- return parse(v, false, id)
-}
-
-// Time returns the Unix time in milliseconds encoded in the ULID.
-// Use the top level Time function to convert the returned value to
-// a time.Time.
-func (id ULID) Time() uint64 {
- return uint64(id[5]) | uint64(id[4])<<8 |
- uint64(id[3])<<16 | uint64(id[2])<<24 |
- uint64(id[1])<<32 | uint64(id[0])<<40
-}
-
-// maxTime is the maximum Unix time in milliseconds that can be
-// represented in an ULID.
-var maxTime = ULID{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}.Time()
-
-// MaxTime returns the maximum Unix time in milliseconds that
-// can be encoded in an ULID.
-func MaxTime() uint64 { return maxTime }
-
-// Now is a convenience function that returns the current
-// UTC time in Unix milliseconds. Equivalent to:
-// Timestamp(time.Now().UTC())
-func Now() uint64 { return Timestamp(time.Now().UTC()) }
-
-// Timestamp converts a time.Time to Unix milliseconds.
-//
-// Because of the way ULID stores time, times from the year
-// 10889 produces undefined results.
-func Timestamp(t time.Time) uint64 {
- return uint64(t.Unix())*1000 +
- uint64(t.Nanosecond()/int(time.Millisecond))
-}
-
-// Time converts Unix milliseconds in the format
-// returned by the Timestamp function to a time.Time.
-func Time(ms uint64) time.Time {
- s := int64(ms / 1e3)
- ns := int64((ms % 1e3) * 1e6)
- return time.Unix(s, ns)
-}
-
-// SetTime sets the time component of the ULID to the given Unix time
-// in milliseconds.
-func (id *ULID) SetTime(ms uint64) error {
- if ms > maxTime {
- return ErrBigTime
- }
-
- (*id)[0] = byte(ms >> 40)
- (*id)[1] = byte(ms >> 32)
- (*id)[2] = byte(ms >> 24)
- (*id)[3] = byte(ms >> 16)
- (*id)[4] = byte(ms >> 8)
- (*id)[5] = byte(ms)
-
- return nil
-}
-
-// Entropy returns the entropy from the ULID.
-func (id ULID) Entropy() []byte {
- e := make([]byte, 10)
- copy(e, id[6:])
- return e
-}
-
-// SetEntropy sets the ULID entropy to the passed byte slice.
-// ErrDataSize is returned if len(e) != 10.
-func (id *ULID) SetEntropy(e []byte) error {
- if len(e) != 10 {
- return ErrDataSize
- }
-
- copy((*id)[6:], e)
- return nil
-}
-
-// Compare returns an integer comparing id and other lexicographically.
-// The result will be 0 if id==other, -1 if id < other, and +1 if id > other.
-func (id ULID) Compare(other ULID) int {
- return bytes.Compare(id[:], other[:])
-}
-
-// Scan implements the sql.Scanner interface. It supports scanning
-// a string or byte slice.
-func (id *ULID) Scan(src interface{}) error {
- switch x := src.(type) {
- case nil:
- return nil
- case string:
- return id.UnmarshalText([]byte(x))
- case []byte:
- return id.UnmarshalBinary(x)
- }
-
- return ErrScanValue
-}
-
-// Value implements the sql/driver.Valuer interface. This returns the value
-// represented as a byte slice. If instead a string is desirable, a wrapper
-// type can be created that calls String().
-//
-// // stringValuer wraps a ULID as a string-based driver.Valuer.
-// type stringValuer ULID
-//
-// func (id stringValuer) Value() (driver.Value, error) {
-// return ULID(id).String(), nil
-// }
-//
-// // Example usage.
-// db.Exec("...", stringValuer(id))
-func (id ULID) Value() (driver.Value, error) {
- return id.MarshalBinary()
-}
-
-// Monotonic returns an entropy source that is guaranteed to yield
-// strictly increasing entropy bytes for the same ULID timestamp.
-// On conflicts, the previous ULID entropy is incremented with a
-// random number between 1 and `inc` (inclusive).
-//
-// The provided entropy source must actually yield random bytes or else
-// monotonic reads are not guaranteed to terminate, since there isn't
-// enough randomness to compute an increment number.
-//
-// When `inc == 0`, it'll be set to a secure default of `math.MaxUint32`.
-// The lower the value of `inc`, the easier the next ULID within the
-// same millisecond is to guess. If your code depends on ULIDs having
-// secure entropy bytes, then don't go under this default unless you know
-// what you're doing.
-//
-// The returned io.Reader isn't safe for concurrent use.
-func Monotonic(entropy io.Reader, inc uint64) io.Reader {
- m := monotonic{
- Reader: bufio.NewReader(entropy),
- inc: inc,
- }
-
- if m.inc == 0 {
- m.inc = math.MaxUint32
- }
-
- if rng, ok := entropy.(*rand.Rand); ok {
- m.rng = rng
- }
-
- return &m
-}
-
-type monotonic struct {
- io.Reader
- ms uint64
- inc uint64
- entropy uint80
- rand [8]byte
- rng *rand.Rand
-}
-
-func (m *monotonic) MonotonicRead(ms uint64, entropy []byte) (err error) {
- if !m.entropy.IsZero() && m.ms == ms {
- err = m.increment()
- m.entropy.AppendTo(entropy)
- } else if _, err = io.ReadFull(m.Reader, entropy); err == nil {
- m.ms = ms
- m.entropy.SetBytes(entropy)
- }
- return err
-}
-
-// increment the previous entropy number with a random number
-// of up to m.inc (inclusive).
-func (m *monotonic) increment() error {
- if inc, err := m.random(); err != nil {
- return err
- } else if m.entropy.Add(inc) {
- return ErrMonotonicOverflow
- }
- return nil
-}
-
-// random returns a uniform random value in [1, m.inc), reading entropy
-// from m.Reader. When m.inc == 0 || m.inc == 1, it returns 1.
-// Adapted from: https://golang.org/pkg/crypto/rand/#Int
-func (m *monotonic) random() (inc uint64, err error) {
- if m.inc <= 1 {
- return 1, nil
- }
-
- // Fast path for using a underlying rand.Rand directly.
- if m.rng != nil {
- // Range: [1, m.inc)
- return 1 + uint64(m.rng.Int63n(int64(m.inc))), nil
- }
-
- // bitLen is the maximum bit length needed to encode a value < m.inc.
- bitLen := bits.Len64(m.inc)
-
- // byteLen is the maximum byte length needed to encode a value < m.inc.
- byteLen := uint(bitLen+7) / 8
-
- // msbitLen is the number of bits in the most significant byte of m.inc-1.
- msbitLen := uint(bitLen % 8)
- if msbitLen == 0 {
- msbitLen = 8
- }
-
- for inc == 0 || inc >= m.inc {
- if _, err = io.ReadFull(m.Reader, m.rand[:byteLen]); err != nil {
- return 0, err
- }
-
- // Clear bits in the first byte to increase the probability
- // that the candidate is < m.inc.
-		m.rand[0] &= uint8(int(1<<msbitLen) - 1)
-- Add IsGlobalTracerRegistered() to indicate if a tracer has been registered (#201)
-- Use Set() instead of Add() in HTTPHeadersCarrier (#191)
-- Update license to Apache 2.0 (#181)
-- Replace 'golang.org/x/net/context' with 'context' (#176)
-- Port of Python opentracing/harness/api_check.py to Go (#146)
-- Fix race condition in MockSpan.Context() (#170)
-- Add PeerHostIPv4.SetString() (#155)
-- Add a Noop log field type to log to allow for optional fields (#150)
-
-
-1.0.2 (2017-04-26)
--------------------
-
-- Add more semantic tags (#139)
-
-
-1.0.1 (2017-02-06)
--------------------
-
-- Correct spelling in comments
-- Address race in nextMockID() (#123)
-- log: avoid panic marshaling nil error (#131)
-- Deprecate InitGlobalTracer in favor of SetGlobalTracer (#128)
-- Drop Go 1.5 that fails in Travis (#129)
-- Add convenience methods Key() and Value() to log.Field
-- Add convenience methods to log.Field (2 years, 6 months ago)
-
-1.0.0 (2016-09-26)
--------------------
-
-- This release implements OpenTracing Specification 1.0 (https://opentracing.io/spec)
-
diff --git a/vendor/github.com/opentracing/opentracing-go/LICENSE b/vendor/github.com/opentracing/opentracing-go/LICENSE
deleted file mode 100644
index f0027349e8..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright 2016 The OpenTracing Authors
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/opentracing/opentracing-go/Makefile b/vendor/github.com/opentracing/opentracing-go/Makefile
deleted file mode 100644
index 62abb63f58..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-.DEFAULT_GOAL := test-and-lint
-
-.PHONY: test-and-lint
-test-and-lint: test lint
-
-.PHONY: test
-test:
- go test -v -cover -race ./...
-
-.PHONY: cover
-cover:
- go test -v -coverprofile=coverage.txt -covermode=atomic -race ./...
-
-.PHONY: lint
-lint:
- go fmt ./...
- golint ./...
- @# Run again with magic to exit non-zero if golint outputs anything.
- @! (golint ./... | read dummy)
- go vet ./...
diff --git a/vendor/github.com/opentracing/opentracing-go/README.md b/vendor/github.com/opentracing/opentracing-go/README.md
deleted file mode 100644
index 6ef1d7c9d2..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/README.md
+++ /dev/null
@@ -1,171 +0,0 @@
-[Gitter](https://gitter.im/opentracing/public) [Travis CI](https://travis-ci.org/opentracing/opentracing-go) [GoDoc](http://godoc.org/github.com/opentracing/opentracing-go)
-[Sourcegraph](https://sourcegraph.com/github.com/opentracing/opentracing-go?badge)
-
-# OpenTracing API for Go
-
-This package is a Go platform API for OpenTracing.
-
-## Required Reading
-
-In order to understand the Go platform API, one must first be familiar with the
-[OpenTracing project](https://opentracing.io) and
-[terminology](https://opentracing.io/specification/) more specifically.
-
-## API overview for those adding instrumentation
-
-Everyday consumers of this `opentracing` package really only need to worry
-about a couple of key abstractions: the `StartSpan` function, the `Span`
-interface, and binding a `Tracer` at `main()`-time. Here are code snippets
-demonstrating some important use cases.
-
-#### Singleton initialization
-
-The simplest starting point is `./default_tracer.go`. As early as possible, call
-
-```go
- import "github.com/opentracing/opentracing-go"
- import ".../some_tracing_impl"
-
- func main() {
- opentracing.SetGlobalTracer(
- // tracing impl specific:
- some_tracing_impl.New(...),
- )
- ...
- }
-```
-
-#### Non-Singleton initialization
-
-If you prefer direct control to singletons, manage ownership of the
-`opentracing.Tracer` implementation explicitly.
-
-#### Creating a Span given an existing Go `context.Context`
-
-If you use `context.Context` in your application, OpenTracing's Go library will
-happily rely on it for `Span` propagation. To start a new (blocking child)
-`Span`, you can use `StartSpanFromContext`.
-
-```go
- func xyz(ctx context.Context, ...) {
- ...
- span, ctx := opentracing.StartSpanFromContext(ctx, "operation_name")
- defer span.Finish()
- span.LogFields(
- log.String("event", "soft error"),
- log.String("type", "cache timeout"),
- log.Int("waited.millis", 1500))
- ...
- }
-```
-
-#### Starting an empty trace by creating a "root span"
-
-It's always possible to create a "root" `Span` with no parent or other causal
-reference.
-
-```go
- func xyz() {
- ...
- sp := opentracing.StartSpan("operation_name")
- defer sp.Finish()
- ...
- }
-```
-
-#### Creating a (child) Span given an existing (parent) Span
-
-```go
- func xyz(parentSpan opentracing.Span, ...) {
- ...
- sp := opentracing.StartSpan(
- "operation_name",
- opentracing.ChildOf(parentSpan.Context()))
- defer sp.Finish()
- ...
- }
-```
-
-#### Serializing to the wire
-
-```go
- func makeSomeRequest(ctx context.Context) ... {
- if span := opentracing.SpanFromContext(ctx); span != nil {
- httpClient := &http.Client{}
- httpReq, _ := http.NewRequest("GET", "http://myservice/", nil)
-
- // Transmit the span's TraceContext as HTTP headers on our
- // outbound request.
- opentracing.GlobalTracer().Inject(
- span.Context(),
- opentracing.HTTPHeaders,
- opentracing.HTTPHeadersCarrier(httpReq.Header))
-
- resp, err := httpClient.Do(httpReq)
- ...
- }
- ...
- }
-```
-
-#### Deserializing from the wire
-
-```go
- http.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
- var serverSpan opentracing.Span
- appSpecificOperationName := ...
- wireContext, err := opentracing.GlobalTracer().Extract(
- opentracing.HTTPHeaders,
- opentracing.HTTPHeadersCarrier(req.Header))
- if err != nil {
- // Optionally record something about err here
- }
-
- // Create the span referring to the RPC client if available.
- // If wireContext == nil, a root span will be created.
- serverSpan = opentracing.StartSpan(
- appSpecificOperationName,
- ext.RPCServerOption(wireContext))
-
- defer serverSpan.Finish()
-
- ctx := opentracing.ContextWithSpan(context.Background(), serverSpan)
- ...
- }
-```
-
-#### Conditionally capture a field using `log.Noop`
-
-In some situations, you may want to dynamically decide whether or not
-to log a field. For example, you may want to capture additional data,
-such as a customer ID, in non-production environments:
-
-```go
- func Customer(order *Order) log.Field {
- if os.Getenv("ENVIRONMENT") == "dev" {
- return log.String("customer", order.Customer.ID)
- }
- return log.Noop()
- }
-```
-
-#### Goroutine-safety
-
-The entire public API is goroutine-safe and does not require external
-synchronization.
-
-## API pointers for those implementing a tracing system
-
-Tracing system implementors may be able to reuse or copy-paste-modify the `basictracer` package, found [here](https://github.com/opentracing/basictracer-go). In particular, see `basictracer.New(...)`.
-
-## API compatibility
-
-For the time being, "mild" backwards-incompatible changes may be made without changing the major version number. As OpenTracing and `opentracing-go` mature, backwards compatibility will become more of a priority.
-
-## Tracer test suite
-
-A test suite is available in the [harness](https://godoc.org/github.com/opentracing/opentracing-go/harness) package that can assist Tracer implementors to assert that their Tracer is working correctly.
-
-## Licensing
-
-[Apache 2.0 License](./LICENSE).
diff --git a/vendor/github.com/opentracing/opentracing-go/ext.go b/vendor/github.com/opentracing/opentracing-go/ext.go
deleted file mode 100644
index e11977ebe8..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/ext.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package opentracing
-
-import (
- "context"
-)
-
-// TracerContextWithSpanExtension is an extension interface that the
-// implementation of the Tracer interface may want to implement. It
-// allows to have some control over the go context when the
-// ContextWithSpan is invoked.
-//
-// The primary purpose of this extension are adapters from opentracing
-// API to some other tracing API.
-type TracerContextWithSpanExtension interface {
- // ContextWithSpanHook gets called by the ContextWithSpan
- // function, when the Tracer implementation also implements
- // this interface. It allows to put extra information into the
- // context and make it available to the callers of the
- // ContextWithSpan.
- //
- // This hook is invoked before the ContextWithSpan function
- // actually puts the span into the context.
- ContextWithSpanHook(ctx context.Context, span Span) context.Context
-}
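
The extension interface removed above is easiest to picture with a small, hypothetical adapter; the wrapper type and context key below are illustrative only:

```go
package adapter

import (
	"context"
	"time"

	opentracing "github.com/opentracing/opentracing-go"
)

// hookTracer embeds an opentracing.Tracer, so it still satisfies Tracer, and
// the extra method makes it a TracerContextWithSpanExtension. ContextWithSpan
// discovers the hook through span.Tracer(), so a real adapter must ensure its
// spans report this wrapper as their tracer.
type hookTracer struct {
	opentracing.Tracer
}

type attachedAtKey struct{}

// ContextWithSpanHook runs before the span is stored in the context and may
// stash extra information for downstream callers of ContextWithSpan.
func (t hookTracer) ContextWithSpanHook(ctx context.Context, span opentracing.Span) context.Context {
	return context.WithValue(ctx, attachedAtKey{}, time.Now())
}
```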
diff --git a/vendor/github.com/opentracing/opentracing-go/ext/field.go b/vendor/github.com/opentracing/opentracing-go/ext/field.go
deleted file mode 100644
index 8282bd7584..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/ext/field.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package ext
-
-import (
- "github.com/opentracing/opentracing-go"
- "github.com/opentracing/opentracing-go/log"
-)
-
-// LogError sets the error=true tag on the Span and logs err as an "error" event.
-func LogError(span opentracing.Span, err error, fields ...log.Field) {
- Error.Set(span, true)
- ef := []log.Field{
- log.Event("error"),
- log.Error(err),
- }
- ef = append(ef, fields...)
- span.LogFields(ef...)
-}
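
A hedged usage sketch for the removed LogError helper; the span comes from the default no-op global tracer, so the snippet runs without any tracing backend:

```go
package main

import (
	"errors"

	opentracing "github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/ext"
	"github.com/opentracing/opentracing-go/log"
)

func main() {
	span := opentracing.StartSpan("charge-card")
	defer span.Finish()

	if err := errors.New("card declined"); err != nil {
		// Sets the error=true tag and logs err as an "error" event,
		// together with any extra fields the caller passes.
		ext.LogError(span, err, log.String("retryable", "false"))
	}
}
```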
diff --git a/vendor/github.com/opentracing/opentracing-go/ext/tags.go b/vendor/github.com/opentracing/opentracing-go/ext/tags.go
deleted file mode 100644
index a414b5951f..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/ext/tags.go
+++ /dev/null
@@ -1,215 +0,0 @@
-package ext
-
-import "github.com/opentracing/opentracing-go"
-
-// These constants define common tag names recommended for better portability across
-// tracing systems and languages/platforms.
-//
-// The tag names are defined as typed strings, so that in addition to the usual use
-//
-// span.setTag(TagName, value)
-//
-// they also support value type validation via this additional syntax:
-//
-// TagName.Set(span, value)
-//
-var (
- //////////////////////////////////////////////////////////////////////
- // SpanKind (client/server or producer/consumer)
- //////////////////////////////////////////////////////////////////////
-
- // SpanKind hints at relationship between spans, e.g. client/server
- SpanKind = spanKindTagName("span.kind")
-
- // SpanKindRPCClient marks a span representing the client-side of an RPC
- // or other remote call
- SpanKindRPCClientEnum = SpanKindEnum("client")
- SpanKindRPCClient = opentracing.Tag{Key: string(SpanKind), Value: SpanKindRPCClientEnum}
-
- // SpanKindRPCServer marks a span representing the server-side of an RPC
- // or other remote call
- SpanKindRPCServerEnum = SpanKindEnum("server")
- SpanKindRPCServer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindRPCServerEnum}
-
- // SpanKindProducer marks a span representing the producer-side of a
- // message bus
- SpanKindProducerEnum = SpanKindEnum("producer")
- SpanKindProducer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindProducerEnum}
-
- // SpanKindConsumer marks a span representing the consumer-side of a
- // message bus
- SpanKindConsumerEnum = SpanKindEnum("consumer")
- SpanKindConsumer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindConsumerEnum}
-
- //////////////////////////////////////////////////////////////////////
- // Component name
- //////////////////////////////////////////////////////////////////////
-
- // Component is a low-cardinality identifier of the module, library,
- // or package that is generating a span.
- Component = StringTagName("component")
-
- //////////////////////////////////////////////////////////////////////
- // Sampling hint
- //////////////////////////////////////////////////////////////////////
-
- // SamplingPriority determines the priority of sampling this Span.
- SamplingPriority = Uint16TagName("sampling.priority")
-
- //////////////////////////////////////////////////////////////////////
- // Peer tags. These tags can be emitted by either client-side or
- // server-side to describe the other side/service in a peer-to-peer
- // communications, like an RPC call.
- //////////////////////////////////////////////////////////////////////
-
- // PeerService records the service name of the peer.
- PeerService = StringTagName("peer.service")
-
- // PeerAddress records the address name of the peer. This may be a "ip:port",
- // a bare "hostname", a FQDN or even a database DSN substring
- // like "mysql://username@127.0.0.1:3306/dbname"
- PeerAddress = StringTagName("peer.address")
-
- // PeerHostname records the host name of the peer
- PeerHostname = StringTagName("peer.hostname")
-
- // PeerHostIPv4 records IP v4 host address of the peer
- PeerHostIPv4 = IPv4TagName("peer.ipv4")
-
- // PeerHostIPv6 records IP v6 host address of the peer
- PeerHostIPv6 = StringTagName("peer.ipv6")
-
- // PeerPort records port number of the peer
- PeerPort = Uint16TagName("peer.port")
-
- //////////////////////////////////////////////////////////////////////
- // HTTP Tags
- //////////////////////////////////////////////////////////////////////
-
- // HTTPUrl should be the URL of the request being handled in this segment
- // of the trace, in standard URI format. The protocol is optional.
- HTTPUrl = StringTagName("http.url")
-
- // HTTPMethod is the HTTP method of the request, and is case-insensitive.
- HTTPMethod = StringTagName("http.method")
-
- // HTTPStatusCode is the numeric HTTP status code (200, 404, etc) of the
- // HTTP response.
- HTTPStatusCode = Uint16TagName("http.status_code")
-
- //////////////////////////////////////////////////////////////////////
- // DB Tags
- //////////////////////////////////////////////////////////////////////
-
- // DBInstance is database instance name.
- DBInstance = StringTagName("db.instance")
-
- // DBStatement is a database statement for the given database type.
- // It can be a query or a prepared statement (i.e., before substitution).
- DBStatement = StringTagName("db.statement")
-
- // DBType is a database type. For any SQL database, "sql".
- // For others, the lower-case database category, e.g. "redis"
- DBType = StringTagName("db.type")
-
- // DBUser is a username for accessing database.
- DBUser = StringTagName("db.user")
-
- //////////////////////////////////////////////////////////////////////
- // Message Bus Tag
- //////////////////////////////////////////////////////////////////////
-
- // MessageBusDestination is an address at which messages can be exchanged
- MessageBusDestination = StringTagName("message_bus.destination")
-
- //////////////////////////////////////////////////////////////////////
- // Error Tag
- //////////////////////////////////////////////////////////////////////
-
- // Error indicates that operation represented by the span resulted in an error.
- Error = BoolTagName("error")
-)
-
-// ---
-
-// SpanKindEnum represents common span types
-type SpanKindEnum string
-
-type spanKindTagName string
-
-// Set adds a string tag to the `span`
-func (tag spanKindTagName) Set(span opentracing.Span, value SpanKindEnum) {
- span.SetTag(string(tag), value)
-}
-
-type rpcServerOption struct {
- clientContext opentracing.SpanContext
-}
-
-func (r rpcServerOption) Apply(o *opentracing.StartSpanOptions) {
- if r.clientContext != nil {
- opentracing.ChildOf(r.clientContext).Apply(o)
- }
- SpanKindRPCServer.Apply(o)
-}
-
-// RPCServerOption returns a StartSpanOption appropriate for an RPC server span
-// with `client` representing the metadata for the remote peer Span if available.
-// In case client == nil, due to the client not being instrumented, this RPC
-// server span will be a root span.
-func RPCServerOption(client opentracing.SpanContext) opentracing.StartSpanOption {
- return rpcServerOption{client}
-}
-
-// ---
-
-// StringTagName is a common tag name to be set to a string value
-type StringTagName string
-
-// Set adds a string tag to the `span`
-func (tag StringTagName) Set(span opentracing.Span, value string) {
- span.SetTag(string(tag), value)
-}
-
-// ---
-
-// Uint32TagName is a common tag name to be set to a uint32 value
-type Uint32TagName string
-
-// Set adds a uint32 tag to the `span`
-func (tag Uint32TagName) Set(span opentracing.Span, value uint32) {
- span.SetTag(string(tag), value)
-}
-
-// ---
-
-// Uint16TagName is a common tag name to be set to a uint16 value
-type Uint16TagName string
-
-// Set adds a uint16 tag to the `span`
-func (tag Uint16TagName) Set(span opentracing.Span, value uint16) {
- span.SetTag(string(tag), value)
-}
-
-// ---
-
-// BoolTagName is a common tag name to be set to a bool value
-type BoolTagName string
-
-// Set adds a bool tag to the `span`
-func (tag BoolTagName) Set(span opentracing.Span, value bool) {
- span.SetTag(string(tag), value)
-}
-
-// IPv4TagName is a common tag name to be set to an ipv4 value
-type IPv4TagName string
-
-// Set adds IP v4 host address of the peer as an uint32 value to the `span`, keep this for backward and zipkin compatibility
-func (tag IPv4TagName) Set(span opentracing.Span, value uint32) {
- span.SetTag(string(tag), value)
-}
-
-// SetString records IP v4 host address of the peer as a .-separated tuple to the `span`. E.g., "127.0.0.1"
-func (tag IPv4TagName) SetString(span opentracing.Span, value string) {
- span.SetTag(string(tag), value)
-}
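
To make the typed-tag pattern above concrete, a small sketch of client-side instrumentation; the operation name and values are made up:

```go
package main

import (
	opentracing "github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/ext"
)

func main() {
	sp := opentracing.StartSpan("get-user")
	defer sp.Finish()

	// Each typed setter checks the value type at compile time.
	ext.SpanKindRPCClient.Set(sp) // span.kind=client
	ext.Component.Set(sp, "user-client")
	ext.HTTPMethod.Set(sp, "GET")
	ext.HTTPUrl.Set(sp, "http://users.internal/v1/users/42")
	ext.HTTPStatusCode.Set(sp, 200)
	ext.PeerHostIPv4.SetString(sp, "10.0.0.7") // dotted-quad form
	ext.Error.Set(sp, false)
}
```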
diff --git a/vendor/github.com/opentracing/opentracing-go/globaltracer.go b/vendor/github.com/opentracing/opentracing-go/globaltracer.go
deleted file mode 100644
index 4f7066a925..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/globaltracer.go
+++ /dev/null
@@ -1,42 +0,0 @@
-package opentracing
-
-type registeredTracer struct {
- tracer Tracer
- isRegistered bool
-}
-
-var (
- globalTracer = registeredTracer{NoopTracer{}, false}
-)
-
-// SetGlobalTracer sets the [singleton] opentracing.Tracer returned by
-// GlobalTracer(). Those who use GlobalTracer (rather than directly manage an
-// opentracing.Tracer instance) should call SetGlobalTracer as early as
-// possible in main(), prior to calling the `StartSpan` global func below.
-// Prior to calling `SetGlobalTracer`, any Spans started via the `StartSpan`
-// (etc) globals are noops.
-func SetGlobalTracer(tracer Tracer) {
- globalTracer = registeredTracer{tracer, true}
-}
-
-// GlobalTracer returns the global singleton `Tracer` implementation.
-// Before `SetGlobalTracer()` is called, the `GlobalTracer()` is a noop
-// implementation that drops all data handed to it.
-func GlobalTracer() Tracer {
- return globalTracer.tracer
-}
-
-// StartSpan defers to `Tracer.StartSpan`. See `GlobalTracer()`.
-func StartSpan(operationName string, opts ...StartSpanOption) Span {
- return globalTracer.tracer.StartSpan(operationName, opts...)
-}
-
-// InitGlobalTracer is deprecated. Please use SetGlobalTracer.
-func InitGlobalTracer(tracer Tracer) {
- SetGlobalTracer(tracer)
-}
-
-// IsGlobalTracerRegistered returns a `bool` to indicate if a tracer has been globally registered
-func IsGlobalTracerRegistered() bool {
- return globalTracer.isRegistered
-}
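
A minimal sketch of the registration flow implemented above; NoopTracer stands in for a real backend:

```go
package main

import (
	"fmt"

	opentracing "github.com/opentracing/opentracing-go"
)

func main() {
	fmt.Println(opentracing.IsGlobalTracerRegistered()) // false: default is a no-op tracer

	// A real program would register its tracing backend as early as possible
	// in main(); NoopTracer is only a placeholder so this compiles and runs.
	opentracing.SetGlobalTracer(opentracing.NoopTracer{})
	fmt.Println(opentracing.IsGlobalTracerRegistered()) // true

	// The package-level StartSpan now defers to the registered tracer.
	span := opentracing.StartSpan("startup")
	span.Finish()
}
```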
diff --git a/vendor/github.com/opentracing/opentracing-go/gocontext.go b/vendor/github.com/opentracing/opentracing-go/gocontext.go
deleted file mode 100644
index 1831bc9b26..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/gocontext.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package opentracing
-
-import "context"
-
-type contextKey struct{}
-
-var activeSpanKey = contextKey{}
-
-// ContextWithSpan returns a new `context.Context` that holds a reference to
-// the span. If span is nil, a new context without an active span is returned.
-func ContextWithSpan(ctx context.Context, span Span) context.Context {
- if span != nil {
- if tracerWithHook, ok := span.Tracer().(TracerContextWithSpanExtension); ok {
- ctx = tracerWithHook.ContextWithSpanHook(ctx, span)
- }
- }
- return context.WithValue(ctx, activeSpanKey, span)
-}
-
-// SpanFromContext returns the `Span` previously associated with `ctx`, or
-// `nil` if no such `Span` could be found.
-//
-// NOTE: context.Context != SpanContext: the former is Go's intra-process
-// context propagation mechanism, and the latter houses OpenTracing's per-Span
-// identity and baggage information.
-func SpanFromContext(ctx context.Context) Span {
- val := ctx.Value(activeSpanKey)
- if sp, ok := val.(Span); ok {
- return sp
- }
- return nil
-}
-
-// StartSpanFromContext starts and returns a Span with `operationName`, using
-// any Span found within `ctx` as a ChildOfRef. If no such parent could be
-// found, StartSpanFromContext creates a root (parentless) Span.
-//
-// The second return value is a context.Context object built around the
-// returned Span.
-//
-// Example usage:
-//
-// SomeFunction(ctx context.Context, ...) {
-// sp, ctx := opentracing.StartSpanFromContext(ctx, "SomeFunction")
-// defer sp.Finish()
-// ...
-// }
-func StartSpanFromContext(ctx context.Context, operationName string, opts ...StartSpanOption) (Span, context.Context) {
- return StartSpanFromContextWithTracer(ctx, GlobalTracer(), operationName, opts...)
-}
-
-// StartSpanFromContextWithTracer starts and returns a span with `operationName`
-// using a span found within the context as a ChildOfRef. If that doesn't exist
-// it creates a root span. It also returns a context.Context object built
-// around the returned span.
-//
-// It's behavior is identical to StartSpanFromContext except that it takes an explicit
-// tracer as opposed to using the global tracer.
-func StartSpanFromContextWithTracer(ctx context.Context, tracer Tracer, operationName string, opts ...StartSpanOption) (Span, context.Context) {
- if parentSpan := SpanFromContext(ctx); parentSpan != nil {
- opts = append(opts, ChildOf(parentSpan.Context()))
- }
- span := tracer.StartSpan(operationName, opts...)
- return span, ContextWithSpan(ctx, span)
-}
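
And a short sketch of the context propagation helpers defined above, showing parent and child spans flowing through an invented call chain:

```go
package main

import (
	"context"

	opentracing "github.com/opentracing/opentracing-go"
)

func handle(ctx context.Context) {
	// Finds the parent span in ctx and starts a child; also returns a new
	// context carrying the child span for deeper calls.
	sp, ctx := opentracing.StartSpanFromContext(ctx, "handle")
	defer sp.Finish()
	load(ctx)
}

func load(ctx context.Context) {
	sp, _ := opentracing.StartSpanFromContext(ctx, "load")
	defer sp.Finish()
}

func main() {
	// With no tracer registered this uses the no-op global tracer, so the
	// sketch runs standalone.
	root, ctx := opentracing.StartSpanFromContext(context.Background(), "request")
	defer root.Finish()
	handle(ctx)
}
```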
diff --git a/vendor/github.com/opentracing/opentracing-go/log/field.go b/vendor/github.com/opentracing/opentracing-go/log/field.go
deleted file mode 100644
index f222ded797..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/log/field.go
+++ /dev/null
@@ -1,282 +0,0 @@
-package log
-
-import (
- "fmt"
- "math"
-)
-
-type fieldType int
-
-const (
- stringType fieldType = iota
- boolType
- intType
- int32Type
- uint32Type
- int64Type
- uint64Type
- float32Type
- float64Type
- errorType
- objectType
- lazyLoggerType
- noopType
-)
-
-// Field instances are constructed via LogBool, LogString, and so on.
-// Tracing implementations may then handle them via the Field.Marshal
-// method.
-//
-// "heavily influenced by" (i.e., partially stolen from)
-// https://github.com/uber-go/zap
-type Field struct {
- key string
- fieldType fieldType
- numericVal int64
- stringVal string
- interfaceVal interface{}
-}
-
-// String adds a string-valued key:value pair to a Span.LogFields() record
-func String(key, val string) Field {
- return Field{
- key: key,
- fieldType: stringType,
- stringVal: val,
- }
-}
-
-// Bool adds a bool-valued key:value pair to a Span.LogFields() record
-func Bool(key string, val bool) Field {
- var numericVal int64
- if val {
- numericVal = 1
- }
- return Field{
- key: key,
- fieldType: boolType,
- numericVal: numericVal,
- }
-}
-
-// Int adds an int-valued key:value pair to a Span.LogFields() record
-func Int(key string, val int) Field {
- return Field{
- key: key,
- fieldType: intType,
- numericVal: int64(val),
- }
-}
-
-// Int32 adds an int32-valued key:value pair to a Span.LogFields() record
-func Int32(key string, val int32) Field {
- return Field{
- key: key,
- fieldType: int32Type,
- numericVal: int64(val),
- }
-}
-
-// Int64 adds an int64-valued key:value pair to a Span.LogFields() record
-func Int64(key string, val int64) Field {
- return Field{
- key: key,
- fieldType: int64Type,
- numericVal: val,
- }
-}
-
-// Uint32 adds a uint32-valued key:value pair to a Span.LogFields() record
-func Uint32(key string, val uint32) Field {
- return Field{
- key: key,
- fieldType: uint32Type,
- numericVal: int64(val),
- }
-}
-
-// Uint64 adds a uint64-valued key:value pair to a Span.LogFields() record
-func Uint64(key string, val uint64) Field {
- return Field{
- key: key,
- fieldType: uint64Type,
- numericVal: int64(val),
- }
-}
-
-// Float32 adds a float32-valued key:value pair to a Span.LogFields() record
-func Float32(key string, val float32) Field {
- return Field{
- key: key,
- fieldType: float32Type,
- numericVal: int64(math.Float32bits(val)),
- }
-}
-
-// Float64 adds a float64-valued key:value pair to a Span.LogFields() record
-func Float64(key string, val float64) Field {
- return Field{
- key: key,
- fieldType: float64Type,
- numericVal: int64(math.Float64bits(val)),
- }
-}
-
-// Error adds an error with the key "error.object" to a Span.LogFields() record
-func Error(err error) Field {
- return Field{
- key: "error.object",
- fieldType: errorType,
- interfaceVal: err,
- }
-}
-
-// Object adds an object-valued key:value pair to a Span.LogFields() record
-// Please pass in an immutable object, otherwise there may be concurrency issues:
-// for example, passing in a map may result in "fatal error: concurrent map iteration and map write",
-// because spans are sent asynchronously and the map may still be modified after this call returns.
-func Object(key string, obj interface{}) Field {
- return Field{
- key: key,
- fieldType: objectType,
- interfaceVal: obj,
- }
-}
-
-// Event creates a string-valued Field for span logs with key="event" and value=val.
-func Event(val string) Field {
- return String("event", val)
-}
-
-// Message creates a string-valued Field for span logs with key="message" and value=val.
-func Message(val string) Field {
- return String("message", val)
-}
-
-// LazyLogger allows for user-defined, late-bound logging of arbitrary data
-type LazyLogger func(fv Encoder)
-
-// Lazy adds a LazyLogger to a Span.LogFields() record; the tracing
-// implementation will call the LazyLogger function at an indefinite time in
-// the future (after Lazy() returns).
-func Lazy(ll LazyLogger) Field {
- return Field{
- fieldType: lazyLoggerType,
- interfaceVal: ll,
- }
-}
-
-// Noop creates a no-op log field that should be ignored by the tracer.
-// It can be used to capture optional fields, for example those that should
-// only be logged in non-production environments:
-//
-// func customerField(order *Order) log.Field {
-// if os.Getenv("ENVIRONMENT") == "dev" {
-// return log.String("customer", order.Customer.ID)
-// }
-// return log.Noop()
-// }
-//
-// span.LogFields(log.String("event", "purchase"), customerField(order))
-//
-func Noop() Field {
- return Field{
- fieldType: noopType,
- }
-}
-
-// Encoder allows access to the contents of a Field (via a call to
-// Field.Marshal).
-//
-// Tracer implementations typically provide an implementation of Encoder;
-// OpenTracing callers typically do not need to concern themselves with it.
-type Encoder interface {
- EmitString(key, value string)
- EmitBool(key string, value bool)
- EmitInt(key string, value int)
- EmitInt32(key string, value int32)
- EmitInt64(key string, value int64)
- EmitUint32(key string, value uint32)
- EmitUint64(key string, value uint64)
- EmitFloat32(key string, value float32)
- EmitFloat64(key string, value float64)
- EmitObject(key string, value interface{})
- EmitLazyLogger(value LazyLogger)
-}
-
-// Marshal passes a Field instance through to the appropriate
-// field-type-specific method of an Encoder.
-func (lf Field) Marshal(visitor Encoder) {
- switch lf.fieldType {
- case stringType:
- visitor.EmitString(lf.key, lf.stringVal)
- case boolType:
- visitor.EmitBool(lf.key, lf.numericVal != 0)
- case intType:
- visitor.EmitInt(lf.key, int(lf.numericVal))
- case int32Type:
- visitor.EmitInt32(lf.key, int32(lf.numericVal))
- case int64Type:
- visitor.EmitInt64(lf.key, int64(lf.numericVal))
- case uint32Type:
- visitor.EmitUint32(lf.key, uint32(lf.numericVal))
- case uint64Type:
- visitor.EmitUint64(lf.key, uint64(lf.numericVal))
- case float32Type:
- visitor.EmitFloat32(lf.key, math.Float32frombits(uint32(lf.numericVal)))
- case float64Type:
- visitor.EmitFloat64(lf.key, math.Float64frombits(uint64(lf.numericVal)))
- case errorType:
- if err, ok := lf.interfaceVal.(error); ok {
- visitor.EmitString(lf.key, err.Error())
- } else {
- visitor.EmitString(lf.key, "")
- }
- case objectType:
- visitor.EmitObject(lf.key, lf.interfaceVal)
- case lazyLoggerType:
- visitor.EmitLazyLogger(lf.interfaceVal.(LazyLogger))
- case noopType:
- // intentionally left blank
- }
-}
-
-// Key returns the field's key.
-func (lf Field) Key() string {
- return lf.key
-}
-
-// Value returns the field's value as interface{}.
-func (lf Field) Value() interface{} {
- switch lf.fieldType {
- case stringType:
- return lf.stringVal
- case boolType:
- return lf.numericVal != 0
- case intType:
- return int(lf.numericVal)
- case int32Type:
- return int32(lf.numericVal)
- case int64Type:
- return int64(lf.numericVal)
- case uint32Type:
- return uint32(lf.numericVal)
- case uint64Type:
- return uint64(lf.numericVal)
- case float32Type:
- return math.Float32frombits(uint32(lf.numericVal))
- case float64Type:
- return math.Float64frombits(uint64(lf.numericVal))
- case errorType, objectType, lazyLoggerType:
- return lf.interfaceVal
- case noopType:
- return nil
- default:
- return nil
- }
-}
-
-// String returns a string representation of the key and value.
-func (lf Field) String() string {
- return fmt.Sprint(lf.key, ":", lf.Value())
-}
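A short sketch of how the typed constructors above pair with Key() and Value(); the `dumpFields` helper is hypothetical:

```go
package trace

import (
	"fmt"

	"github.com/opentracing/opentracing-go/log"
)

// dumpFields builds a few typed fields and reads them back: each Field
// carries its key plus a value decoded to the matching Go type.
func dumpFields() {
	fields := []log.Field{
		log.String("event", "soft error"),
		log.Int("waited.millis", 1500),
		log.Bool("retried", true),
		log.Float64("ratio", 0.25),
	}
	for _, f := range fields {
		fmt.Printf("%s=%v\n", f.Key(), f.Value())
	}
}
```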
diff --git a/vendor/github.com/opentracing/opentracing-go/log/util.go b/vendor/github.com/opentracing/opentracing-go/log/util.go
deleted file mode 100644
index d57e28aa57..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/log/util.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package log
-
-import (
- "fmt"
- "reflect"
-)
-
-// InterleavedKVToFields converts keyValues a la Span.LogKV() to a Field slice
-// a la Span.LogFields().
-func InterleavedKVToFields(keyValues ...interface{}) ([]Field, error) {
- if len(keyValues)%2 != 0 {
- return nil, fmt.Errorf("non-even keyValues len: %d", len(keyValues))
- }
- fields := make([]Field, len(keyValues)/2)
- for i := 0; i*2 < len(keyValues); i++ {
- key, ok := keyValues[i*2].(string)
- if !ok {
- return nil, fmt.Errorf(
- "non-string key (pair #%d): %T",
- i, keyValues[i*2])
- }
- switch typedVal := keyValues[i*2+1].(type) {
- case bool:
- fields[i] = Bool(key, typedVal)
- case string:
- fields[i] = String(key, typedVal)
- case int:
- fields[i] = Int(key, typedVal)
- case int8:
- fields[i] = Int32(key, int32(typedVal))
- case int16:
- fields[i] = Int32(key, int32(typedVal))
- case int32:
- fields[i] = Int32(key, typedVal)
- case int64:
- fields[i] = Int64(key, typedVal)
- case uint:
- fields[i] = Uint64(key, uint64(typedVal))
- case uint64:
- fields[i] = Uint64(key, typedVal)
- case uint8:
- fields[i] = Uint32(key, uint32(typedVal))
- case uint16:
- fields[i] = Uint32(key, uint32(typedVal))
- case uint32:
- fields[i] = Uint32(key, typedVal)
- case float32:
- fields[i] = Float32(key, typedVal)
- case float64:
- fields[i] = Float64(key, typedVal)
- default:
- if typedVal == nil || (reflect.ValueOf(typedVal).Kind() == reflect.Ptr && reflect.ValueOf(typedVal).IsNil()) {
- fields[i] = String(key, "nil")
- continue
- }
- // When in doubt, coerce to a string
- fields[i] = String(key, fmt.Sprint(typedVal))
- }
- }
- return fields, nil
-}
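A minimal sketch of the conversion InterleavedKVToFields performs: the alternating key/value form accepted by Span.LogKV() becomes the typed Field slice accepted by Span.LogFields(). The `kvExample` wrapper is hypothetical:

```go
package trace

import (
	"fmt"

	"github.com/opentracing/opentracing-go/log"
)

// kvExample converts LogKV-style arguments into Fields and prints them.
func kvExample() error {
	fields, err := log.InterleavedKVToFields(
		"event", "cache timeout",
		"waited.millis", 1500,
		"retried", true,
	)
	if err != nil {
		return err // odd-length input or a non-string key
	}
	for _, f := range fields {
		fmt.Println(f.String()) // e.g. "event:cache timeout"
	}
	return nil
}
```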
diff --git a/vendor/github.com/opentracing/opentracing-go/noop.go b/vendor/github.com/opentracing/opentracing-go/noop.go
deleted file mode 100644
index f9b680a213..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/noop.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package opentracing
-
-import "github.com/opentracing/opentracing-go/log"
-
-// A NoopTracer is a trivial, minimum overhead implementation of Tracer
-// for which all operations are no-ops.
-//
-// The primary use of this implementation is in libraries, such as RPC
-// frameworks, that make tracing an optional feature controlled by the
-// end user. A no-op implementation allows said libraries to use it
-// as the default Tracer and to write instrumentation that does
-// not need to keep checking if the tracer instance is nil.
-//
-// For the same reason, the NoopTracer is the default "global" tracer
-// (see GlobalTracer and SetGlobalTracer functions).
-//
-// WARNING: NoopTracer does not support baggage propagation.
-type NoopTracer struct{}
-
-type noopSpan struct{}
-type noopSpanContext struct{}
-
-var (
- defaultNoopSpanContext SpanContext = noopSpanContext{}
- defaultNoopSpan Span = noopSpan{}
- defaultNoopTracer Tracer = NoopTracer{}
-)
-
-const (
- emptyString = ""
-)
-
-// noopSpanContext:
-func (n noopSpanContext) ForeachBaggageItem(handler func(k, v string) bool) {}
-
-// noopSpan:
-func (n noopSpan) Context() SpanContext { return defaultNoopSpanContext }
-func (n noopSpan) SetBaggageItem(key, val string) Span { return n }
-func (n noopSpan) BaggageItem(key string) string { return emptyString }
-func (n noopSpan) SetTag(key string, value interface{}) Span { return n }
-func (n noopSpan) LogFields(fields ...log.Field) {}
-func (n noopSpan) LogKV(keyVals ...interface{}) {}
-func (n noopSpan) Finish() {}
-func (n noopSpan) FinishWithOptions(opts FinishOptions) {}
-func (n noopSpan) SetOperationName(operationName string) Span { return n }
-func (n noopSpan) Tracer() Tracer { return defaultNoopTracer }
-func (n noopSpan) LogEvent(event string) {}
-func (n noopSpan) LogEventWithPayload(event string, payload interface{}) {}
-func (n noopSpan) Log(data LogData) {}
-
-// StartSpan belongs to the Tracer interface.
-func (n NoopTracer) StartSpan(operationName string, opts ...StartSpanOption) Span {
- return defaultNoopSpan
-}
-
-// Inject belongs to the Tracer interface.
-func (n NoopTracer) Inject(sp SpanContext, format interface{}, carrier interface{}) error {
- return nil
-}
-
-// Extract belongs to the Tracer interface.
-func (n NoopTracer) Extract(format interface{}, carrier interface{}) (SpanContext, error) {
- return nil, ErrSpanContextNotFound
-}
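A small sketch of the library pattern the NoopTracer doc comment describes: default to the no-op implementation so instrumentation never has to nil-check the tracer. The `Client` type is hypothetical:

```go
package rpclib

import (
	opentracing "github.com/opentracing/opentracing-go"
)

// Client makes tracing an optional feature controlled by the caller.
type Client struct {
	tracer opentracing.Tracer
}

// NewClient falls back to NoopTracer when no tracer is supplied.
func NewClient(tracer opentracing.Tracer) *Client {
	if tracer == nil {
		tracer = opentracing.NoopTracer{} // tracing disabled, near-zero overhead
	}
	return &Client{tracer: tracer}
}

// Call is instrumented unconditionally; with NoopTracer every span operation
// is a no-op, so there is no nil check anywhere in the hot path.
func (c *Client) Call(method string) {
	sp := c.tracer.StartSpan(method)
	defer sp.Finish()
	// ... perform the call ...
}
```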
diff --git a/vendor/github.com/opentracing/opentracing-go/propagation.go b/vendor/github.com/opentracing/opentracing-go/propagation.go
deleted file mode 100644
index b0c275eb05..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/propagation.go
+++ /dev/null
@@ -1,176 +0,0 @@
-package opentracing
-
-import (
- "errors"
- "net/http"
-)
-
-///////////////////////////////////////////////////////////////////////////////
-// CORE PROPAGATION INTERFACES:
-///////////////////////////////////////////////////////////////////////////////
-
-var (
- // ErrUnsupportedFormat occurs when the `format` passed to Tracer.Inject() or
- // Tracer.Extract() is not recognized by the Tracer implementation.
- ErrUnsupportedFormat = errors.New("opentracing: Unknown or unsupported Inject/Extract format")
-
- // ErrSpanContextNotFound occurs when the `carrier` passed to
- // Tracer.Extract() is valid and uncorrupted but has insufficient
- // information to extract a SpanContext.
- ErrSpanContextNotFound = errors.New("opentracing: SpanContext not found in Extract carrier")
-
- // ErrInvalidSpanContext errors occur when Tracer.Inject() is asked to
- // operate on a SpanContext which it is not prepared to handle (for
- // example, since it was created by a different tracer implementation).
- ErrInvalidSpanContext = errors.New("opentracing: SpanContext type incompatible with tracer")
-
- // ErrInvalidCarrier errors occur when Tracer.Inject() or Tracer.Extract()
- // implementations expect a different type of `carrier` than they are
- // given.
- ErrInvalidCarrier = errors.New("opentracing: Invalid Inject/Extract carrier")
-
- // ErrSpanContextCorrupted occurs when the `carrier` passed to
- // Tracer.Extract() is of the expected type but is corrupted.
- ErrSpanContextCorrupted = errors.New("opentracing: SpanContext data corrupted in Extract carrier")
-)
-
-///////////////////////////////////////////////////////////////////////////////
-// BUILTIN PROPAGATION FORMATS:
-///////////////////////////////////////////////////////////////////////////////
-
-// BuiltinFormat is used to demarcate the values within package `opentracing`
-// that are intended for use with the Tracer.Inject() and Tracer.Extract()
-// methods.
-type BuiltinFormat byte
-
-const (
- // Binary represents SpanContexts as opaque binary data.
- //
- // For Tracer.Inject(): the carrier must be an `io.Writer`.
- //
- // For Tracer.Extract(): the carrier must be an `io.Reader`.
- Binary BuiltinFormat = iota
-
- // TextMap represents SpanContexts as key:value string pairs.
- //
- // Unlike HTTPHeaders, the TextMap format does not restrict the key or
- // value character sets in any way.
- //
- // For Tracer.Inject(): the carrier must be a `TextMapWriter`.
- //
- // For Tracer.Extract(): the carrier must be a `TextMapReader`.
- TextMap
-
- // HTTPHeaders represents SpanContexts as HTTP header string pairs.
- //
- // Unlike TextMap, the HTTPHeaders format requires that the keys and values
- // be valid as HTTP headers as-is (i.e., character casing may be unstable
- // and special characters are disallowed in keys, values should be
- // URL-escaped, etc).
- //
- // For Tracer.Inject(): the carrier must be a `TextMapWriter`.
- //
- // For Tracer.Extract(): the carrier must be a `TextMapReader`.
- //
- // See HTTPHeadersCarrier for an implementation of both TextMapWriter
- // and TextMapReader that defers to an http.Header instance for storage.
- // For example, Inject():
- //
- // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
- // err := span.Tracer().Inject(
- // span.Context(), opentracing.HTTPHeaders, carrier)
- //
- // Or Extract():
- //
- // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
- // clientContext, err := tracer.Extract(
- // opentracing.HTTPHeaders, carrier)
- //
- HTTPHeaders
-)
-
-// TextMapWriter is the Inject() carrier for the TextMap builtin format. With
-// it, the caller can encode a SpanContext for propagation as entries in a map
-// of unicode strings.
-type TextMapWriter interface {
- // Set a key:value pair to the carrier. Multiple calls to Set() for the
-	// same key lead to undefined behavior.
- //
- // NOTE: The backing store for the TextMapWriter may contain data unrelated
- // to SpanContext. As such, Inject() and Extract() implementations that
- // call the TextMapWriter and TextMapReader interfaces must agree on a
- // prefix or other convention to distinguish their own key:value pairs.
- Set(key, val string)
-}
-
-// TextMapReader is the Extract() carrier for the TextMap builtin format. With it,
-// the caller can decode a propagated SpanContext as entries in a map of
-// unicode strings.
-type TextMapReader interface {
- // ForeachKey returns TextMap contents via repeated calls to the `handler`
- // function. If any call to `handler` returns a non-nil error, ForeachKey
- // terminates and returns that error.
- //
- // NOTE: The backing store for the TextMapReader may contain data unrelated
- // to SpanContext. As such, Inject() and Extract() implementations that
- // call the TextMapWriter and TextMapReader interfaces must agree on a
- // prefix or other convention to distinguish their own key:value pairs.
- //
- // The "foreach" callback pattern reduces unnecessary copying in some cases
- // and also allows implementations to hold locks while the map is read.
- ForeachKey(handler func(key, val string) error) error
-}
-
-// TextMapCarrier allows the use of regular map[string]string
-// as both TextMapWriter and TextMapReader.
-type TextMapCarrier map[string]string
-
-// ForeachKey conforms to the TextMapReader interface.
-func (c TextMapCarrier) ForeachKey(handler func(key, val string) error) error {
- for k, v := range c {
- if err := handler(k, v); err != nil {
- return err
- }
- }
- return nil
-}
-
-// Set implements Set() of opentracing.TextMapWriter
-func (c TextMapCarrier) Set(key, val string) {
- c[key] = val
-}
-
-// HTTPHeadersCarrier satisfies both TextMapWriter and TextMapReader.
-//
-// Example usage for server side:
-//
-// carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-// clientContext, err := tracer.Extract(opentracing.HTTPHeaders, carrier)
-//
-// Example usage for client side:
-//
-// carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-// err := tracer.Inject(
-// span.Context(),
-// opentracing.HTTPHeaders,
-// carrier)
-//
-type HTTPHeadersCarrier http.Header
-
-// Set conforms to the TextMapWriter interface.
-func (c HTTPHeadersCarrier) Set(key, val string) {
- h := http.Header(c)
- h.Set(key, val)
-}
-
-// ForeachKey conforms to the TextMapReader interface.
-func (c HTTPHeadersCarrier) ForeachKey(handler func(key, val string) error) error {
- for k, vals := range c {
- for _, v := range vals {
- if err := handler(k, v); err != nil {
- return err
- }
- }
- }
- return nil
-}
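Putting the two carriers together, a hedged sketch of HTTP propagation: Inject on the client side, Extract on the server side, falling back to a root span when nothing was propagated. Both helper names are hypothetical:

```go
package trace

import (
	"net/http"

	opentracing "github.com/opentracing/opentracing-go"
)

// injectHeaders copies the span's context into the outgoing request headers
// using the HTTPHeaders builtin format.
func injectHeaders(span opentracing.Span, req *http.Request) error {
	carrier := opentracing.HTTPHeadersCarrier(req.Header)
	return span.Tracer().Inject(span.Context(), opentracing.HTTPHeaders, carrier)
}

// extractSpan recovers the caller's SpanContext from the incoming request and
// starts a server-side span; ErrSpanContextNotFound means "start a new trace".
func extractSpan(tracer opentracing.Tracer, req *http.Request) opentracing.Span {
	carrier := opentracing.HTTPHeadersCarrier(req.Header)
	clientCtx, err := tracer.Extract(opentracing.HTTPHeaders, carrier)
	if err != nil {
		return tracer.StartSpan(req.URL.Path)
	}
	return tracer.StartSpan(req.URL.Path, opentracing.ChildOf(clientCtx))
}
```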
diff --git a/vendor/github.com/opentracing/opentracing-go/span.go b/vendor/github.com/opentracing/opentracing-go/span.go
deleted file mode 100644
index 0d3fb53418..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/span.go
+++ /dev/null
@@ -1,189 +0,0 @@
-package opentracing
-
-import (
- "time"
-
- "github.com/opentracing/opentracing-go/log"
-)
-
-// SpanContext represents Span state that must propagate to descendant Spans and across process
-// boundaries (e.g., a <trace_id, span_id, sampled> tuple).
-type SpanContext interface {
- // ForeachBaggageItem grants access to all baggage items stored in the
- // SpanContext.
- // The handler function will be called for each baggage key/value pair.
- // The ordering of items is not guaranteed.
- //
- // The bool return value indicates if the handler wants to continue iterating
- // through the rest of the baggage items; for example if the handler is trying to
- // find some baggage item by pattern matching the name, it can return false
- // as soon as the item is found to stop further iterations.
- ForeachBaggageItem(handler func(k, v string) bool)
-}
-
-// Span represents an active, un-finished span in the OpenTracing system.
-//
-// Spans are created by the Tracer interface.
-type Span interface {
- // Sets the end timestamp and finalizes Span state.
- //
- // With the exception of calls to Context() (which are always allowed),
- // Finish() must be the last call made to any span instance, and to do
- // otherwise leads to undefined behavior.
- Finish()
- // FinishWithOptions is like Finish() but with explicit control over
- // timestamps and log data.
- FinishWithOptions(opts FinishOptions)
-
- // Context() yields the SpanContext for this Span. Note that the return
- // value of Context() is still valid after a call to Span.Finish(), as is
- // a call to Span.Context() after a call to Span.Finish().
- Context() SpanContext
-
- // Sets or changes the operation name.
- //
- // Returns a reference to this Span for chaining.
- SetOperationName(operationName string) Span
-
- // Adds a tag to the span.
- //
- // If there is a pre-existing tag set for `key`, it is overwritten.
- //
- // Tag values can be numeric types, strings, or bools. The behavior of
- // other tag value types is undefined at the OpenTracing level. If a
- // tracing system does not know how to handle a particular value type, it
- // may ignore the tag, but shall not panic.
- //
- // Returns a reference to this Span for chaining.
- SetTag(key string, value interface{}) Span
-
- // LogFields is an efficient and type-checked way to record key:value
- // logging data about a Span, though the programming interface is a little
- // more verbose than LogKV(). Here's an example:
- //
- // span.LogFields(
- // log.String("event", "soft error"),
- // log.String("type", "cache timeout"),
- // log.Int("waited.millis", 1500))
- //
- // Also see Span.FinishWithOptions() and FinishOptions.BulkLogData.
- LogFields(fields ...log.Field)
-
- // LogKV is a concise, readable way to record key:value logging data about
- // a Span, though unfortunately this also makes it less efficient and less
- // type-safe than LogFields(). Here's an example:
- //
- // span.LogKV(
- // "event", "soft error",
- // "type", "cache timeout",
- // "waited.millis", 1500)
- //
- // For LogKV (as opposed to LogFields()), the parameters must appear as
- // key-value pairs, like
- //
- // span.LogKV(key1, val1, key2, val2, key3, val3, ...)
- //
- // The keys must all be strings. The values may be strings, numeric types,
- // bools, Go error instances, or arbitrary structs.
- //
- // (Note to implementors: consider the log.InterleavedKVToFields() helper)
- LogKV(alternatingKeyValues ...interface{})
-
- // SetBaggageItem sets a key:value pair on this Span and its SpanContext
- // that also propagates to descendants of this Span.
- //
- // SetBaggageItem() enables powerful functionality given a full-stack
- // opentracing integration (e.g., arbitrary application data from a mobile
- // app can make it, transparently, all the way into the depths of a storage
- // system), and with it some powerful costs: use this feature with care.
- //
- // IMPORTANT NOTE #1: SetBaggageItem() will only propagate baggage items to
- // *future* causal descendants of the associated Span.
- //
- // IMPORTANT NOTE #2: Use this thoughtfully and with care. Every key and
- // value is copied into every local *and remote* child of the associated
- // Span, and that can add up to a lot of network and cpu overhead.
- //
- // Returns a reference to this Span for chaining.
- SetBaggageItem(restrictedKey, value string) Span
-
- // Gets the value for a baggage item given its key. Returns the empty string
- // if the value isn't found in this Span.
- BaggageItem(restrictedKey string) string
-
- // Provides access to the Tracer that created this Span.
- Tracer() Tracer
-
- // Deprecated: use LogFields or LogKV
- LogEvent(event string)
- // Deprecated: use LogFields or LogKV
- LogEventWithPayload(event string, payload interface{})
- // Deprecated: use LogFields or LogKV
- Log(data LogData)
-}
-
-// LogRecord is data associated with a single Span log. Every LogRecord
-// instance must specify at least one Field.
-type LogRecord struct {
- Timestamp time.Time
- Fields []log.Field
-}
-
-// FinishOptions allows Span.FinishWithOptions callers to override the finish
-// timestamp and provide log data via a bulk interface.
-type FinishOptions struct {
- // FinishTime overrides the Span's finish time, or implicitly becomes
- // time.Now() if FinishTime.IsZero().
- //
- // FinishTime must resolve to a timestamp that's >= the Span's StartTime
- // (per StartSpanOptions).
- FinishTime time.Time
-
- // LogRecords allows the caller to specify the contents of many LogFields()
- // calls with a single slice. May be nil.
- //
- // None of the LogRecord.Timestamp values may be .IsZero() (i.e., they must
- // be set explicitly). Also, they must be >= the Span's start timestamp and
- // <= the FinishTime (or time.Now() if FinishTime.IsZero()). Otherwise the
- // behavior of FinishWithOptions() is undefined.
- //
- // If specified, the caller hands off ownership of LogRecords at
- // FinishWithOptions() invocation time.
- //
- // If specified, the (deprecated) BulkLogData must be nil or empty.
- LogRecords []LogRecord
-
- // BulkLogData is DEPRECATED.
- BulkLogData []LogData
-}
-
-// LogData is DEPRECATED
-type LogData struct {
- Timestamp time.Time
- Event string
- Payload interface{}
-}
-
-// ToLogRecord converts a deprecated LogData to a non-deprecated LogRecord
-func (ld *LogData) ToLogRecord() LogRecord {
- var literalTimestamp time.Time
- if ld.Timestamp.IsZero() {
- literalTimestamp = time.Now()
- } else {
- literalTimestamp = ld.Timestamp
- }
- rval := LogRecord{
- Timestamp: literalTimestamp,
- }
- if ld.Payload == nil {
- rval.Fields = []log.Field{
- log.String("event", ld.Event),
- }
- } else {
- rval.Fields = []log.Field{
- log.String("event", ld.Event),
- log.Object("payload", ld.Payload),
- }
- }
- return rval
-}
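A brief sketch exercising the Span interface above: tags, the typed LogFields form, the looser LogKV form, and an explicit finish timestamp. The `annotateSpan` helper is hypothetical:

```go
package trace

import (
	"time"

	opentracing "github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/log"
)

// annotateSpan records typical metadata on an active span and finishes it
// with an explicit timestamp via FinishWithOptions.
func annotateSpan(sp opentracing.Span) {
	sp.SetTag("component", "cache")
	sp.LogFields(
		log.String("event", "soft error"),
		log.Int("waited.millis", 1500),
	)
	sp.LogKV("event", "retry", "attempt", 2)
	sp.FinishWithOptions(opentracing.FinishOptions{FinishTime: time.Now()})
}
```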
diff --git a/vendor/github.com/opentracing/opentracing-go/tracer.go b/vendor/github.com/opentracing/opentracing-go/tracer.go
deleted file mode 100644
index 715f0cedfb..0000000000
--- a/vendor/github.com/opentracing/opentracing-go/tracer.go
+++ /dev/null
@@ -1,304 +0,0 @@
-package opentracing
-
-import "time"
-
-// Tracer is a simple, thin interface for Span creation and SpanContext
-// propagation.
-type Tracer interface {
-
- // Create, start, and return a new Span with the given `operationName` and
- // incorporate the given StartSpanOption `opts`. (Note that `opts` borrows
- // from the "functional options" pattern, per
- // http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis)
- //
- // A Span with no SpanReference options (e.g., opentracing.ChildOf() or
- // opentracing.FollowsFrom()) becomes the root of its own trace.
- //
- // Examples:
- //
- // var tracer opentracing.Tracer = ...
- //
- // // The root-span case:
- // sp := tracer.StartSpan("GetFeed")
- //
- // // The vanilla child span case:
- // sp := tracer.StartSpan(
- // "GetFeed",
- // opentracing.ChildOf(parentSpan.Context()))
- //
- // // All the bells and whistles:
- // sp := tracer.StartSpan(
- // "GetFeed",
- // opentracing.ChildOf(parentSpan.Context()),
- // opentracing.Tag{"user_agent", loggedReq.UserAgent},
- // opentracing.StartTime(loggedReq.Timestamp),
- // )
- //
- StartSpan(operationName string, opts ...StartSpanOption) Span
-
- // Inject() takes the `sm` SpanContext instance and injects it for
- // propagation within `carrier`. The actual type of `carrier` depends on
- // the value of `format`.
- //
- // OpenTracing defines a common set of `format` values (see BuiltinFormat),
- // and each has an expected carrier type.
- //
- // Other packages may declare their own `format` values, much like the keys
- // used by `context.Context` (see https://godoc.org/context#WithValue).
- //
- // Example usage (sans error handling):
- //
- // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
- // err := tracer.Inject(
- // span.Context(),
- // opentracing.HTTPHeaders,
- // carrier)
- //
- // NOTE: All opentracing.Tracer implementations MUST support all
- // BuiltinFormats.
- //
- // Implementations may return opentracing.ErrUnsupportedFormat if `format`
- // is not supported by (or not known by) the implementation.
- //
- // Implementations may return opentracing.ErrInvalidCarrier or any other
- // implementation-specific error if the format is supported but injection
- // fails anyway.
- //
- // See Tracer.Extract().
- Inject(sm SpanContext, format interface{}, carrier interface{}) error
-
- // Extract() returns a SpanContext instance given `format` and `carrier`.
- //
- // OpenTracing defines a common set of `format` values (see BuiltinFormat),
- // and each has an expected carrier type.
- //
- // Other packages may declare their own `format` values, much like the keys
- // used by `context.Context` (see
- // https://godoc.org/golang.org/x/net/context#WithValue).
- //
- // Example usage (with StartSpan):
- //
- //
- // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
- // clientContext, err := tracer.Extract(opentracing.HTTPHeaders, carrier)
- //
- // // ... assuming the ultimate goal here is to resume the trace with a
- // // server-side Span:
- // var serverSpan opentracing.Span
- // if err == nil {
- // span = tracer.StartSpan(
- // rpcMethodName, ext.RPCServerOption(clientContext))
- // } else {
- // span = tracer.StartSpan(rpcMethodName)
- // }
- //
- //
- // NOTE: All opentracing.Tracer implementations MUST support all
- // BuiltinFormats.
- //
- // Return values:
- // - A successful Extract returns a SpanContext instance and a nil error
- // - If there was simply no SpanContext to extract in `carrier`, Extract()
- // returns (nil, opentracing.ErrSpanContextNotFound)
- // - If `format` is unsupported or unrecognized, Extract() returns (nil,
- // opentracing.ErrUnsupportedFormat)
- // - If there are more fundamental problems with the `carrier` object,
- // Extract() may return opentracing.ErrInvalidCarrier,
- // opentracing.ErrSpanContextCorrupted, or implementation-specific
- // errors.
- //
- // See Tracer.Inject().
- Extract(format interface{}, carrier interface{}) (SpanContext, error)
-}
-
-// StartSpanOptions allows Tracer.StartSpan() callers and implementors a
-// mechanism to override the start timestamp, specify Span References, and make
-// a single Tag or multiple Tags available at Span start time.
-//
-// StartSpan() callers should look at the StartSpanOption interface and
-// implementations available in this package.
-//
-// Tracer implementations can convert a slice of `StartSpanOption` instances
-// into a `StartSpanOptions` struct like so:
-//
-// func StartSpan(opName string, opts ...opentracing.StartSpanOption) {
-// sso := opentracing.StartSpanOptions{}
-// for _, o := range opts {
-// o.Apply(&sso)
-// }
-// ...
-// }
-//
-type StartSpanOptions struct {
- // Zero or more causal references to other Spans (via their SpanContext).
- // If empty, start a "root" Span (i.e., start a new trace).
- References []SpanReference
-
- // StartTime overrides the Span's start time, or implicitly becomes
- // time.Now() if StartTime.IsZero().
- StartTime time.Time
-
- // Tags may have zero or more entries; the restrictions on map values are
- // identical to those for Span.SetTag(). May be nil.
- //
- // If specified, the caller hands off ownership of Tags at
- // StartSpan() invocation time.
- Tags map[string]interface{}
-}
-
-// StartSpanOption instances (zero or more) may be passed to Tracer.StartSpan.
-//
-// StartSpanOption borrows from the "functional options" pattern, per
-// http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis
-type StartSpanOption interface {
- Apply(*StartSpanOptions)
-}
-
-// SpanReferenceType is an enum type describing different categories of
-// relationships between two Spans. If Span-2 refers to Span-1, the
-// SpanReferenceType describes Span-1 from Span-2's perspective. For example,
-// ChildOfRef means that Span-1 created Span-2.
-//
-// NOTE: Span-1 and Span-2 do *not* necessarily depend on each other for
-// completion; e.g., Span-2 may be part of a background job enqueued by Span-1,
-// or Span-2 may be sitting in a distributed queue behind Span-1.
-type SpanReferenceType int
-
-const (
- // ChildOfRef refers to a parent Span that caused *and* somehow depends
- // upon the new child Span. Often (but not always), the parent Span cannot
- // finish until the child Span does.
- //
-	// A timing diagram for a ChildOfRef that's blocked on the new Span:
- //
- // [-Parent Span---------]
- // [-Child Span----]
- //
- // See http://opentracing.io/spec/
- //
- // See opentracing.ChildOf()
- ChildOfRef SpanReferenceType = iota
-
- // FollowsFromRef refers to a parent Span that does not depend in any way
- // on the result of the new child Span. For instance, one might use
- // FollowsFromRefs to describe pipeline stages separated by queues,
- // or a fire-and-forget cache insert at the tail end of a web request.
- //
- // A FollowsFromRef Span is part of the same logical trace as the new Span:
- // i.e., the new Span is somehow caused by the work of its FollowsFromRef.
- //
- // All of the following could be valid timing diagrams for children that
- // "FollowFrom" a parent.
- //
- // [-Parent Span-] [-Child Span-]
- //
- //
- // [-Parent Span--]
- // [-Child Span-]
- //
- //
- // [-Parent Span-]
- // [-Child Span-]
- //
- // See http://opentracing.io/spec/
- //
- // See opentracing.FollowsFrom()
- FollowsFromRef
-)
-
-// SpanReference is a StartSpanOption that pairs a SpanReferenceType and a
-// referenced SpanContext. See the SpanReferenceType documentation for
-// supported relationships. If SpanReference is created with
-// ReferencedContext==nil, it has no effect. Thus it allows for a more concise
-// syntax for starting spans:
-//
-// sc, _ := tracer.Extract(someFormat, someCarrier)
-// span := tracer.StartSpan("operation", opentracing.ChildOf(sc))
-//
-// The `ChildOf(sc)` option above will not panic if sc == nil, it will just
-// not add the parent span reference to the options.
-type SpanReference struct {
- Type SpanReferenceType
- ReferencedContext SpanContext
-}
-
-// Apply satisfies the StartSpanOption interface.
-func (r SpanReference) Apply(o *StartSpanOptions) {
- if r.ReferencedContext != nil {
- o.References = append(o.References, r)
- }
-}
-
-// ChildOf returns a StartSpanOption pointing to a dependent parent span.
-// If sc == nil, the option has no effect.
-//
-// See ChildOfRef, SpanReference
-func ChildOf(sc SpanContext) SpanReference {
- return SpanReference{
- Type: ChildOfRef,
- ReferencedContext: sc,
- }
-}
-
-// FollowsFrom returns a StartSpanOption pointing to a parent Span that caused
-// the child Span but does not directly depend on its result in any way.
-// If sc == nil, the option has no effect.
-//
-// See FollowsFromRef, SpanReference
-func FollowsFrom(sc SpanContext) SpanReference {
- return SpanReference{
- Type: FollowsFromRef,
- ReferencedContext: sc,
- }
-}
-
-// StartTime is a StartSpanOption that sets an explicit start timestamp for the
-// new Span.
-type StartTime time.Time
-
-// Apply satisfies the StartSpanOption interface.
-func (t StartTime) Apply(o *StartSpanOptions) {
- o.StartTime = time.Time(t)
-}
-
-// Tags are a generic map from an arbitrary string key to an opaque value type.
-// The underlying tracing system is responsible for interpreting and
-// serializing the values.
-type Tags map[string]interface{}
-
-// Apply satisfies the StartSpanOption interface.
-func (t Tags) Apply(o *StartSpanOptions) {
- if o.Tags == nil {
- o.Tags = make(map[string]interface{})
- }
- for k, v := range t {
- o.Tags[k] = v
- }
-}
-
-// Tag may be passed as a StartSpanOption to add a tag to new spans,
-// or its Set method may be used to apply the tag to an existing Span,
-// for example:
-//
-// tracer.StartSpan("opName", Tag{"Key", value})
-//
-// or
-//
-// Tag{"key", value}.Set(span)
-type Tag struct {
- Key string
- Value interface{}
-}
-
-// Apply satisfies the StartSpanOption interface.
-func (t Tag) Apply(o *StartSpanOptions) {
- if o.Tags == nil {
- o.Tags = make(map[string]interface{})
- }
- o.Tags[t.Key] = t.Value
-}
-
-// Set applies the tag to an existing Span.
-func (t Tag) Set(s Span) {
- s.SetTag(t.Key, t.Value)
-}
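A minimal sketch showing the StartSpanOption implementations above working together: a ChildOf reference, an explicit start time, tags supplied at start, and one added afterwards via Tag.Set. The `startChild` helper and tag values are illustrative only:

```go
package trace

import (
	"time"

	opentracing "github.com/opentracing/opentracing-go"
)

// startChild starts a span that is a ChildOf the given parent context;
// ChildOf is a no-op when parent == nil, so this also covers the root case.
func startChild(tracer opentracing.Tracer, parent opentracing.SpanContext) opentracing.Span {
	sp := tracer.StartSpan(
		"GetFeed",
		opentracing.ChildOf(parent),
		opentracing.StartTime(time.Now()),
		opentracing.Tags{"user_agent": "example-agent"},
	)
	opentracing.Tag{Key: "retry", Value: false}.Set(sp)
	return sp
}
```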
diff --git a/vendor/github.com/sigstore/rekor/CONTRIBUTORS.md b/vendor/github.com/sigstore/rekor/CONTRIBUTORS.md
deleted file mode 100644
index bdff02765c..0000000000
--- a/vendor/github.com/sigstore/rekor/CONTRIBUTORS.md
+++ /dev/null
@@ -1,122 +0,0 @@
-# Contributing
-
-When contributing to this repository, please first discuss the change you wish
-to make via an [issue](https://github.com/sigstore/rekor/issues).
-
-## Pull Request Process
-
-1. Create an [issue](https://github.com/sigstore/rekor/issues)
- outlining the fix or feature.
-2. Fork the rekor repository to your own GitHub account and clone it locally.
-3. Hack on your changes.
-4. Update the README.md with details of changes to any interface; this includes new environment
- variables, exposed ports, useful file locations, CLI parameters and
- new or changed configuration values.
-5. Correctly format your commit message; see the [Commit Message Guidelines](#commit-message-guidelines)
-   below.
-6. Ensure that CI passes; if it fails, fix the failures.
-7. Every pull request requires a review from the [core rekor team](https://github.com/orgs/github.com/sigstore/teams/core-team)
- before merging.
-8. If your pull request consists of more than one commit, please squash your
-   commits as described in [Squash Commits](#squash-commits).
-
-## Commit Message Guidelines
-
-We follow the commit formatting recommendations found in [Chris Beams' How to Write a Git Commit Message article](https://chris.beams.io/posts/git-commit/).
-
-Well-formed commit messages not only help reviewers understand the nature of
-the Pull Request, but also assist the release process, where commit messages
-are used to generate release notes.
-
-A good example of a commit message would be as follows:
-
-```
-Summarize changes in around 50 characters or less
-
-More detailed explanatory text, if necessary. Wrap it to about 72
-characters or so. In some contexts, the first line is treated as the
-subject of the commit and the rest of the text as the body. The
-blank line separating the summary from the body is critical (unless
-you omit the body entirely); various tools like `log`, `shortlog`
-and `rebase` can get confused if you run the two together.
-
-Explain the problem that this commit is solving. Focus on why you
-are making this change as opposed to how (the code explains that).
-Are there side effects or other unintuitive consequences of this
-change? Here's the place to explain them.
-
-Further paragraphs come after blank lines.
-
- - Bullet points are okay, too
-
- - Typically a hyphen or asterisk is used for the bullet, preceded
- by a single space, with blank lines in between, but conventions
- vary here
-
-If you use an issue tracker, put references to them at the bottom,
-like this:
-
-Resolves: #123
-See also: #456, #789
-```
-
-Note the `Resolves: #123` tag; this references the issue raised and allows us
-to ensure issues are associated and closed when a pull request is merged.
-
-Please refer to [the github help page on message types](https://help.github.com/articles/closing-issues-using-keywords/)
-for a complete list of issue references.
-
-## Squash Commits
-
-Should your pull request consist of more than one commit (perhaps due to
-a change being requested during the review cycle), please perform a git squash
-once a reviewer has approved your pull request.
-
-A squash can be performed as follows. Let's say you have the following commits:
-
- initial commit
- second commit
- final commit
-
-Run the command below with the number set to the total number of commits you
-wish to squash (in our case 3 commits):
-
- git rebase -i HEAD~3
-
-Your default text editor will then open up and you will see the following:
-
- pick eb36612 initial commit
- pick 9ac8968 second commit
- pick a760569 final commit
-
- # Rebase eb1429f..a760569 onto eb1429f (3 commands)
-
-We want to rebase on top of our first commit, so we change the other two commits
-to `squash`:
-
- pick eb36612 initial commit
- squash 9ac8968 second commit
- squash a760569 final commit
-
-After this, should you wish to update your commit message to better summarise
-your pull request as a whole, run:
-
- git commit --amend
-
-You will then need to force push (assuming your initial commit(s) were posted
-to GitHub):
-
- git push origin your-branch --force
-
-Alternatively, a core member can squash your commits within GitHub.
-
-## DCO Signoff
-
-Make sure to sign the [Developer Certificate of
-Origin](https://git-scm.com/docs/git-commit#Documentation/git-commit.txt---signoff).
-
-## Code of Conduct
-
-Rekor adheres to and enforces the [Contributor Covenant](http://contributor-covenant.org/version/1/4/) Code of Conduct.
-Please take a moment to read the [CODE_OF_CONDUCT.md](https://github.com/sigstore/rekor/blob/master/CODE_OF_CONDUCT.md) document.
-
diff --git a/vendor/github.com/sigstore/rekor/COPYRIGHT.txt b/vendor/github.com/sigstore/rekor/COPYRIGHT.txt
deleted file mode 100644
index 7a01c84986..0000000000
--- a/vendor/github.com/sigstore/rekor/COPYRIGHT.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-
-Copyright 2021 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/vendor/github.com/sigstore/rekor/LICENSE b/vendor/github.com/sigstore/rekor/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/vendor/github.com/sigstore/rekor/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/sigstore/rekor/pkg/client/options.go b/vendor/github.com/sigstore/rekor/pkg/client/options.go
deleted file mode 100644
index c1135a71c3..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/client/options.go
+++ /dev/null
@@ -1,145 +0,0 @@
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package client
-
-import (
- "net/http"
- "time"
-
- "github.com/hashicorp/go-retryablehttp"
-)
-
-// Option is a functional option for customizing the Rekor client.
-type Option func(*options)
-
-type options struct {
- UserAgent string
- RetryCount uint
- RetryWaitMin time.Duration
- RetryWaitMax time.Duration
- InsecureTLS bool
- Logger interface{}
- NoDisableKeepalives bool
- Headers map[string][]string
-}
-
-const (
- // DefaultRetryCount is the default number of retries.
- DefaultRetryCount = 3
-)
-
-func makeOptions(opts ...Option) *options {
- o := &options{
- UserAgent: "",
- RetryCount: DefaultRetryCount,
- }
-
- for _, opt := range opts {
- opt(o)
- }
-
- return o
-}
-
-// WithUserAgent sets the User-Agent header sent with client requests.
-func WithUserAgent(userAgent string) Option {
- return func(o *options) {
- o.UserAgent = userAgent
- }
-}
-
-// WithRetryCount sets the number of retries.
-func WithRetryCount(retryCount uint) Option {
- return func(o *options) {
- o.RetryCount = retryCount
- }
-}
-
-// WithRetryWaitMin sets the minimum length of time to wait between retries.
-func WithRetryWaitMin(t time.Duration) Option {
- return func(o *options) {
- o.RetryWaitMin = t
- }
-}
-
-// WithRetryWaitMax sets the maximum length of time to wait between retries.
-func WithRetryWaitMax(t time.Duration) Option {
- return func(o *options) {
- o.RetryWaitMax = t
- }
-}
-
-// WithLogger sets the logger; it must implement retryablehttp.Logger or retryablehttp.LeveledLogger, otherwise the option has no effect.
-func WithLogger(logger interface{}) Option {
- return func(o *options) {
- switch logger.(type) {
- case retryablehttp.Logger, retryablehttp.LeveledLogger:
- o.Logger = logger
- }
- }
-}
-
-// WithInsecureTLS disables TLS verification.
-func WithInsecureTLS(enabled bool) Option {
- return func(o *options) {
- o.InsecureTLS = enabled
- }
-}
-
-// WithNoDisableKeepalives unsets the default DisableKeepalives setting.
-func WithNoDisableKeepalives(noDisableKeepalives bool) Option {
- return func(o *options) {
- o.NoDisableKeepalives = noDisableKeepalives
- }
-}
-
-// WithHeaders sets default headers for every client request.
-func WithHeaders(h map[string][]string) Option {
- return func(o *options) {
- o.Headers = h
- }
-}
-
-type roundTripper struct {
- http.RoundTripper
- UserAgent string
- Headers map[string][]string
-}
-
-// RoundTrip implements `http.RoundTripper`
-func (rt *roundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
- req.Header.Set("User-Agent", rt.UserAgent)
- for k, v := range rt.Headers {
- for _, h := range v {
- req.Header.Add(k, h)
- }
- }
- return rt.RoundTripper.RoundTrip(req)
-}
-
-func createRoundTripper(inner http.RoundTripper, o *options) http.RoundTripper {
- if inner == nil {
- inner = http.DefaultTransport
- }
- if o.UserAgent == "" && o.Headers == nil {
- // There's nothing to do...
- return inner
- }
- return &roundTripper{
- RoundTripper: inner,
- UserAgent: o.UserAgent,
- Headers: o.Headers,
- }
-}
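A package-internal sketch (it would have to live inside package client, since options and makeOptions are unexported) of how these functional options compose on top of the defaults; the values are illustrative only:

```go
package client

import "time"

// exampleOptions applies a few options left to right over the defaults set
// by makeOptions (RetryCount starts at DefaultRetryCount).
func exampleOptions() *options {
	return makeOptions(
		WithUserAgent("example-agent"),         // overrides the empty default
		WithRetryCount(5),                      // overrides DefaultRetryCount (3)
		WithRetryWaitMin(500*time.Millisecond), // minimum back-off between retries
		WithRetryWaitMax(5*time.Second),        // cap on the back-off
	)
}
```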
diff --git a/vendor/github.com/sigstore/rekor/pkg/client/rekor_client.go b/vendor/github.com/sigstore/rekor/pkg/client/rekor_client.go
deleted file mode 100644
index 470ca5eaa2..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/client/rekor_client.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package client
-
-import (
- "crypto/tls"
- "net/http"
- "net/url"
-
- "github.com/go-openapi/runtime"
- httptransport "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-
- "github.com/hashicorp/go-cleanhttp"
- retryablehttp "github.com/hashicorp/go-retryablehttp"
- "github.com/sigstore/rekor/pkg/generated/client"
- "github.com/sigstore/rekor/pkg/util"
-)
-
-func GetRekorClient(rekorServerURL string, opts ...Option) (*client.Rekor, error) {
- url, err := url.Parse(rekorServerURL)
- if err != nil {
- return nil, err
- }
- o := makeOptions(opts...)
-
- retryableClient := retryablehttp.NewClient()
- defaultTransport := cleanhttp.DefaultTransport()
- if o.NoDisableKeepalives {
- defaultTransport.DisableKeepAlives = false
- }
- if o.InsecureTLS {
- /* #nosec G402 */
- defaultTransport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
- }
- retryableClient.HTTPClient = &http.Client{
- Transport: defaultTransport,
- }
- retryableClient.RetryMax = int(o.RetryCount)
- retryableClient.RetryWaitMin = o.RetryWaitMin
- retryableClient.RetryWaitMax = o.RetryWaitMax
- retryableClient.Logger = o.Logger
-
- httpClient := retryableClient.StandardClient()
- httpClient.Transport = createRoundTripper(httpClient.Transport, o)
-
- // sanitize path
- if url.Path == "" {
- url.Path = client.DefaultBasePath
- }
-
- rt := httptransport.NewWithClient(url.Host, url.Path, []string{url.Scheme}, httpClient)
- rt.Consumers["application/json"] = runtime.JSONConsumer()
- rt.Consumers["application/x-pem-file"] = runtime.TextConsumer()
- rt.Producers["application/json"] = runtime.JSONProducer()
-
- registry := strfmt.Default
- registry.Add("signedCheckpoint", &util.SignedNote{}, util.SignedCheckpointValidator)
- return client.New(rt, registry), nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/create_log_entry_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/create_log_entry_parameters.go
deleted file mode 100644
index 481fa2bda5..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/create_log_entry_parameters.go
+++ /dev/null
@@ -1,164 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// NewCreateLogEntryParams creates a new CreateLogEntryParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewCreateLogEntryParams() *CreateLogEntryParams {
- return &CreateLogEntryParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewCreateLogEntryParamsWithTimeout creates a new CreateLogEntryParams object
-// with the ability to set a timeout on a request.
-func NewCreateLogEntryParamsWithTimeout(timeout time.Duration) *CreateLogEntryParams {
- return &CreateLogEntryParams{
- timeout: timeout,
- }
-}
-
-// NewCreateLogEntryParamsWithContext creates a new CreateLogEntryParams object
-// with the ability to set a context for a request.
-func NewCreateLogEntryParamsWithContext(ctx context.Context) *CreateLogEntryParams {
- return &CreateLogEntryParams{
- Context: ctx,
- }
-}
-
-// NewCreateLogEntryParamsWithHTTPClient creates a new CreateLogEntryParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewCreateLogEntryParamsWithHTTPClient(client *http.Client) *CreateLogEntryParams {
- return &CreateLogEntryParams{
- HTTPClient: client,
- }
-}
-
-/*
-CreateLogEntryParams contains all the parameters to send to the API endpoint
-
- for the create log entry operation.
-
- Typically these are written to a http.Request.
-*/
-type CreateLogEntryParams struct {
-
- // ProposedEntry.
- ProposedEntry models.ProposedEntry
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the create log entry params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *CreateLogEntryParams) WithDefaults() *CreateLogEntryParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the create log entry params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *CreateLogEntryParams) SetDefaults() {
- // no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the create log entry params
-func (o *CreateLogEntryParams) WithTimeout(timeout time.Duration) *CreateLogEntryParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the create log entry params
-func (o *CreateLogEntryParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the create log entry params
-func (o *CreateLogEntryParams) WithContext(ctx context.Context) *CreateLogEntryParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the create log entry params
-func (o *CreateLogEntryParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the create log entry params
-func (o *CreateLogEntryParams) WithHTTPClient(client *http.Client) *CreateLogEntryParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the create log entry params
-func (o *CreateLogEntryParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithProposedEntry adds the proposedEntry to the create log entry params
-func (o *CreateLogEntryParams) WithProposedEntry(proposedEntry models.ProposedEntry) *CreateLogEntryParams {
- o.SetProposedEntry(proposedEntry)
- return o
-}
-
-// SetProposedEntry adds the proposedEntry to the create log entry params
-func (o *CreateLogEntryParams) SetProposedEntry(proposedEntry models.ProposedEntry) {
- o.ProposedEntry = proposedEntry
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *CreateLogEntryParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
- if err := r.SetBodyParam(o.ProposedEntry); err != nil {
- return err
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/create_log_entry_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/create_log_entry_responses.go
deleted file mode 100644
index 9b2845e7ab..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/create_log_entry_responses.go
+++ /dev/null
@@ -1,396 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// CreateLogEntryReader is a Reader for the CreateLogEntry structure.
-type CreateLogEntryReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *CreateLogEntryReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 201:
- result := NewCreateLogEntryCreated()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- case 400:
- result := NewCreateLogEntryBadRequest()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- case 409:
- result := NewCreateLogEntryConflict()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- default:
- result := NewCreateLogEntryDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewCreateLogEntryCreated creates a CreateLogEntryCreated with default headers values
-func NewCreateLogEntryCreated() *CreateLogEntryCreated {
- return &CreateLogEntryCreated{}
-}
-
-/*
-CreateLogEntryCreated describes a response with status code 201, with default header values.
-
-Returns the entry created in the transparency log
-*/
-type CreateLogEntryCreated struct {
-
- /* UUID of log entry
- */
- ETag string
-
- /* URI location of log entry
-
- Format: uri
- */
- Location strfmt.URI
-
- Payload models.LogEntry
-}
-
-// IsSuccess returns true when this create log entry created response has a 2xx status code
-func (o *CreateLogEntryCreated) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this create log entry created response has a 3xx status code
-func (o *CreateLogEntryCreated) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this create log entry created response has a 4xx status code
-func (o *CreateLogEntryCreated) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this create log entry created response has a 5xx status code
-func (o *CreateLogEntryCreated) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this create log entry created response a status code equal to that given
-func (o *CreateLogEntryCreated) IsCode(code int) bool {
- return code == 201
-}
-
-// Code gets the status code for the create log entry created response
-func (o *CreateLogEntryCreated) Code() int {
- return 201
-}
-
-func (o *CreateLogEntryCreated) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntryCreated %s", 201, payload)
-}
-
-func (o *CreateLogEntryCreated) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntryCreated %s", 201, payload)
-}
-
-func (o *CreateLogEntryCreated) GetPayload() models.LogEntry {
- return o.Payload
-}
-
-func (o *CreateLogEntryCreated) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // hydrates response header ETag
- hdrETag := response.GetHeader("ETag")
-
- if hdrETag != "" {
- o.ETag = hdrETag
- }
-
- // hydrates response header Location
- hdrLocation := response.GetHeader("Location")
-
- if hdrLocation != "" {
- vallocation, err := formats.Parse("uri", hdrLocation)
- if err != nil {
- return errors.InvalidType("Location", "header", "strfmt.URI", hdrLocation)
- }
- o.Location = *(vallocation.(*strfmt.URI))
- }
-
- // response payload
- if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewCreateLogEntryBadRequest creates a CreateLogEntryBadRequest with default headers values
-func NewCreateLogEntryBadRequest() *CreateLogEntryBadRequest {
- return &CreateLogEntryBadRequest{}
-}
-
-/*
-CreateLogEntryBadRequest describes a response with status code 400, with default header values.
-
-The content supplied to the server was invalid
-*/
-type CreateLogEntryBadRequest struct {
- Payload *models.Error
-}
-
-// IsSuccess returns true when this create log entry bad request response has a 2xx status code
-func (o *CreateLogEntryBadRequest) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this create log entry bad request response has a 3xx status code
-func (o *CreateLogEntryBadRequest) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this create log entry bad request response has a 4xx status code
-func (o *CreateLogEntryBadRequest) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this create log entry bad request response has a 5xx status code
-func (o *CreateLogEntryBadRequest) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this create log entry bad request response a status code equal to that given
-func (o *CreateLogEntryBadRequest) IsCode(code int) bool {
- return code == 400
-}
-
-// Code gets the status code for the create log entry bad request response
-func (o *CreateLogEntryBadRequest) Code() int {
- return 400
-}
-
-func (o *CreateLogEntryBadRequest) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntryBadRequest %s", 400, payload)
-}
-
-func (o *CreateLogEntryBadRequest) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntryBadRequest %s", 400, payload)
-}
-
-func (o *CreateLogEntryBadRequest) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *CreateLogEntryBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewCreateLogEntryConflict creates a CreateLogEntryConflict with default headers values
-func NewCreateLogEntryConflict() *CreateLogEntryConflict {
- return &CreateLogEntryConflict{}
-}
-
-/*
-CreateLogEntryConflict describes a response with status code 409, with default header values.
-
-The request conflicts with the current state of the transparency log
-*/
-type CreateLogEntryConflict struct {
- Location strfmt.URI
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this create log entry conflict response has a 2xx status code
-func (o *CreateLogEntryConflict) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this create log entry conflict response has a 3xx status code
-func (o *CreateLogEntryConflict) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this create log entry conflict response has a 4xx status code
-func (o *CreateLogEntryConflict) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this create log entry conflict response has a 5xx status code
-func (o *CreateLogEntryConflict) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this create log entry conflict response a status code equal to that given
-func (o *CreateLogEntryConflict) IsCode(code int) bool {
- return code == 409
-}
-
-// Code gets the status code for the create log entry conflict response
-func (o *CreateLogEntryConflict) Code() int {
- return 409
-}
-
-func (o *CreateLogEntryConflict) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntryConflict %s", 409, payload)
-}
-
-func (o *CreateLogEntryConflict) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntryConflict %s", 409, payload)
-}
-
-func (o *CreateLogEntryConflict) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *CreateLogEntryConflict) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // hydrates response header Location
- hdrLocation := response.GetHeader("Location")
-
- if hdrLocation != "" {
- vallocation, err := formats.Parse("uri", hdrLocation)
- if err != nil {
- return errors.InvalidType("Location", "header", "strfmt.URI", hdrLocation)
- }
- o.Location = *(vallocation.(*strfmt.URI))
- }
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewCreateLogEntryDefault creates a CreateLogEntryDefault with default headers values
-func NewCreateLogEntryDefault(code int) *CreateLogEntryDefault {
- return &CreateLogEntryDefault{
- _statusCode: code,
- }
-}
-
-/*
-CreateLogEntryDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type CreateLogEntryDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this create log entry default response has a 2xx status code
-func (o *CreateLogEntryDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this create log entry default response has a 3xx status code
-func (o *CreateLogEntryDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this create log entry default response has a 4xx status code
-func (o *CreateLogEntryDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this create log entry default response has a 5xx status code
-func (o *CreateLogEntryDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this create log entry default response a status code equal to that given
-func (o *CreateLogEntryDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the create log entry default response
-func (o *CreateLogEntryDefault) Code() int {
- return o._statusCode
-}
-
-func (o *CreateLogEntryDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntry default %s", o._statusCode, payload)
-}
-
-func (o *CreateLogEntryDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries][%d] createLogEntry default %s", o._statusCode, payload)
-}
-
-func (o *CreateLogEntryDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *CreateLogEntryDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/entries_client.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/entries_client.go
deleted file mode 100644
index 259b38eeb4..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/entries_client.go
+++ /dev/null
@@ -1,239 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "github.com/go-openapi/runtime"
- httptransport "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-)
-
-// New creates a new entries API client.
-func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
- return &Client{transport: transport, formats: formats}
-}
-
-// New creates a new entries API client with basic auth credentials.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - user: user for basic authentication header.
-// - password: password for basic authentication header.
-func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BasicAuth(user, password)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-// New creates a new entries API client with a bearer token for authentication.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - bearerToken: bearer token for Bearer authentication header.
-func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BearerToken(bearerToken)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-/*
-Client for entries API
-*/
-type Client struct {
- transport runtime.ClientTransport
- formats strfmt.Registry
-}
-
-// ClientOption may be used to customize the behavior of Client methods.
-type ClientOption func(*runtime.ClientOperation)
-
-// ClientService is the interface for Client methods
-type ClientService interface {
- CreateLogEntry(params *CreateLogEntryParams, opts ...ClientOption) (*CreateLogEntryCreated, error)
-
- GetLogEntryByIndex(params *GetLogEntryByIndexParams, opts ...ClientOption) (*GetLogEntryByIndexOK, error)
-
- GetLogEntryByUUID(params *GetLogEntryByUUIDParams, opts ...ClientOption) (*GetLogEntryByUUIDOK, error)
-
- SearchLogQuery(params *SearchLogQueryParams, opts ...ClientOption) (*SearchLogQueryOK, error)
-
- SetTransport(transport runtime.ClientTransport)
-}
-
-/*
-CreateLogEntry creates an entry in the transparency log
-
-Creates an entry in the transparency log for a detached signature, public key, and content. Items can be included in the request or fetched by the server when URLs are specified.
-*/
-func (a *Client) CreateLogEntry(params *CreateLogEntryParams, opts ...ClientOption) (*CreateLogEntryCreated, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewCreateLogEntryParams()
- }
- op := &runtime.ClientOperation{
- ID: "createLogEntry",
- Method: "POST",
- PathPattern: "/api/v1/log/entries",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &CreateLogEntryReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*CreateLogEntryCreated)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*CreateLogEntryDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-/*
-GetLogEntryByIndex retrieves an entry and inclusion proof from the transparency log if it exists by index
-*/
-func (a *Client) GetLogEntryByIndex(params *GetLogEntryByIndexParams, opts ...ClientOption) (*GetLogEntryByIndexOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewGetLogEntryByIndexParams()
- }
- op := &runtime.ClientOperation{
- ID: "getLogEntryByIndex",
- Method: "GET",
- PathPattern: "/api/v1/log/entries",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &GetLogEntryByIndexReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*GetLogEntryByIndexOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*GetLogEntryByIndexDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-/*
-GetLogEntryByUUID gets log entry and information required to generate an inclusion proof for the entry in the transparency log
-
-Returns the entry, root hash, tree size, and a list of hashes that can be used to calculate proof of an entry being included in the transparency log
-*/
-func (a *Client) GetLogEntryByUUID(params *GetLogEntryByUUIDParams, opts ...ClientOption) (*GetLogEntryByUUIDOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewGetLogEntryByUUIDParams()
- }
- op := &runtime.ClientOperation{
- ID: "getLogEntryByUUID",
- Method: "GET",
- PathPattern: "/api/v1/log/entries/{entryUUID}",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &GetLogEntryByUUIDReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*GetLogEntryByUUIDOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*GetLogEntryByUUIDDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-/*
-SearchLogQuery searches transparency log for one or more log entries
-*/
-func (a *Client) SearchLogQuery(params *SearchLogQueryParams, opts ...ClientOption) (*SearchLogQueryOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewSearchLogQueryParams()
- }
- op := &runtime.ClientOperation{
- ID: "searchLogQuery",
- Method: "POST",
- PathPattern: "/api/v1/log/entries/retrieve",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &SearchLogQueryReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*SearchLogQueryOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*SearchLogQueryDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-// SetTransport changes the transport on the client
-func (a *Client) SetTransport(transport runtime.ClientTransport) {
- a.transport = transport
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_index_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_index_parameters.go
deleted file mode 100644
index e225227511..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_index_parameters.go
+++ /dev/null
@@ -1,173 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// NewGetLogEntryByIndexParams creates a new GetLogEntryByIndexParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewGetLogEntryByIndexParams() *GetLogEntryByIndexParams {
- return &GetLogEntryByIndexParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewGetLogEntryByIndexParamsWithTimeout creates a new GetLogEntryByIndexParams object
-// with the ability to set a timeout on a request.
-func NewGetLogEntryByIndexParamsWithTimeout(timeout time.Duration) *GetLogEntryByIndexParams {
- return &GetLogEntryByIndexParams{
- timeout: timeout,
- }
-}
-
-// NewGetLogEntryByIndexParamsWithContext creates a new GetLogEntryByIndexParams object
-// with the ability to set a context for a request.
-func NewGetLogEntryByIndexParamsWithContext(ctx context.Context) *GetLogEntryByIndexParams {
- return &GetLogEntryByIndexParams{
- Context: ctx,
- }
-}
-
-// NewGetLogEntryByIndexParamsWithHTTPClient creates a new GetLogEntryByIndexParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewGetLogEntryByIndexParamsWithHTTPClient(client *http.Client) *GetLogEntryByIndexParams {
- return &GetLogEntryByIndexParams{
- HTTPClient: client,
- }
-}
-
-/*
-GetLogEntryByIndexParams contains all the parameters to send to the API endpoint
-
- for the get log entry by index operation.
-
- Typically these are written to a http.Request.
-*/
-type GetLogEntryByIndexParams struct {
-
- /* LogIndex.
-
- specifies the index of the entry in the transparency log to be retrieved
- */
- LogIndex int64
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the get log entry by index params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogEntryByIndexParams) WithDefaults() *GetLogEntryByIndexParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the get log entry by index params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogEntryByIndexParams) SetDefaults() {
- // no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the get log entry by index params
-func (o *GetLogEntryByIndexParams) WithTimeout(timeout time.Duration) *GetLogEntryByIndexParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the get log entry by index params
-func (o *GetLogEntryByIndexParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the get log entry by index params
-func (o *GetLogEntryByIndexParams) WithContext(ctx context.Context) *GetLogEntryByIndexParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the get log entry by index params
-func (o *GetLogEntryByIndexParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the get log entry by index params
-func (o *GetLogEntryByIndexParams) WithHTTPClient(client *http.Client) *GetLogEntryByIndexParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the get log entry by index params
-func (o *GetLogEntryByIndexParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithLogIndex adds the logIndex to the get log entry by index params
-func (o *GetLogEntryByIndexParams) WithLogIndex(logIndex int64) *GetLogEntryByIndexParams {
- o.SetLogIndex(logIndex)
- return o
-}
-
-// SetLogIndex adds the logIndex to the get log entry by index params
-func (o *GetLogEntryByIndexParams) SetLogIndex(logIndex int64) {
- o.LogIndex = logIndex
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *GetLogEntryByIndexParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
-
- // query param logIndex
- qrLogIndex := o.LogIndex
- qLogIndex := swag.FormatInt64(qrLogIndex)
- if qLogIndex != "" {
-
- if err := r.SetQueryParam("logIndex", qLogIndex); err != nil {
- return err
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_index_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_index_responses.go
deleted file mode 100644
index 4268f75641..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_index_responses.go
+++ /dev/null
@@ -1,263 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// GetLogEntryByIndexReader is a Reader for the GetLogEntryByIndex structure.
-type GetLogEntryByIndexReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *GetLogEntryByIndexReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewGetLogEntryByIndexOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- case 404:
- result := NewGetLogEntryByIndexNotFound()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- default:
- result := NewGetLogEntryByIndexDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewGetLogEntryByIndexOK creates a GetLogEntryByIndexOK with default headers values
-func NewGetLogEntryByIndexOK() *GetLogEntryByIndexOK {
- return &GetLogEntryByIndexOK{}
-}
-
-/*
-GetLogEntryByIndexOK describes a response with status code 200, with default header values.
-
-the entry in the transparency log requested along with an inclusion proof
-*/
-type GetLogEntryByIndexOK struct {
- Payload models.LogEntry
-}
-
-// IsSuccess returns true when this get log entry by index o k response has a 2xx status code
-func (o *GetLogEntryByIndexOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this get log entry by index o k response has a 3xx status code
-func (o *GetLogEntryByIndexOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log entry by index o k response has a 4xx status code
-func (o *GetLogEntryByIndexOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this get log entry by index o k response has a 5xx status code
-func (o *GetLogEntryByIndexOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log entry by index o k response a status code equal to that given
-func (o *GetLogEntryByIndexOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the get log entry by index o k response
-func (o *GetLogEntryByIndexOK) Code() int {
- return 200
-}
-
-func (o *GetLogEntryByIndexOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries][%d] getLogEntryByIndexOK %s", 200, payload)
-}
-
-func (o *GetLogEntryByIndexOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries][%d] getLogEntryByIndexOK %s", 200, payload)
-}
-
-func (o *GetLogEntryByIndexOK) GetPayload() models.LogEntry {
- return o.Payload
-}
-
-func (o *GetLogEntryByIndexOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // response payload
- if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewGetLogEntryByIndexNotFound creates a GetLogEntryByIndexNotFound with default headers values
-func NewGetLogEntryByIndexNotFound() *GetLogEntryByIndexNotFound {
- return &GetLogEntryByIndexNotFound{}
-}
-
-/*
-GetLogEntryByIndexNotFound describes a response with status code 404, with default header values.
-
-The content requested could not be found
-*/
-type GetLogEntryByIndexNotFound struct {
-}
-
-// IsSuccess returns true when this get log entry by index not found response has a 2xx status code
-func (o *GetLogEntryByIndexNotFound) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this get log entry by index not found response has a 3xx status code
-func (o *GetLogEntryByIndexNotFound) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log entry by index not found response has a 4xx status code
-func (o *GetLogEntryByIndexNotFound) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this get log entry by index not found response has a 5xx status code
-func (o *GetLogEntryByIndexNotFound) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log entry by index not found response a status code equal to that given
-func (o *GetLogEntryByIndexNotFound) IsCode(code int) bool {
- return code == 404
-}
-
-// Code gets the status code for the get log entry by index not found response
-func (o *GetLogEntryByIndexNotFound) Code() int {
- return 404
-}
-
-func (o *GetLogEntryByIndexNotFound) Error() string {
- return fmt.Sprintf("[GET /api/v1/log/entries][%d] getLogEntryByIndexNotFound", 404)
-}
-
-func (o *GetLogEntryByIndexNotFound) String() string {
- return fmt.Sprintf("[GET /api/v1/log/entries][%d] getLogEntryByIndexNotFound", 404)
-}
-
-func (o *GetLogEntryByIndexNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- return nil
-}
-
-// NewGetLogEntryByIndexDefault creates a GetLogEntryByIndexDefault with default headers values
-func NewGetLogEntryByIndexDefault(code int) *GetLogEntryByIndexDefault {
- return &GetLogEntryByIndexDefault{
- _statusCode: code,
- }
-}
-
-/*
-GetLogEntryByIndexDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type GetLogEntryByIndexDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this get log entry by index default response has a 2xx status code
-func (o *GetLogEntryByIndexDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this get log entry by index default response has a 3xx status code
-func (o *GetLogEntryByIndexDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this get log entry by index default response has a 4xx status code
-func (o *GetLogEntryByIndexDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this get log entry by index default response has a 5xx status code
-func (o *GetLogEntryByIndexDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this get log entry by index default response a status code equal to that given
-func (o *GetLogEntryByIndexDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the get log entry by index default response
-func (o *GetLogEntryByIndexDefault) Code() int {
- return o._statusCode
-}
-
-func (o *GetLogEntryByIndexDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries][%d] getLogEntryByIndex default %s", o._statusCode, payload)
-}
-
-func (o *GetLogEntryByIndexDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries][%d] getLogEntryByIndex default %s", o._statusCode, payload)
-}
-
-func (o *GetLogEntryByIndexDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *GetLogEntryByIndexDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_uuid_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_uuid_parameters.go
deleted file mode 100644
index 5c88b52654..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_uuid_parameters.go
+++ /dev/null
@@ -1,167 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-)
-
-// NewGetLogEntryByUUIDParams creates a new GetLogEntryByUUIDParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewGetLogEntryByUUIDParams() *GetLogEntryByUUIDParams {
- return &GetLogEntryByUUIDParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewGetLogEntryByUUIDParamsWithTimeout creates a new GetLogEntryByUUIDParams object
-// with the ability to set a timeout on a request.
-func NewGetLogEntryByUUIDParamsWithTimeout(timeout time.Duration) *GetLogEntryByUUIDParams {
- return &GetLogEntryByUUIDParams{
- timeout: timeout,
- }
-}
-
-// NewGetLogEntryByUUIDParamsWithContext creates a new GetLogEntryByUUIDParams object
-// with the ability to set a context for a request.
-func NewGetLogEntryByUUIDParamsWithContext(ctx context.Context) *GetLogEntryByUUIDParams {
- return &GetLogEntryByUUIDParams{
- Context: ctx,
- }
-}
-
-// NewGetLogEntryByUUIDParamsWithHTTPClient creates a new GetLogEntryByUUIDParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewGetLogEntryByUUIDParamsWithHTTPClient(client *http.Client) *GetLogEntryByUUIDParams {
- return &GetLogEntryByUUIDParams{
- HTTPClient: client,
- }
-}
-
-/*
-GetLogEntryByUUIDParams contains all the parameters to send to the API endpoint
-
- for the get log entry by UUID operation.
-
- Typically these are written to a http.Request.
-*/
-type GetLogEntryByUUIDParams struct {
-
- /* EntryUUID.
-
- the UUID of the entry for which the inclusion proof information should be returned
- */
- EntryUUID string
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the get log entry by UUID params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogEntryByUUIDParams) WithDefaults() *GetLogEntryByUUIDParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the get log entry by UUID params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogEntryByUUIDParams) SetDefaults() {
- // no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) WithTimeout(timeout time.Duration) *GetLogEntryByUUIDParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) WithContext(ctx context.Context) *GetLogEntryByUUIDParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) WithHTTPClient(client *http.Client) *GetLogEntryByUUIDParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithEntryUUID adds the entryUUID to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) WithEntryUUID(entryUUID string) *GetLogEntryByUUIDParams {
- o.SetEntryUUID(entryUUID)
- return o
-}
-
-// SetEntryUUID adds the entryUuid to the get log entry by UUID params
-func (o *GetLogEntryByUUIDParams) SetEntryUUID(entryUUID string) {
- o.EntryUUID = entryUUID
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *GetLogEntryByUUIDParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
-
- // path param entryUUID
- if err := r.SetPathParam("entryUUID", o.EntryUUID); err != nil {
- return err
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_uuid_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_uuid_responses.go
deleted file mode 100644
index df6ede589f..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/get_log_entry_by_uuid_responses.go
+++ /dev/null
@@ -1,263 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// GetLogEntryByUUIDReader is a Reader for the GetLogEntryByUUID structure.
-type GetLogEntryByUUIDReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *GetLogEntryByUUIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewGetLogEntryByUUIDOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- case 404:
- result := NewGetLogEntryByUUIDNotFound()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- default:
- result := NewGetLogEntryByUUIDDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewGetLogEntryByUUIDOK creates a GetLogEntryByUUIDOK with default headers values
-func NewGetLogEntryByUUIDOK() *GetLogEntryByUUIDOK {
- return &GetLogEntryByUUIDOK{}
-}
-
-/*
-GetLogEntryByUUIDOK describes a response with status code 200, with default header values.
-
-Information needed for a client to compute the inclusion proof
-*/
-type GetLogEntryByUUIDOK struct {
- Payload models.LogEntry
-}
-
-// IsSuccess returns true when this get log entry by Uuid o k response has a 2xx status code
-func (o *GetLogEntryByUUIDOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this get log entry by Uuid o k response has a 3xx status code
-func (o *GetLogEntryByUUIDOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log entry by Uuid o k response has a 4xx status code
-func (o *GetLogEntryByUUIDOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this get log entry by Uuid o k response has a 5xx status code
-func (o *GetLogEntryByUUIDOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log entry by Uuid o k response a status code equal to that given
-func (o *GetLogEntryByUUIDOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the get log entry by Uuid o k response
-func (o *GetLogEntryByUUIDOK) Code() int {
- return 200
-}
-
-func (o *GetLogEntryByUUIDOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries/{entryUUID}][%d] getLogEntryByUuidOK %s", 200, payload)
-}
-
-func (o *GetLogEntryByUUIDOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries/{entryUUID}][%d] getLogEntryByUuidOK %s", 200, payload)
-}
-
-func (o *GetLogEntryByUUIDOK) GetPayload() models.LogEntry {
- return o.Payload
-}
-
-func (o *GetLogEntryByUUIDOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // response payload
- if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewGetLogEntryByUUIDNotFound creates a GetLogEntryByUUIDNotFound with default headers values
-func NewGetLogEntryByUUIDNotFound() *GetLogEntryByUUIDNotFound {
- return &GetLogEntryByUUIDNotFound{}
-}
-
-/*
-GetLogEntryByUUIDNotFound describes a response with status code 404, with default header values.
-
-The content requested could not be found
-*/
-type GetLogEntryByUUIDNotFound struct {
-}
-
-// IsSuccess returns true when this get log entry by Uuid not found response has a 2xx status code
-func (o *GetLogEntryByUUIDNotFound) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this get log entry by Uuid not found response has a 3xx status code
-func (o *GetLogEntryByUUIDNotFound) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log entry by Uuid not found response has a 4xx status code
-func (o *GetLogEntryByUUIDNotFound) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this get log entry by Uuid not found response has a 5xx status code
-func (o *GetLogEntryByUUIDNotFound) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log entry by Uuid not found response a status code equal to that given
-func (o *GetLogEntryByUUIDNotFound) IsCode(code int) bool {
- return code == 404
-}
-
-// Code gets the status code for the get log entry by Uuid not found response
-func (o *GetLogEntryByUUIDNotFound) Code() int {
- return 404
-}
-
-func (o *GetLogEntryByUUIDNotFound) Error() string {
- return fmt.Sprintf("[GET /api/v1/log/entries/{entryUUID}][%d] getLogEntryByUuidNotFound", 404)
-}
-
-func (o *GetLogEntryByUUIDNotFound) String() string {
- return fmt.Sprintf("[GET /api/v1/log/entries/{entryUUID}][%d] getLogEntryByUuidNotFound", 404)
-}
-
-func (o *GetLogEntryByUUIDNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- return nil
-}
-
-// NewGetLogEntryByUUIDDefault creates a GetLogEntryByUUIDDefault with default headers values
-func NewGetLogEntryByUUIDDefault(code int) *GetLogEntryByUUIDDefault {
- return &GetLogEntryByUUIDDefault{
- _statusCode: code,
- }
-}
-
-/*
-GetLogEntryByUUIDDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type GetLogEntryByUUIDDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this get log entry by UUID default response has a 2xx status code
-func (o *GetLogEntryByUUIDDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this get log entry by UUID default response has a 3xx status code
-func (o *GetLogEntryByUUIDDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this get log entry by UUID default response has a 4xx status code
-func (o *GetLogEntryByUUIDDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this get log entry by UUID default response has a 5xx status code
-func (o *GetLogEntryByUUIDDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this get log entry by UUID default response a status code equal to that given
-func (o *GetLogEntryByUUIDDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the get log entry by UUID default response
-func (o *GetLogEntryByUUIDDefault) Code() int {
- return o._statusCode
-}
-
-func (o *GetLogEntryByUUIDDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries/{entryUUID}][%d] getLogEntryByUUID default %s", o._statusCode, payload)
-}
-
-func (o *GetLogEntryByUUIDDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/entries/{entryUUID}][%d] getLogEntryByUUID default %s", o._statusCode, payload)
-}
-
-func (o *GetLogEntryByUUIDDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *GetLogEntryByUUIDDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/search_log_query_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/search_log_query_parameters.go
deleted file mode 100644
index ed158ce23e..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/search_log_query_parameters.go
+++ /dev/null
@@ -1,166 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// NewSearchLogQueryParams creates a new SearchLogQueryParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewSearchLogQueryParams() *SearchLogQueryParams {
- return &SearchLogQueryParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewSearchLogQueryParamsWithTimeout creates a new SearchLogQueryParams object
-// with the ability to set a timeout on a request.
-func NewSearchLogQueryParamsWithTimeout(timeout time.Duration) *SearchLogQueryParams {
- return &SearchLogQueryParams{
- timeout: timeout,
- }
-}
-
-// NewSearchLogQueryParamsWithContext creates a new SearchLogQueryParams object
-// with the ability to set a context for a request.
-func NewSearchLogQueryParamsWithContext(ctx context.Context) *SearchLogQueryParams {
- return &SearchLogQueryParams{
- Context: ctx,
- }
-}
-
-// NewSearchLogQueryParamsWithHTTPClient creates a new SearchLogQueryParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewSearchLogQueryParamsWithHTTPClient(client *http.Client) *SearchLogQueryParams {
- return &SearchLogQueryParams{
- HTTPClient: client,
- }
-}
-
-/*
-SearchLogQueryParams contains all the parameters to send to the API endpoint
-
- for the search log query operation.
-
- Typically these are written to a http.Request.
-*/
-type SearchLogQueryParams struct {
-
- // Entry.
- Entry *models.SearchLogQuery
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the search log query params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *SearchLogQueryParams) WithDefaults() *SearchLogQueryParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the search log query params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *SearchLogQueryParams) SetDefaults() {
- // no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the search log query params
-func (o *SearchLogQueryParams) WithTimeout(timeout time.Duration) *SearchLogQueryParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the search log query params
-func (o *SearchLogQueryParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the search log query params
-func (o *SearchLogQueryParams) WithContext(ctx context.Context) *SearchLogQueryParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the search log query params
-func (o *SearchLogQueryParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the search log query params
-func (o *SearchLogQueryParams) WithHTTPClient(client *http.Client) *SearchLogQueryParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the search log query params
-func (o *SearchLogQueryParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithEntry adds the entry to the search log query params
-func (o *SearchLogQueryParams) WithEntry(entry *models.SearchLogQuery) *SearchLogQueryParams {
- o.SetEntry(entry)
- return o
-}
-
-// SetEntry adds the entry to the search log query params
-func (o *SearchLogQueryParams) SetEntry(entry *models.SearchLogQuery) {
- o.Entry = entry
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *SearchLogQueryParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
- if o.Entry != nil {
- if err := r.SetBodyParam(o.Entry); err != nil {
- return err
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/search_log_query_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/search_log_query_responses.go
deleted file mode 100644
index e064bcdec6..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/entries/search_log_query_responses.go
+++ /dev/null
@@ -1,353 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package entries
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// SearchLogQueryReader is a Reader for the SearchLogQuery structure.
-type SearchLogQueryReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *SearchLogQueryReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewSearchLogQueryOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- case 400:
- result := NewSearchLogQueryBadRequest()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- case 422:
- result := NewSearchLogQueryUnprocessableEntity()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- default:
- result := NewSearchLogQueryDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewSearchLogQueryOK creates a SearchLogQueryOK with default headers values
-func NewSearchLogQueryOK() *SearchLogQueryOK {
- return &SearchLogQueryOK{}
-}
-
-/*
-SearchLogQueryOK describes a response with status code 200, with default header values.
-
-Returns zero or more entries from the transparency log, according to how many were included in request query
-*/
-type SearchLogQueryOK struct {
- Payload []models.LogEntry
-}
-
-// IsSuccess returns true when this search log query o k response has a 2xx status code
-func (o *SearchLogQueryOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this search log query o k response has a 3xx status code
-func (o *SearchLogQueryOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this search log query o k response has a 4xx status code
-func (o *SearchLogQueryOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this search log query o k response has a 5xx status code
-func (o *SearchLogQueryOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this search log query o k response a status code equal to that given
-func (o *SearchLogQueryOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the search log query o k response
-func (o *SearchLogQueryOK) Code() int {
- return 200
-}
-
-func (o *SearchLogQueryOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQueryOK %s", 200, payload)
-}
-
-func (o *SearchLogQueryOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQueryOK %s", 200, payload)
-}
-
-func (o *SearchLogQueryOK) GetPayload() []models.LogEntry {
- return o.Payload
-}
-
-func (o *SearchLogQueryOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // response payload
- if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewSearchLogQueryBadRequest creates a SearchLogQueryBadRequest with default headers values
-func NewSearchLogQueryBadRequest() *SearchLogQueryBadRequest {
- return &SearchLogQueryBadRequest{}
-}
-
-/*
-SearchLogQueryBadRequest describes a response with status code 400, with default header values.
-
-The content supplied to the server was invalid
-*/
-type SearchLogQueryBadRequest struct {
- Payload *models.Error
-}
-
-// IsSuccess returns true when this search log query bad request response has a 2xx status code
-func (o *SearchLogQueryBadRequest) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this search log query bad request response has a 3xx status code
-func (o *SearchLogQueryBadRequest) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this search log query bad request response has a 4xx status code
-func (o *SearchLogQueryBadRequest) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this search log query bad request response has a 5xx status code
-func (o *SearchLogQueryBadRequest) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this search log query bad request response a status code equal to that given
-func (o *SearchLogQueryBadRequest) IsCode(code int) bool {
- return code == 400
-}
-
-// Code gets the status code for the search log query bad request response
-func (o *SearchLogQueryBadRequest) Code() int {
- return 400
-}
-
-func (o *SearchLogQueryBadRequest) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQueryBadRequest %s", 400, payload)
-}
-
-func (o *SearchLogQueryBadRequest) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQueryBadRequest %s", 400, payload)
-}
-
-func (o *SearchLogQueryBadRequest) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *SearchLogQueryBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewSearchLogQueryUnprocessableEntity creates a SearchLogQueryUnprocessableEntity with default headers values
-func NewSearchLogQueryUnprocessableEntity() *SearchLogQueryUnprocessableEntity {
- return &SearchLogQueryUnprocessableEntity{}
-}
-
-/*
-SearchLogQueryUnprocessableEntity describes a response with status code 422, with default header values.
-
-The server understood the request but is unable to process the contained instructions
-*/
-type SearchLogQueryUnprocessableEntity struct {
- Payload *models.Error
-}
-
-// IsSuccess returns true when this search log query unprocessable entity response has a 2xx status code
-func (o *SearchLogQueryUnprocessableEntity) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this search log query unprocessable entity response has a 3xx status code
-func (o *SearchLogQueryUnprocessableEntity) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this search log query unprocessable entity response has a 4xx status code
-func (o *SearchLogQueryUnprocessableEntity) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this search log query unprocessable entity response has a 5xx status code
-func (o *SearchLogQueryUnprocessableEntity) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this search log query unprocessable entity response a status code equal to that given
-func (o *SearchLogQueryUnprocessableEntity) IsCode(code int) bool {
- return code == 422
-}
-
-// Code gets the status code for the search log query unprocessable entity response
-func (o *SearchLogQueryUnprocessableEntity) Code() int {
- return 422
-}
-
-func (o *SearchLogQueryUnprocessableEntity) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQueryUnprocessableEntity %s", 422, payload)
-}
-
-func (o *SearchLogQueryUnprocessableEntity) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQueryUnprocessableEntity %s", 422, payload)
-}
-
-func (o *SearchLogQueryUnprocessableEntity) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *SearchLogQueryUnprocessableEntity) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewSearchLogQueryDefault creates a SearchLogQueryDefault with default headers values
-func NewSearchLogQueryDefault(code int) *SearchLogQueryDefault {
- return &SearchLogQueryDefault{
- _statusCode: code,
- }
-}
-
-/*
-SearchLogQueryDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type SearchLogQueryDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this search log query default response has a 2xx status code
-func (o *SearchLogQueryDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this search log query default response has a 3xx status code
-func (o *SearchLogQueryDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this search log query default response has a 4xx status code
-func (o *SearchLogQueryDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this search log query default response has a 5xx status code
-func (o *SearchLogQueryDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this search log query default response a status code equal to that given
-func (o *SearchLogQueryDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the search log query default response
-func (o *SearchLogQueryDefault) Code() int {
- return o._statusCode
-}
-
-func (o *SearchLogQueryDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQuery default %s", o._statusCode, payload)
-}
-
-func (o *SearchLogQueryDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/log/entries/retrieve][%d] searchLogQuery default %s", o._statusCode, payload)
-}
-
-func (o *SearchLogQueryDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *SearchLogQueryDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/index/index_client.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/index/index_client.go
deleted file mode 100644
index e5262055de..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/index/index_client.go
+++ /dev/null
@@ -1,122 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package index
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "github.com/go-openapi/runtime"
- httptransport "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-)
-
-// New creates a new index API client.
-func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
- return &Client{transport: transport, formats: formats}
-}
-
-// New creates a new index API client with basic auth credentials.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - user: user for basic authentication header.
-// - password: password for basic authentication header.
-func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BasicAuth(user, password)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-// New creates a new index API client with a bearer token for authentication.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - bearerToken: bearer token for Bearer authentication header.
-func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BearerToken(bearerToken)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-/*
-Client for index API
-*/
-type Client struct {
- transport runtime.ClientTransport
- formats strfmt.Registry
-}
-
-// ClientOption may be used to customize the behavior of Client methods.
-type ClientOption func(*runtime.ClientOperation)
-
-// ClientService is the interface for Client methods
-type ClientService interface {
- SearchIndex(params *SearchIndexParams, opts ...ClientOption) (*SearchIndexOK, error)
-
- SetTransport(transport runtime.ClientTransport)
-}
-
-/*
- SearchIndex searches index by entry metadata
-
- EXPERIMENTAL - this endpoint is offered as best effort only and may be changed or removed in future releases.
-
-The results returned from this endpoint may be incomplete.
-*/
-func (a *Client) SearchIndex(params *SearchIndexParams, opts ...ClientOption) (*SearchIndexOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewSearchIndexParams()
- }
- op := &runtime.ClientOperation{
- ID: "searchIndex",
- Method: "POST",
- PathPattern: "/api/v1/index/retrieve",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &SearchIndexReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*SearchIndexOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*SearchIndexDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-// SetTransport changes the transport on the client
-func (a *Client) SetTransport(transport runtime.ClientTransport) {
- a.transport = transport
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/index/search_index_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/index/search_index_parameters.go
deleted file mode 100644
index c1694193ef..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/index/search_index_parameters.go
+++ /dev/null
@@ -1,166 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package index
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// NewSearchIndexParams creates a new SearchIndexParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewSearchIndexParams() *SearchIndexParams {
- return &SearchIndexParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewSearchIndexParamsWithTimeout creates a new SearchIndexParams object
-// with the ability to set a timeout on a request.
-func NewSearchIndexParamsWithTimeout(timeout time.Duration) *SearchIndexParams {
- return &SearchIndexParams{
- timeout: timeout,
- }
-}
-
-// NewSearchIndexParamsWithContext creates a new SearchIndexParams object
-// with the ability to set a context for a request.
-func NewSearchIndexParamsWithContext(ctx context.Context) *SearchIndexParams {
- return &SearchIndexParams{
- Context: ctx,
- }
-}
-
-// NewSearchIndexParamsWithHTTPClient creates a new SearchIndexParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewSearchIndexParamsWithHTTPClient(client *http.Client) *SearchIndexParams {
- return &SearchIndexParams{
- HTTPClient: client,
- }
-}
-
-/*
-SearchIndexParams contains all the parameters to send to the API endpoint
-
- for the search index operation.
-
- Typically these are written to a http.Request.
-*/
-type SearchIndexParams struct {
-
- // Query.
- Query *models.SearchIndex
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the search index params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *SearchIndexParams) WithDefaults() *SearchIndexParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the search index params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *SearchIndexParams) SetDefaults() {
- // no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the search index params
-func (o *SearchIndexParams) WithTimeout(timeout time.Duration) *SearchIndexParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the search index params
-func (o *SearchIndexParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the search index params
-func (o *SearchIndexParams) WithContext(ctx context.Context) *SearchIndexParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the search index params
-func (o *SearchIndexParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the search index params
-func (o *SearchIndexParams) WithHTTPClient(client *http.Client) *SearchIndexParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the search index params
-func (o *SearchIndexParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithQuery adds the query to the search index params
-func (o *SearchIndexParams) WithQuery(query *models.SearchIndex) *SearchIndexParams {
- o.SetQuery(query)
- return o
-}
-
-// SetQuery adds the query to the search index params
-func (o *SearchIndexParams) SetQuery(query *models.SearchIndex) {
- o.Query = query
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *SearchIndexParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
- if o.Query != nil {
- if err := r.SetBodyParam(o.Query); err != nil {
- return err
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/index/search_index_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/index/search_index_responses.go
deleted file mode 100644
index 0fa2a34db1..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/index/search_index_responses.go
+++ /dev/null
@@ -1,277 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package index
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// SearchIndexReader is a Reader for the SearchIndex structure.
-type SearchIndexReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *SearchIndexReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewSearchIndexOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- case 400:
- result := NewSearchIndexBadRequest()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- default:
- result := NewSearchIndexDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewSearchIndexOK creates a SearchIndexOK with default headers values
-func NewSearchIndexOK() *SearchIndexOK {
- return &SearchIndexOK{}
-}
-
-/*
-SearchIndexOK describes a response with status code 200, with default header values.
-
-Returns zero or more entry UUIDs from the transparency log based on search query
-*/
-type SearchIndexOK struct {
- Payload []string
-}
-
-// IsSuccess returns true when this search index o k response has a 2xx status code
-func (o *SearchIndexOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this search index o k response has a 3xx status code
-func (o *SearchIndexOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this search index o k response has a 4xx status code
-func (o *SearchIndexOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this search index o k response has a 5xx status code
-func (o *SearchIndexOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this search index o k response a status code equal to that given
-func (o *SearchIndexOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the search index o k response
-func (o *SearchIndexOK) Code() int {
- return 200
-}
-
-func (o *SearchIndexOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/index/retrieve][%d] searchIndexOK %s", 200, payload)
-}
-
-func (o *SearchIndexOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/index/retrieve][%d] searchIndexOK %s", 200, payload)
-}
-
-func (o *SearchIndexOK) GetPayload() []string {
- return o.Payload
-}
-
-func (o *SearchIndexOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // response payload
- if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewSearchIndexBadRequest creates a SearchIndexBadRequest with default headers values
-func NewSearchIndexBadRequest() *SearchIndexBadRequest {
- return &SearchIndexBadRequest{}
-}
-
-/*
-SearchIndexBadRequest describes a response with status code 400, with default header values.
-
-The content supplied to the server was invalid
-*/
-type SearchIndexBadRequest struct {
- Payload *models.Error
-}
-
-// IsSuccess returns true when this search index bad request response has a 2xx status code
-func (o *SearchIndexBadRequest) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this search index bad request response has a 3xx status code
-func (o *SearchIndexBadRequest) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this search index bad request response has a 4xx status code
-func (o *SearchIndexBadRequest) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this search index bad request response has a 5xx status code
-func (o *SearchIndexBadRequest) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this search index bad request response a status code equal to that given
-func (o *SearchIndexBadRequest) IsCode(code int) bool {
- return code == 400
-}
-
-// Code gets the status code for the search index bad request response
-func (o *SearchIndexBadRequest) Code() int {
- return 400
-}
-
-func (o *SearchIndexBadRequest) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/index/retrieve][%d] searchIndexBadRequest %s", 400, payload)
-}
-
-func (o *SearchIndexBadRequest) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/index/retrieve][%d] searchIndexBadRequest %s", 400, payload)
-}
-
-func (o *SearchIndexBadRequest) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *SearchIndexBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewSearchIndexDefault creates a SearchIndexDefault with default headers values
-func NewSearchIndexDefault(code int) *SearchIndexDefault {
- return &SearchIndexDefault{
- _statusCode: code,
- }
-}
-
-/*
-SearchIndexDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type SearchIndexDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this search index default response has a 2xx status code
-func (o *SearchIndexDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this search index default response has a 3xx status code
-func (o *SearchIndexDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this search index default response has a 4xx status code
-func (o *SearchIndexDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this search index default response has a 5xx status code
-func (o *SearchIndexDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this search index default response a status code equal to that given
-func (o *SearchIndexDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the search index default response
-func (o *SearchIndexDefault) Code() int {
- return o._statusCode
-}
-
-func (o *SearchIndexDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/index/retrieve][%d] searchIndex default %s", o._statusCode, payload)
-}
-
-func (o *SearchIndexDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[POST /api/v1/index/retrieve][%d] searchIndex default %s", o._statusCode, payload)
-}
-
-func (o *SearchIndexDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *SearchIndexDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/get_public_key_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/get_public_key_parameters.go
deleted file mode 100644
index b4248c933a..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/get_public_key_parameters.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package pubkey
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-)
-
-// NewGetPublicKeyParams creates a new GetPublicKeyParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewGetPublicKeyParams() *GetPublicKeyParams {
- return &GetPublicKeyParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewGetPublicKeyParamsWithTimeout creates a new GetPublicKeyParams object
-// with the ability to set a timeout on a request.
-func NewGetPublicKeyParamsWithTimeout(timeout time.Duration) *GetPublicKeyParams {
- return &GetPublicKeyParams{
- timeout: timeout,
- }
-}
-
-// NewGetPublicKeyParamsWithContext creates a new GetPublicKeyParams object
-// with the ability to set a context for a request.
-func NewGetPublicKeyParamsWithContext(ctx context.Context) *GetPublicKeyParams {
- return &GetPublicKeyParams{
- Context: ctx,
- }
-}
-
-// NewGetPublicKeyParamsWithHTTPClient creates a new GetPublicKeyParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewGetPublicKeyParamsWithHTTPClient(client *http.Client) *GetPublicKeyParams {
- return &GetPublicKeyParams{
- HTTPClient: client,
- }
-}
-
-/*
-GetPublicKeyParams contains all the parameters to send to the API endpoint
-
- for the get public key operation.
-
- Typically these are written to a http.Request.
-*/
-type GetPublicKeyParams struct {
-
- /* TreeID.
-
- The tree ID of the tree you wish to get a public key for
- */
- TreeID *string
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the get public key params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetPublicKeyParams) WithDefaults() *GetPublicKeyParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the get public key params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetPublicKeyParams) SetDefaults() {
- // no default values defined for this parameter
-}
-
-// WithTimeout adds the timeout to the get public key params
-func (o *GetPublicKeyParams) WithTimeout(timeout time.Duration) *GetPublicKeyParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the get public key params
-func (o *GetPublicKeyParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the get public key params
-func (o *GetPublicKeyParams) WithContext(ctx context.Context) *GetPublicKeyParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the get public key params
-func (o *GetPublicKeyParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the get public key params
-func (o *GetPublicKeyParams) WithHTTPClient(client *http.Client) *GetPublicKeyParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the get public key params
-func (o *GetPublicKeyParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithTreeID adds the treeID to the get public key params
-func (o *GetPublicKeyParams) WithTreeID(treeID *string) *GetPublicKeyParams {
- o.SetTreeID(treeID)
- return o
-}
-
-// SetTreeID adds the treeId to the get public key params
-func (o *GetPublicKeyParams) SetTreeID(treeID *string) {
- o.TreeID = treeID
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *GetPublicKeyParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
-
- if o.TreeID != nil {
-
- // query param treeID
- var qrTreeID string
-
- if o.TreeID != nil {
- qrTreeID = *o.TreeID
- }
- qTreeID := qrTreeID
- if qTreeID != "" {
-
- if err := r.SetQueryParam("treeID", qTreeID); err != nil {
- return err
- }
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/get_public_key_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/get_public_key_responses.go
deleted file mode 100644
index c9e33d7bf5..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/get_public_key_responses.go
+++ /dev/null
@@ -1,201 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package pubkey
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// GetPublicKeyReader is a Reader for the GetPublicKey structure.
-type GetPublicKeyReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *GetPublicKeyReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewGetPublicKeyOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- default:
- result := NewGetPublicKeyDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewGetPublicKeyOK creates a GetPublicKeyOK with default headers values
-func NewGetPublicKeyOK() *GetPublicKeyOK {
- return &GetPublicKeyOK{}
-}
-
-/*
-GetPublicKeyOK describes a response with status code 200, with default header values.
-
-The public key
-*/
-type GetPublicKeyOK struct {
- Payload string
-}
-
-// IsSuccess returns true when this get public key o k response has a 2xx status code
-func (o *GetPublicKeyOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this get public key o k response has a 3xx status code
-func (o *GetPublicKeyOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get public key o k response has a 4xx status code
-func (o *GetPublicKeyOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this get public key o k response has a 5xx status code
-func (o *GetPublicKeyOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get public key o k response a status code equal to that given
-func (o *GetPublicKeyOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the get public key o k response
-func (o *GetPublicKeyOK) Code() int {
- return 200
-}
-
-func (o *GetPublicKeyOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/publicKey][%d] getPublicKeyOK %s", 200, payload)
-}
-
-func (o *GetPublicKeyOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/publicKey][%d] getPublicKeyOK %s", 200, payload)
-}
-
-func (o *GetPublicKeyOK) GetPayload() string {
- return o.Payload
-}
-
-func (o *GetPublicKeyOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- // response payload
- if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewGetPublicKeyDefault creates a GetPublicKeyDefault with default headers values
-func NewGetPublicKeyDefault(code int) *GetPublicKeyDefault {
- return &GetPublicKeyDefault{
- _statusCode: code,
- }
-}
-
-/*
-GetPublicKeyDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type GetPublicKeyDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this get public key default response has a 2xx status code
-func (o *GetPublicKeyDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this get public key default response has a 3xx status code
-func (o *GetPublicKeyDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this get public key default response has a 4xx status code
-func (o *GetPublicKeyDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this get public key default response has a 5xx status code
-func (o *GetPublicKeyDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this get public key default response a status code equal to that given
-func (o *GetPublicKeyDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the get public key default response
-func (o *GetPublicKeyDefault) Code() int {
- return o._statusCode
-}
-
-func (o *GetPublicKeyDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/publicKey][%d] getPublicKey default %s", o._statusCode, payload)
-}
-
-func (o *GetPublicKeyDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/publicKey][%d] getPublicKey default %s", o._statusCode, payload)
-}
-
-func (o *GetPublicKeyDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *GetPublicKeyDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/pubkey_client.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/pubkey_client.go
deleted file mode 100644
index 64b5222646..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/pubkey/pubkey_client.go
+++ /dev/null
@@ -1,144 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package pubkey
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "github.com/go-openapi/runtime"
- httptransport "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-)
-
-// New creates a new pubkey API client.
-func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
- return &Client{transport: transport, formats: formats}
-}
-
-// New creates a new pubkey API client with basic auth credentials.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - user: user for basic authentication header.
-// - password: password for basic authentication header.
-func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BasicAuth(user, password)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-// New creates a new pubkey API client with a bearer token for authentication.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - bearerToken: bearer token for Bearer authentication header.
-func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BearerToken(bearerToken)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-/*
-Client for pubkey API
-*/
-type Client struct {
- transport runtime.ClientTransport
- formats strfmt.Registry
-}
-
-// ClientOption may be used to customize the behavior of Client methods.
-type ClientOption func(*runtime.ClientOperation)
-
-// This client is generated with a few options you might find useful for your swagger spec.
-//
-// Feel free to add you own set of options.
-
-// WithAccept allows the client to force the Accept header
-// to negotiate a specific Producer from the server.
-//
-// You may use this option to set arbitrary extensions to your MIME media type.
-func WithAccept(mime string) ClientOption {
- return func(r *runtime.ClientOperation) {
- r.ProducesMediaTypes = []string{mime}
- }
-}
-
-// WithAcceptApplicationJSON sets the Accept header to "application/json".
-func WithAcceptApplicationJSON(r *runtime.ClientOperation) {
- r.ProducesMediaTypes = []string{"application/json"}
-}
-
-// WithAcceptApplicationxPemFile sets the Accept header to "application/x-pem-file".
-func WithAcceptApplicationxPemFile(r *runtime.ClientOperation) {
- r.ProducesMediaTypes = []string{"application/x-pem-file"}
-}
-
-// ClientService is the interface for Client methods
-type ClientService interface {
- GetPublicKey(params *GetPublicKeyParams, opts ...ClientOption) (*GetPublicKeyOK, error)
-
- SetTransport(transport runtime.ClientTransport)
-}
-
-/*
-GetPublicKey retrieves the public key that can be used to validate the signed tree head
-
-Returns the public key that can be used to validate the signed tree head
-*/
-func (a *Client) GetPublicKey(params *GetPublicKeyParams, opts ...ClientOption) (*GetPublicKeyOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewGetPublicKeyParams()
- }
- op := &runtime.ClientOperation{
- ID: "getPublicKey",
- Method: "GET",
- PathPattern: "/api/v1/log/publicKey",
- ProducesMediaTypes: []string{"application/x-pem-file"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &GetPublicKeyReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*GetPublicKeyOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*GetPublicKeyDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-// SetTransport changes the transport on the client
-func (a *Client) SetTransport(transport runtime.ClientTransport) {
- a.transport = transport
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/rekor_client.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/rekor_client.go
deleted file mode 100644
index bee3811184..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/rekor_client.go
+++ /dev/null
@@ -1,143 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package client
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "github.com/go-openapi/runtime"
- httptransport "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/client/entries"
- "github.com/sigstore/rekor/pkg/generated/client/index"
- "github.com/sigstore/rekor/pkg/generated/client/pubkey"
- "github.com/sigstore/rekor/pkg/generated/client/tlog"
-)
-
-// Default rekor HTTP client.
-var Default = NewHTTPClient(nil)
-
-const (
- // DefaultHost is the default Host
- // found in Meta (info) section of spec file
- DefaultHost string = "rekor.sigstore.dev"
- // DefaultBasePath is the default BasePath
- // found in Meta (info) section of spec file
- DefaultBasePath string = "/"
-)
-
-// DefaultSchemes are the default schemes found in Meta (info) section of spec file
-var DefaultSchemes = []string{"http"}
-
-// NewHTTPClient creates a new rekor HTTP client.
-func NewHTTPClient(formats strfmt.Registry) *Rekor {
- return NewHTTPClientWithConfig(formats, nil)
-}
-
-// NewHTTPClientWithConfig creates a new rekor HTTP client,
-// using a customizable transport config.
-func NewHTTPClientWithConfig(formats strfmt.Registry, cfg *TransportConfig) *Rekor {
- // ensure nullable parameters have default
- if cfg == nil {
- cfg = DefaultTransportConfig()
- }
-
- // create transport and client
- transport := httptransport.New(cfg.Host, cfg.BasePath, cfg.Schemes)
- return New(transport, formats)
-}
-
-// New creates a new rekor client
-func New(transport runtime.ClientTransport, formats strfmt.Registry) *Rekor {
- // ensure nullable parameters have default
- if formats == nil {
- formats = strfmt.Default
- }
-
- cli := new(Rekor)
- cli.Transport = transport
- cli.Entries = entries.New(transport, formats)
- cli.Index = index.New(transport, formats)
- cli.Pubkey = pubkey.New(transport, formats)
- cli.Tlog = tlog.New(transport, formats)
- return cli
-}
-
-// DefaultTransportConfig creates a TransportConfig with the
-// default settings taken from the meta section of the spec file.
-func DefaultTransportConfig() *TransportConfig {
- return &TransportConfig{
- Host: DefaultHost,
- BasePath: DefaultBasePath,
- Schemes: DefaultSchemes,
- }
-}
-
-// TransportConfig contains the transport related info,
-// found in the meta section of the spec file.
-type TransportConfig struct {
- Host string
- BasePath string
- Schemes []string
-}
-
-// WithHost overrides the default host,
-// provided by the meta section of the spec file.
-func (cfg *TransportConfig) WithHost(host string) *TransportConfig {
- cfg.Host = host
- return cfg
-}
-
-// WithBasePath overrides the default basePath,
-// provided by the meta section of the spec file.
-func (cfg *TransportConfig) WithBasePath(basePath string) *TransportConfig {
- cfg.BasePath = basePath
- return cfg
-}
-
-// WithSchemes overrides the default schemes,
-// provided by the meta section of the spec file.
-func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig {
- cfg.Schemes = schemes
- return cfg
-}
-
-// Rekor is a client for rekor
-type Rekor struct {
- Entries entries.ClientService
-
- Index index.ClientService
-
- Pubkey pubkey.ClientService
-
- Tlog tlog.ClientService
-
- Transport runtime.ClientTransport
-}
-
-// SetTransport changes the transport on the client and all its subresources
-func (c *Rekor) SetTransport(transport runtime.ClientTransport) {
- c.Transport = transport
- c.Entries.SetTransport(transport)
- c.Index.SetTransport(transport)
- c.Pubkey.SetTransport(transport)
- c.Tlog.SetTransport(transport)
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_info_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_info_parameters.go
deleted file mode 100644
index b2e329427c..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_info_parameters.go
+++ /dev/null
@@ -1,191 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package tlog
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// NewGetLogInfoParams creates a new GetLogInfoParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewGetLogInfoParams() *GetLogInfoParams {
- return &GetLogInfoParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewGetLogInfoParamsWithTimeout creates a new GetLogInfoParams object
-// with the ability to set a timeout on a request.
-func NewGetLogInfoParamsWithTimeout(timeout time.Duration) *GetLogInfoParams {
- return &GetLogInfoParams{
- timeout: timeout,
- }
-}
-
-// NewGetLogInfoParamsWithContext creates a new GetLogInfoParams object
-// with the ability to set a context for a request.
-func NewGetLogInfoParamsWithContext(ctx context.Context) *GetLogInfoParams {
- return &GetLogInfoParams{
- Context: ctx,
- }
-}
-
-// NewGetLogInfoParamsWithHTTPClient creates a new GetLogInfoParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewGetLogInfoParamsWithHTTPClient(client *http.Client) *GetLogInfoParams {
- return &GetLogInfoParams{
- HTTPClient: client,
- }
-}
-
-/*
-GetLogInfoParams contains all the parameters to send to the API endpoint
-
- for the get log info operation.
-
- Typically these are written to a http.Request.
-*/
-type GetLogInfoParams struct {
-
- /* Stable.
-
- Whether to return a stable checkpoint for the active shard
- */
- Stable *bool
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the get log info params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogInfoParams) WithDefaults() *GetLogInfoParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the get log info params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogInfoParams) SetDefaults() {
- var (
- stableDefault = bool(false)
- )
-
- val := GetLogInfoParams{
- Stable: &stableDefault,
- }
-
- val.timeout = o.timeout
- val.Context = o.Context
- val.HTTPClient = o.HTTPClient
- *o = val
-}
-
-// WithTimeout adds the timeout to the get log info params
-func (o *GetLogInfoParams) WithTimeout(timeout time.Duration) *GetLogInfoParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the get log info params
-func (o *GetLogInfoParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the get log info params
-func (o *GetLogInfoParams) WithContext(ctx context.Context) *GetLogInfoParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the get log info params
-func (o *GetLogInfoParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the get log info params
-func (o *GetLogInfoParams) WithHTTPClient(client *http.Client) *GetLogInfoParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the get log info params
-func (o *GetLogInfoParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithStable adds the stable to the get log info params
-func (o *GetLogInfoParams) WithStable(stable *bool) *GetLogInfoParams {
- o.SetStable(stable)
- return o
-}
-
-// SetStable adds the stable to the get log info params
-func (o *GetLogInfoParams) SetStable(stable *bool) {
- o.Stable = stable
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *GetLogInfoParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
-
- if o.Stable != nil {
-
- // query param stable
- var qrStable bool
-
- if o.Stable != nil {
- qrStable = *o.Stable
- }
- qStable := swag.FormatBool(qrStable)
- if qStable != "" {
-
- if err := r.SetQueryParam("stable", qStable); err != nil {
- return err
- }
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_info_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_info_responses.go
deleted file mode 100644
index a43ac75203..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_info_responses.go
+++ /dev/null
@@ -1,203 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package tlog
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// GetLogInfoReader is a Reader for the GetLogInfo structure.
-type GetLogInfoReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *GetLogInfoReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewGetLogInfoOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- default:
- result := NewGetLogInfoDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewGetLogInfoOK creates a GetLogInfoOK with default headers values
-func NewGetLogInfoOK() *GetLogInfoOK {
- return &GetLogInfoOK{}
-}
-
-/*
-GetLogInfoOK describes a response with status code 200, with default header values.
-
-A JSON object with the root hash and tree size as properties
-*/
-type GetLogInfoOK struct {
- Payload *models.LogInfo
-}
-
-// IsSuccess returns true when this get log info o k response has a 2xx status code
-func (o *GetLogInfoOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this get log info o k response has a 3xx status code
-func (o *GetLogInfoOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log info o k response has a 4xx status code
-func (o *GetLogInfoOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this get log info o k response has a 5xx status code
-func (o *GetLogInfoOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log info o k response a status code equal to that given
-func (o *GetLogInfoOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the get log info o k response
-func (o *GetLogInfoOK) Code() int {
- return 200
-}
-
-func (o *GetLogInfoOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log][%d] getLogInfoOK %s", 200, payload)
-}
-
-func (o *GetLogInfoOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log][%d] getLogInfoOK %s", 200, payload)
-}
-
-func (o *GetLogInfoOK) GetPayload() *models.LogInfo {
- return o.Payload
-}
-
-func (o *GetLogInfoOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.LogInfo)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewGetLogInfoDefault creates a GetLogInfoDefault with default headers values
-func NewGetLogInfoDefault(code int) *GetLogInfoDefault {
- return &GetLogInfoDefault{
- _statusCode: code,
- }
-}
-
-/*
-GetLogInfoDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type GetLogInfoDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this get log info default response has a 2xx status code
-func (o *GetLogInfoDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this get log info default response has a 3xx status code
-func (o *GetLogInfoDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this get log info default response has a 4xx status code
-func (o *GetLogInfoDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this get log info default response has a 5xx status code
-func (o *GetLogInfoDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this get log info default response a status code equal to that given
-func (o *GetLogInfoDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the get log info default response
-func (o *GetLogInfoDefault) Code() int {
- return o._statusCode
-}
-
-func (o *GetLogInfoDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log][%d] getLogInfo default %s", o._statusCode, payload)
-}
-
-func (o *GetLogInfoDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log][%d] getLogInfo default %s", o._statusCode, payload)
-}
-
-func (o *GetLogInfoDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *GetLogInfoDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_proof_parameters.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_proof_parameters.go
deleted file mode 100644
index 2b21ad887c..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_proof_parameters.go
+++ /dev/null
@@ -1,255 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package tlog
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "net/http"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- cr "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// NewGetLogProofParams creates a new GetLogProofParams object,
-// with the default timeout for this client.
-//
-// Default values are not hydrated, since defaults are normally applied by the API server side.
-//
-// To enforce default values in parameter, use SetDefaults or WithDefaults.
-func NewGetLogProofParams() *GetLogProofParams {
- return &GetLogProofParams{
- timeout: cr.DefaultTimeout,
- }
-}
-
-// NewGetLogProofParamsWithTimeout creates a new GetLogProofParams object
-// with the ability to set a timeout on a request.
-func NewGetLogProofParamsWithTimeout(timeout time.Duration) *GetLogProofParams {
- return &GetLogProofParams{
- timeout: timeout,
- }
-}
-
-// NewGetLogProofParamsWithContext creates a new GetLogProofParams object
-// with the ability to set a context for a request.
-func NewGetLogProofParamsWithContext(ctx context.Context) *GetLogProofParams {
- return &GetLogProofParams{
- Context: ctx,
- }
-}
-
-// NewGetLogProofParamsWithHTTPClient creates a new GetLogProofParams object
-// with the ability to set a custom HTTPClient for a request.
-func NewGetLogProofParamsWithHTTPClient(client *http.Client) *GetLogProofParams {
- return &GetLogProofParams{
- HTTPClient: client,
- }
-}
-
-/*
-GetLogProofParams contains all the parameters to send to the API endpoint
-
- for the get log proof operation.
-
- Typically these are written to a http.Request.
-*/
-type GetLogProofParams struct {
-
- /* FirstSize.
-
- The size of the tree that you wish to prove consistency from (1 means the beginning of the log) Defaults to 1 if not specified
-
-
- Default: 1
- */
- FirstSize *int64
-
- /* LastSize.
-
- The size of the tree that you wish to prove consistency to
- */
- LastSize int64
-
- /* TreeID.
-
- The tree ID of the tree that you wish to prove consistency for
- */
- TreeID *string
-
- timeout time.Duration
- Context context.Context
- HTTPClient *http.Client
-}
-
-// WithDefaults hydrates default values in the get log proof params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogProofParams) WithDefaults() *GetLogProofParams {
- o.SetDefaults()
- return o
-}
-
-// SetDefaults hydrates default values in the get log proof params (not the query body).
-//
-// All values with no default are reset to their zero value.
-func (o *GetLogProofParams) SetDefaults() {
- var (
- firstSizeDefault = int64(1)
- )
-
- val := GetLogProofParams{
- FirstSize: &firstSizeDefault,
- }
-
- val.timeout = o.timeout
- val.Context = o.Context
- val.HTTPClient = o.HTTPClient
- *o = val
-}
-
-// WithTimeout adds the timeout to the get log proof params
-func (o *GetLogProofParams) WithTimeout(timeout time.Duration) *GetLogProofParams {
- o.SetTimeout(timeout)
- return o
-}
-
-// SetTimeout adds the timeout to the get log proof params
-func (o *GetLogProofParams) SetTimeout(timeout time.Duration) {
- o.timeout = timeout
-}
-
-// WithContext adds the context to the get log proof params
-func (o *GetLogProofParams) WithContext(ctx context.Context) *GetLogProofParams {
- o.SetContext(ctx)
- return o
-}
-
-// SetContext adds the context to the get log proof params
-func (o *GetLogProofParams) SetContext(ctx context.Context) {
- o.Context = ctx
-}
-
-// WithHTTPClient adds the HTTPClient to the get log proof params
-func (o *GetLogProofParams) WithHTTPClient(client *http.Client) *GetLogProofParams {
- o.SetHTTPClient(client)
- return o
-}
-
-// SetHTTPClient adds the HTTPClient to the get log proof params
-func (o *GetLogProofParams) SetHTTPClient(client *http.Client) {
- o.HTTPClient = client
-}
-
-// WithFirstSize adds the firstSize to the get log proof params
-func (o *GetLogProofParams) WithFirstSize(firstSize *int64) *GetLogProofParams {
- o.SetFirstSize(firstSize)
- return o
-}
-
-// SetFirstSize adds the firstSize to the get log proof params
-func (o *GetLogProofParams) SetFirstSize(firstSize *int64) {
- o.FirstSize = firstSize
-}
-
-// WithLastSize adds the lastSize to the get log proof params
-func (o *GetLogProofParams) WithLastSize(lastSize int64) *GetLogProofParams {
- o.SetLastSize(lastSize)
- return o
-}
-
-// SetLastSize adds the lastSize to the get log proof params
-func (o *GetLogProofParams) SetLastSize(lastSize int64) {
- o.LastSize = lastSize
-}
-
-// WithTreeID adds the treeID to the get log proof params
-func (o *GetLogProofParams) WithTreeID(treeID *string) *GetLogProofParams {
- o.SetTreeID(treeID)
- return o
-}
-
-// SetTreeID adds the treeId to the get log proof params
-func (o *GetLogProofParams) SetTreeID(treeID *string) {
- o.TreeID = treeID
-}
-
-// WriteToRequest writes these params to a swagger request
-func (o *GetLogProofParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
-
- if err := r.SetTimeout(o.timeout); err != nil {
- return err
- }
- var res []error
-
- if o.FirstSize != nil {
-
- // query param firstSize
- var qrFirstSize int64
-
- if o.FirstSize != nil {
- qrFirstSize = *o.FirstSize
- }
- qFirstSize := swag.FormatInt64(qrFirstSize)
- if qFirstSize != "" {
-
- if err := r.SetQueryParam("firstSize", qFirstSize); err != nil {
- return err
- }
- }
- }
-
- // query param lastSize
- qrLastSize := o.LastSize
- qLastSize := swag.FormatInt64(qrLastSize)
- if qLastSize != "" {
-
- if err := r.SetQueryParam("lastSize", qLastSize); err != nil {
- return err
- }
- }
-
- if o.TreeID != nil {
-
- // query param treeID
- var qrTreeID string
-
- if o.TreeID != nil {
- qrTreeID = *o.TreeID
- }
- qTreeID := qrTreeID
- if qTreeID != "" {
-
- if err := r.SetQueryParam("treeID", qTreeID); err != nil {
- return err
- }
- }
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_proof_responses.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_proof_responses.go
deleted file mode 100644
index 9dc9d5854b..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/get_log_proof_responses.go
+++ /dev/null
@@ -1,279 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package tlog
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "encoding/json"
- "fmt"
- "io"
-
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
-
- "github.com/sigstore/rekor/pkg/generated/models"
-)
-
-// GetLogProofReader is a Reader for the GetLogProof structure.
-type GetLogProofReader struct {
- formats strfmt.Registry
-}
-
-// ReadResponse reads a server response into the received o.
-func (o *GetLogProofReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
- switch response.Code() {
- case 200:
- result := NewGetLogProofOK()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return result, nil
- case 400:
- result := NewGetLogProofBadRequest()
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- return nil, result
- default:
- result := NewGetLogProofDefault(response.Code())
- if err := result.readResponse(response, consumer, o.formats); err != nil {
- return nil, err
- }
- if response.Code()/100 == 2 {
- return result, nil
- }
- return nil, result
- }
-}
-
-// NewGetLogProofOK creates a GetLogProofOK with default headers values
-func NewGetLogProofOK() *GetLogProofOK {
- return &GetLogProofOK{}
-}
-
-/*
-GetLogProofOK describes a response with status code 200, with default header values.
-
-All hashes required to compute the consistency proof
-*/
-type GetLogProofOK struct {
- Payload *models.ConsistencyProof
-}
-
-// IsSuccess returns true when this get log proof o k response has a 2xx status code
-func (o *GetLogProofOK) IsSuccess() bool {
- return true
-}
-
-// IsRedirect returns true when this get log proof o k response has a 3xx status code
-func (o *GetLogProofOK) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log proof o k response has a 4xx status code
-func (o *GetLogProofOK) IsClientError() bool {
- return false
-}
-
-// IsServerError returns true when this get log proof o k response has a 5xx status code
-func (o *GetLogProofOK) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log proof o k response a status code equal to that given
-func (o *GetLogProofOK) IsCode(code int) bool {
- return code == 200
-}
-
-// Code gets the status code for the get log proof o k response
-func (o *GetLogProofOK) Code() int {
- return 200
-}
-
-func (o *GetLogProofOK) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/proof][%d] getLogProofOK %s", 200, payload)
-}
-
-func (o *GetLogProofOK) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/proof][%d] getLogProofOK %s", 200, payload)
-}
-
-func (o *GetLogProofOK) GetPayload() *models.ConsistencyProof {
- return o.Payload
-}
-
-func (o *GetLogProofOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.ConsistencyProof)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewGetLogProofBadRequest creates a GetLogProofBadRequest with default headers values
-func NewGetLogProofBadRequest() *GetLogProofBadRequest {
- return &GetLogProofBadRequest{}
-}
-
-/*
-GetLogProofBadRequest describes a response with status code 400, with default header values.
-
-The content supplied to the server was invalid
-*/
-type GetLogProofBadRequest struct {
- Payload *models.Error
-}
-
-// IsSuccess returns true when this get log proof bad request response has a 2xx status code
-func (o *GetLogProofBadRequest) IsSuccess() bool {
- return false
-}
-
-// IsRedirect returns true when this get log proof bad request response has a 3xx status code
-func (o *GetLogProofBadRequest) IsRedirect() bool {
- return false
-}
-
-// IsClientError returns true when this get log proof bad request response has a 4xx status code
-func (o *GetLogProofBadRequest) IsClientError() bool {
- return true
-}
-
-// IsServerError returns true when this get log proof bad request response has a 5xx status code
-func (o *GetLogProofBadRequest) IsServerError() bool {
- return false
-}
-
-// IsCode returns true when this get log proof bad request response a status code equal to that given
-func (o *GetLogProofBadRequest) IsCode(code int) bool {
- return code == 400
-}
-
-// Code gets the status code for the get log proof bad request response
-func (o *GetLogProofBadRequest) Code() int {
- return 400
-}
-
-func (o *GetLogProofBadRequest) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/proof][%d] getLogProofBadRequest %s", 400, payload)
-}
-
-func (o *GetLogProofBadRequest) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/proof][%d] getLogProofBadRequest %s", 400, payload)
-}
-
-func (o *GetLogProofBadRequest) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *GetLogProofBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
-
-// NewGetLogProofDefault creates a GetLogProofDefault with default headers values
-func NewGetLogProofDefault(code int) *GetLogProofDefault {
- return &GetLogProofDefault{
- _statusCode: code,
- }
-}
-
-/*
-GetLogProofDefault describes a response with status code -1, with default header values.
-
-There was an internal error in the server while processing the request
-*/
-type GetLogProofDefault struct {
- _statusCode int
-
- Payload *models.Error
-}
-
-// IsSuccess returns true when this get log proof default response has a 2xx status code
-func (o *GetLogProofDefault) IsSuccess() bool {
- return o._statusCode/100 == 2
-}
-
-// IsRedirect returns true when this get log proof default response has a 3xx status code
-func (o *GetLogProofDefault) IsRedirect() bool {
- return o._statusCode/100 == 3
-}
-
-// IsClientError returns true when this get log proof default response has a 4xx status code
-func (o *GetLogProofDefault) IsClientError() bool {
- return o._statusCode/100 == 4
-}
-
-// IsServerError returns true when this get log proof default response has a 5xx status code
-func (o *GetLogProofDefault) IsServerError() bool {
- return o._statusCode/100 == 5
-}
-
-// IsCode returns true when this get log proof default response a status code equal to that given
-func (o *GetLogProofDefault) IsCode(code int) bool {
- return o._statusCode == code
-}
-
-// Code gets the status code for the get log proof default response
-func (o *GetLogProofDefault) Code() int {
- return o._statusCode
-}
-
-func (o *GetLogProofDefault) Error() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/proof][%d] getLogProof default %s", o._statusCode, payload)
-}
-
-func (o *GetLogProofDefault) String() string {
- payload, _ := json.Marshal(o.Payload)
- return fmt.Sprintf("[GET /api/v1/log/proof][%d] getLogProof default %s", o._statusCode, payload)
-}
-
-func (o *GetLogProofDefault) GetPayload() *models.Error {
- return o.Payload
-}
-
-func (o *GetLogProofDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
-
- o.Payload = new(models.Error)
-
- // response payload
- if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
- return err
- }
-
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/tlog_client.go b/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/tlog_client.go
deleted file mode 100644
index 9befb5c90d..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/client/tlog/tlog_client.go
+++ /dev/null
@@ -1,161 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package tlog
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "github.com/go-openapi/runtime"
- httptransport "github.com/go-openapi/runtime/client"
- "github.com/go-openapi/strfmt"
-)
-
-// New creates a new tlog API client.
-func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
- return &Client{transport: transport, formats: formats}
-}
-
-// New creates a new tlog API client with basic auth credentials.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - user: user for basic authentication header.
-// - password: password for basic authentication header.
-func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BasicAuth(user, password)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-// New creates a new tlog API client with a bearer token for authentication.
-// It takes the following parameters:
-// - host: http host (github.com).
-// - basePath: any base path for the API client ("/v1", "/v3").
-// - scheme: http scheme ("http", "https").
-// - bearerToken: bearer token for Bearer authentication header.
-func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService {
- transport := httptransport.New(host, basePath, []string{scheme})
- transport.DefaultAuthentication = httptransport.BearerToken(bearerToken)
- return &Client{transport: transport, formats: strfmt.Default}
-}
-
-/*
-Client for tlog API
-*/
-type Client struct {
- transport runtime.ClientTransport
- formats strfmt.Registry
-}
-
-// ClientOption may be used to customize the behavior of Client methods.
-type ClientOption func(*runtime.ClientOperation)
-
-// ClientService is the interface for Client methods
-type ClientService interface {
- GetLogInfo(params *GetLogInfoParams, opts ...ClientOption) (*GetLogInfoOK, error)
-
- GetLogProof(params *GetLogProofParams, opts ...ClientOption) (*GetLogProofOK, error)
-
- SetTransport(transport runtime.ClientTransport)
-}
-
-/*
-GetLogInfo gets information about the current state of the transparency log
-
-Returns the current root hash and size of the merkle tree used to store the log entries.
-*/
-func (a *Client) GetLogInfo(params *GetLogInfoParams, opts ...ClientOption) (*GetLogInfoOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewGetLogInfoParams()
- }
- op := &runtime.ClientOperation{
- ID: "getLogInfo",
- Method: "GET",
- PathPattern: "/api/v1/log",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &GetLogInfoReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*GetLogInfoOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*GetLogInfoDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-/*
-GetLogProof gets information required to generate a consistency proof for the transparency log
-
-Returns a list of hashes for specified tree sizes that can be used to confirm the consistency of the transparency log
-*/
-func (a *Client) GetLogProof(params *GetLogProofParams, opts ...ClientOption) (*GetLogProofOK, error) {
- // TODO: Validate the params before sending
- if params == nil {
- params = NewGetLogProofParams()
- }
- op := &runtime.ClientOperation{
- ID: "getLogProof",
- Method: "GET",
- PathPattern: "/api/v1/log/proof",
- ProducesMediaTypes: []string{"application/json"},
- ConsumesMediaTypes: []string{"application/json"},
- Schemes: []string{"http"},
- Params: params,
- Reader: &GetLogProofReader{formats: a.formats},
- Context: params.Context,
- Client: params.HTTPClient,
- }
- for _, opt := range opts {
- opt(op)
- }
-
- result, err := a.transport.Submit(op)
- if err != nil {
- return nil, err
- }
- success, ok := result.(*GetLogProofOK)
- if ok {
- return success, nil
- }
- // unexpected success response
- unexpectedSuccess := result.(*GetLogProofDefault)
- return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
-}
-
-// SetTransport changes the transport on the client
-func (a *Client) SetTransport(transport runtime.ClientTransport) {
- a.transport = transport
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine.go
deleted file mode 100644
index 5607679fdf..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Alpine Alpine package
-//
-// swagger:model alpine
-type Alpine struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec AlpineSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Alpine) Kind() string {
- return "alpine"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Alpine) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Alpine) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec AlpineSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Alpine
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Alpine) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec AlpineSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this alpine
-func (m *Alpine) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Alpine) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Alpine) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this alpine based on the context it is used
-func (m *Alpine) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Alpine) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Alpine) UnmarshalBinary(b []byte) error {
- var res Alpine
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine_schema.go
deleted file mode 100644
index edd25408bb..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// AlpineSchema Alpine Package Schema
-//
-// # Schema for Alpine package objects
-//
-// swagger:model alpineSchema
-type AlpineSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine_v001_schema.go
deleted file mode 100644
index a239c84faa..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/alpine_v001_schema.go
+++ /dev/null
@@ -1,455 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// AlpineV001Schema Alpine v0.0.1 Schema
-//
-// # Schema for Alpine Package entries
-//
-// swagger:model alpineV001Schema
-type AlpineV001Schema struct {
-
- // package
- // Required: true
- Package *AlpineV001SchemaPackage `json:"package"`
-
- // public key
- // Required: true
- PublicKey *AlpineV001SchemaPublicKey `json:"publicKey"`
-}
-
-// Validate validates this alpine v001 schema
-func (m *AlpineV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validatePackage(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *AlpineV001Schema) validatePackage(formats strfmt.Registry) error {
-
- if err := validate.Required("package", "body", m.Package); err != nil {
- return err
- }
-
- if m.Package != nil {
- if err := m.Package.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *AlpineV001Schema) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this alpine v001 schema based on the context it is used
-func (m *AlpineV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidatePackage(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *AlpineV001Schema) contextValidatePackage(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Package != nil {
-
- if err := m.Package.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *AlpineV001Schema) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *AlpineV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *AlpineV001Schema) UnmarshalBinary(b []byte) error {
- var res AlpineV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// AlpineV001SchemaPackage Information about the package associated with the entry
-//
-// swagger:model AlpineV001SchemaPackage
-type AlpineV001SchemaPackage struct {
-
- // Specifies the package inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // hash
- Hash *AlpineV001SchemaPackageHash `json:"hash,omitempty"`
-
- // Values of the .PKGINFO key / value pairs
- // Read Only: true
- Pkginfo map[string]string `json:"pkginfo,omitempty"`
-}
-
-// Validate validates this alpine v001 schema package
-func (m *AlpineV001SchemaPackage) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *AlpineV001SchemaPackage) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this alpine v001 schema package based on the context it is used
-func (m *AlpineV001SchemaPackage) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePkginfo(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *AlpineV001SchemaPackage) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *AlpineV001SchemaPackage) contextValidatePkginfo(ctx context.Context, formats strfmt.Registry) error {
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *AlpineV001SchemaPackage) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *AlpineV001SchemaPackage) UnmarshalBinary(b []byte) error {
- var res AlpineV001SchemaPackage
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// AlpineV001SchemaPackageHash Specifies the hash algorithm and value for the package
-//
-// swagger:model AlpineV001SchemaPackageHash
-type AlpineV001SchemaPackageHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the package
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this alpine v001 schema package hash
-func (m *AlpineV001SchemaPackageHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var alpineV001SchemaPackageHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- alpineV001SchemaPackageHashTypeAlgorithmPropEnum = append(alpineV001SchemaPackageHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // AlpineV001SchemaPackageHashAlgorithmSha256 captures enum value "sha256"
- AlpineV001SchemaPackageHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *AlpineV001SchemaPackageHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, alpineV001SchemaPackageHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *AlpineV001SchemaPackageHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("package"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("package"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *AlpineV001SchemaPackageHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("package"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this alpine v001 schema package hash based on the context it is used
-func (m *AlpineV001SchemaPackageHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *AlpineV001SchemaPackageHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *AlpineV001SchemaPackageHash) UnmarshalBinary(b []byte) error {
- var res AlpineV001SchemaPackageHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// AlpineV001SchemaPublicKey The public key that can verify the package signature
-//
-// swagger:model AlpineV001SchemaPublicKey
-type AlpineV001SchemaPublicKey struct {
-
- // Specifies the content of the public key inline within the document
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this alpine v001 schema public key
-func (m *AlpineV001SchemaPublicKey) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *AlpineV001SchemaPublicKey) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this alpine v001 schema public key based on context it is used
-func (m *AlpineV001SchemaPublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *AlpineV001SchemaPublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *AlpineV001SchemaPublicKey) UnmarshalBinary(b []byte) error {
- var res AlpineV001SchemaPublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/consistency_proof.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/consistency_proof.go
deleted file mode 100644
index 804ddd11a9..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/consistency_proof.go
+++ /dev/null
@@ -1,118 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// ConsistencyProof consistency proof
-//
-// swagger:model ConsistencyProof
-type ConsistencyProof struct {
-
- // hashes
- // Required: true
- Hashes []string `json:"hashes"`
-
- // The hash value stored at the root of the merkle tree at the time the proof was generated
- // Required: true
- // Pattern: ^[0-9a-fA-F]{64}$
- RootHash *string `json:"rootHash"`
-}
-
-// Validate validates this consistency proof
-func (m *ConsistencyProof) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHashes(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateRootHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *ConsistencyProof) validateHashes(formats strfmt.Registry) error {
-
- if err := validate.Required("hashes", "body", m.Hashes); err != nil {
- return err
- }
-
- for i := 0; i < len(m.Hashes); i++ {
-
- if err := validate.Pattern("hashes"+"."+strconv.Itoa(i), "body", m.Hashes[i], `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- }
-
- return nil
-}
-
-func (m *ConsistencyProof) validateRootHash(formats strfmt.Registry) error {
-
- if err := validate.Required("rootHash", "body", m.RootHash); err != nil {
- return err
- }
-
- if err := validate.Pattern("rootHash", "body", *m.RootHash, `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this consistency proof based on context it is used
-func (m *ConsistencyProof) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *ConsistencyProof) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *ConsistencyProof) UnmarshalBinary(b []byte) error {
- var res ConsistencyProof
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/cose.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/cose.go
deleted file mode 100644
index 8de4083baf..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/cose.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Cose COSE object
-//
-// swagger:model cose
-type Cose struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec CoseSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Cose) Kind() string {
- return "cose"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Cose) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Cose) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec CoseSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Cose
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Cose) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec CoseSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this cose
-func (m *Cose) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Cose) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Cose) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this cose based on the context it is used
-func (m *Cose) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Cose) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Cose) UnmarshalBinary(b []byte) error {
- var res Cose
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/cose_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/cose_schema.go
deleted file mode 100644
index e653f22029..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/cose_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// CoseSchema COSE Schema
-//
-// # COSE for Rekord objects
-//
-// swagger:model coseSchema
-type CoseSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/cose_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/cose_v001_schema.go
deleted file mode 100644
index 5818dca1c8..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/cose_v001_schema.go
+++ /dev/null
@@ -1,521 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// CoseV001Schema cose v0.0.1 Schema
-//
-// # Schema for cose object
-//
-// swagger:model coseV001Schema
-type CoseV001Schema struct {
-
- // data
- Data *CoseV001SchemaData `json:"data,omitempty"`
-
- // The COSE Sign1 Message
- // Format: byte
- Message strfmt.Base64 `json:"message,omitempty"`
-
- // The public key that can verify the signature
- // Required: true
- // Format: byte
- PublicKey *strfmt.Base64 `json:"publicKey"`
-}
-
-// Validate validates this cose v001 schema
-func (m *CoseV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateData(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *CoseV001Schema) validateData(formats strfmt.Registry) error {
- if swag.IsZero(m.Data) { // not required
- return nil
- }
-
- if m.Data != nil {
- if err := m.Data.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *CoseV001Schema) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this cose v001 schema based on the context it is used
-func (m *CoseV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateData(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *CoseV001Schema) contextValidateData(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Data != nil {
-
- if swag.IsZero(m.Data) { // not required
- return nil
- }
-
- if err := m.Data.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *CoseV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *CoseV001Schema) UnmarshalBinary(b []byte) error {
- var res CoseV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// CoseV001SchemaData Information about the content associated with the entry
-//
-// swagger:model CoseV001SchemaData
-type CoseV001SchemaData struct {
-
- // Specifies the additional authenticated data required to verify the signature
- // Format: byte
- Aad strfmt.Base64 `json:"aad,omitempty"`
-
- // envelope hash
- EnvelopeHash *CoseV001SchemaDataEnvelopeHash `json:"envelopeHash,omitempty"`
-
- // payload hash
- PayloadHash *CoseV001SchemaDataPayloadHash `json:"payloadHash,omitempty"`
-}
-
-// Validate validates this cose v001 schema data
-func (m *CoseV001SchemaData) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateEnvelopeHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePayloadHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *CoseV001SchemaData) validateEnvelopeHash(formats strfmt.Registry) error {
- if swag.IsZero(m.EnvelopeHash) { // not required
- return nil
- }
-
- if m.EnvelopeHash != nil {
- if err := m.EnvelopeHash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "envelopeHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "envelopeHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *CoseV001SchemaData) validatePayloadHash(formats strfmt.Registry) error {
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if m.PayloadHash != nil {
- if err := m.PayloadHash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this cose v001 schema data based on the context it is used
-func (m *CoseV001SchemaData) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateEnvelopeHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePayloadHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *CoseV001SchemaData) contextValidateEnvelopeHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.EnvelopeHash != nil {
-
- if swag.IsZero(m.EnvelopeHash) { // not required
- return nil
- }
-
- if err := m.EnvelopeHash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "envelopeHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "envelopeHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *CoseV001SchemaData) contextValidatePayloadHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PayloadHash != nil {
-
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if err := m.PayloadHash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *CoseV001SchemaData) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *CoseV001SchemaData) UnmarshalBinary(b []byte) error {
- var res CoseV001SchemaData
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// CoseV001SchemaDataEnvelopeHash Specifies the hash algorithm and value for the COSE envelope
-//
-// swagger:model CoseV001SchemaDataEnvelopeHash
-type CoseV001SchemaDataEnvelopeHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the envelope
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this cose v001 schema data envelope hash
-func (m *CoseV001SchemaDataEnvelopeHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var coseV001SchemaDataEnvelopeHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- coseV001SchemaDataEnvelopeHashTypeAlgorithmPropEnum = append(coseV001SchemaDataEnvelopeHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // CoseV001SchemaDataEnvelopeHashAlgorithmSha256 captures enum value "sha256"
- CoseV001SchemaDataEnvelopeHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *CoseV001SchemaDataEnvelopeHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, coseV001SchemaDataEnvelopeHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *CoseV001SchemaDataEnvelopeHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"envelopeHash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("data"+"."+"envelopeHash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *CoseV001SchemaDataEnvelopeHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"envelopeHash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this cose v001 schema data envelope hash based on the context it is used
-func (m *CoseV001SchemaDataEnvelopeHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *CoseV001SchemaDataEnvelopeHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *CoseV001SchemaDataEnvelopeHash) UnmarshalBinary(b []byte) error {
- var res CoseV001SchemaDataEnvelopeHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// CoseV001SchemaDataPayloadHash Specifies the hash algorithm and value for the content
-//
-// swagger:model CoseV001SchemaDataPayloadHash
-type CoseV001SchemaDataPayloadHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the content
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this cose v001 schema data payload hash
-func (m *CoseV001SchemaDataPayloadHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var coseV001SchemaDataPayloadHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- coseV001SchemaDataPayloadHashTypeAlgorithmPropEnum = append(coseV001SchemaDataPayloadHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // CoseV001SchemaDataPayloadHashAlgorithmSha256 captures enum value "sha256"
- CoseV001SchemaDataPayloadHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *CoseV001SchemaDataPayloadHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, coseV001SchemaDataPayloadHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *CoseV001SchemaDataPayloadHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"payloadHash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("data"+"."+"payloadHash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *CoseV001SchemaDataPayloadHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"payloadHash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this cose v001 schema data payload hash based on the context it is used
-func (m *CoseV001SchemaDataPayloadHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *CoseV001SchemaDataPayloadHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *CoseV001SchemaDataPayloadHash) UnmarshalBinary(b []byte) error {
- var res CoseV001SchemaDataPayloadHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse.go
deleted file mode 100644
index dde562054c..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// DSSE DSSE envelope
-//
-// swagger:model dsse
-type DSSE struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec DSSESchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *DSSE) Kind() string {
- return "dsse"
-}
-
-// SetKind sets the kind of this subtype
-func (m *DSSE) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *DSSE) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec DSSESchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result DSSE
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m DSSE) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec DSSESchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this dsse
-func (m *DSSE) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *DSSE) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *DSSE) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this dsse based on the context it is used
-func (m *DSSE) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *DSSE) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *DSSE) UnmarshalBinary(b []byte) error {
- var res DSSE
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse_schema.go
deleted file mode 100644
index 7795626438..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// DSSESchema DSSE Schema
-//
-// log entry schema for dsse envelopes
-//
-// swagger:model dsseSchema
-type DSSESchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse_v001_schema.go
deleted file mode 100644
index 5fde2a77e1..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/dsse_v001_schema.go
+++ /dev/null
@@ -1,685 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// DSSEV001Schema DSSE v0.0.1 Schema
-//
-// # Schema for DSSE envelopes
-//
-// swagger:model dsseV001Schema
-type DSSEV001Schema struct {
-
- // envelope hash
- EnvelopeHash *DSSEV001SchemaEnvelopeHash `json:"envelopeHash,omitempty"`
-
- // payload hash
- PayloadHash *DSSEV001SchemaPayloadHash `json:"payloadHash,omitempty"`
-
- // proposed content
- ProposedContent *DSSEV001SchemaProposedContent `json:"proposedContent,omitempty"`
-
- // extracted collection of all signatures of the envelope's payload; elements will be sorted by lexicographical order of the base64 encoded signature strings
- // Read Only: true
- // Min Items: 1
- Signatures []*DSSEV001SchemaSignaturesItems0 `json:"signatures"`
-}
-
-// Validate validates this dsse v001 schema
-func (m *DSSEV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateEnvelopeHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePayloadHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateProposedContent(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignatures(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *DSSEV001Schema) validateEnvelopeHash(formats strfmt.Registry) error {
- if swag.IsZero(m.EnvelopeHash) { // not required
- return nil
- }
-
- if m.EnvelopeHash != nil {
- if err := m.EnvelopeHash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("envelopeHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("envelopeHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *DSSEV001Schema) validatePayloadHash(formats strfmt.Registry) error {
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if m.PayloadHash != nil {
- if err := m.PayloadHash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *DSSEV001Schema) validateProposedContent(formats strfmt.Registry) error {
- if swag.IsZero(m.ProposedContent) { // not required
- return nil
- }
-
- if m.ProposedContent != nil {
- if err := m.ProposedContent.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("proposedContent")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("proposedContent")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *DSSEV001Schema) validateSignatures(formats strfmt.Registry) error {
- if swag.IsZero(m.Signatures) { // not required
- return nil
- }
-
- iSignaturesSize := int64(len(m.Signatures))
-
- if err := validate.MinItems("signatures", "body", iSignaturesSize, 1); err != nil {
- return err
- }
-
- for i := 0; i < len(m.Signatures); i++ {
- if swag.IsZero(m.Signatures[i]) { // not required
- continue
- }
-
- if m.Signatures[i] != nil {
- if err := m.Signatures[i].Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signatures" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signatures" + "." + strconv.Itoa(i))
- }
- return err
- }
- }
-
- }
-
- return nil
-}
-
-// ContextValidate validate this dsse v001 schema based on the context it is used
-func (m *DSSEV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateEnvelopeHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePayloadHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateProposedContent(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateSignatures(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *DSSEV001Schema) contextValidateEnvelopeHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.EnvelopeHash != nil {
-
- if swag.IsZero(m.EnvelopeHash) { // not required
- return nil
- }
-
- if err := m.EnvelopeHash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("envelopeHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("envelopeHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *DSSEV001Schema) contextValidatePayloadHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PayloadHash != nil {
-
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if err := m.PayloadHash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *DSSEV001Schema) contextValidateProposedContent(ctx context.Context, formats strfmt.Registry) error {
-
- if m.ProposedContent != nil {
-
- if swag.IsZero(m.ProposedContent) { // not required
- return nil
- }
-
- if err := m.ProposedContent.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("proposedContent")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("proposedContent")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *DSSEV001Schema) contextValidateSignatures(ctx context.Context, formats strfmt.Registry) error {
-
- if err := validate.ReadOnly(ctx, "signatures", "body", []*DSSEV001SchemaSignaturesItems0(m.Signatures)); err != nil {
- return err
- }
-
- for i := 0; i < len(m.Signatures); i++ {
-
- if m.Signatures[i] != nil {
-
- if swag.IsZero(m.Signatures[i]) { // not required
- return nil
- }
-
- if err := m.Signatures[i].ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signatures" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signatures" + "." + strconv.Itoa(i))
- }
- return err
- }
- }
-
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *DSSEV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *DSSEV001Schema) UnmarshalBinary(b []byte) error {
- var res DSSEV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// DSSEV001SchemaEnvelopeHash Specifies the hash algorithm and value encompassing the entire envelope sent to Rekor
-//
-// swagger:model DSSEV001SchemaEnvelopeHash
-type DSSEV001SchemaEnvelopeHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The value of the computed digest over the entire envelope
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this DSSE v001 schema envelope hash
-func (m *DSSEV001SchemaEnvelopeHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var dsseV001SchemaEnvelopeHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- dsseV001SchemaEnvelopeHashTypeAlgorithmPropEnum = append(dsseV001SchemaEnvelopeHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // DSSEV001SchemaEnvelopeHashAlgorithmSha256 captures enum value "sha256"
- DSSEV001SchemaEnvelopeHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *DSSEV001SchemaEnvelopeHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, dsseV001SchemaEnvelopeHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *DSSEV001SchemaEnvelopeHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("envelopeHash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("envelopeHash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *DSSEV001SchemaEnvelopeHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("envelopeHash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this DSSE v001 schema envelope hash based on the context it is used
-func (m *DSSEV001SchemaEnvelopeHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *DSSEV001SchemaEnvelopeHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *DSSEV001SchemaEnvelopeHash) UnmarshalBinary(b []byte) error {
- var res DSSEV001SchemaEnvelopeHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// DSSEV001SchemaPayloadHash Specifies the hash algorithm and value covering the payload within the DSSE envelope
-//
-// swagger:model DSSEV001SchemaPayloadHash
-type DSSEV001SchemaPayloadHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The value of the computed digest over the payload within the envelope
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this DSSE v001 schema payload hash
-func (m *DSSEV001SchemaPayloadHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var dsseV001SchemaPayloadHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- dsseV001SchemaPayloadHashTypeAlgorithmPropEnum = append(dsseV001SchemaPayloadHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // DSSEV001SchemaPayloadHashAlgorithmSha256 captures enum value "sha256"
- DSSEV001SchemaPayloadHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *DSSEV001SchemaPayloadHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, dsseV001SchemaPayloadHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *DSSEV001SchemaPayloadHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("payloadHash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("payloadHash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *DSSEV001SchemaPayloadHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("payloadHash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this DSSE v001 schema payload hash based on the context it is used
-func (m *DSSEV001SchemaPayloadHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *DSSEV001SchemaPayloadHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *DSSEV001SchemaPayloadHash) UnmarshalBinary(b []byte) error {
- var res DSSEV001SchemaPayloadHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// DSSEV001SchemaProposedContent DSSE v001 schema proposed content
-//
-// swagger:model DSSEV001SchemaProposedContent
-type DSSEV001SchemaProposedContent struct {
-
- // DSSE envelope specified as a stringified JSON object
- // Required: true
- Envelope *string `json:"envelope"`
-
- // collection of all verification material (e.g. public keys or certificates) used to verify signatures over envelope's payload, specified as base64-encoded strings
- // Required: true
- // Min Items: 1
- Verifiers []strfmt.Base64 `json:"verifiers"`
-}
-
-// Validate validates this DSSE v001 schema proposed content
-func (m *DSSEV001SchemaProposedContent) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateEnvelope(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateVerifiers(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *DSSEV001SchemaProposedContent) validateEnvelope(formats strfmt.Registry) error {
-
- if err := validate.Required("proposedContent"+"."+"envelope", "body", m.Envelope); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *DSSEV001SchemaProposedContent) validateVerifiers(formats strfmt.Registry) error {
-
- if err := validate.Required("proposedContent"+"."+"verifiers", "body", m.Verifiers); err != nil {
- return err
- }
-
- iVerifiersSize := int64(len(m.Verifiers))
-
- if err := validate.MinItems("proposedContent"+"."+"verifiers", "body", iVerifiersSize, 1); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this DSSE v001 schema proposed content based on context it is used
-func (m *DSSEV001SchemaProposedContent) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *DSSEV001SchemaProposedContent) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *DSSEV001SchemaProposedContent) UnmarshalBinary(b []byte) error {
- var res DSSEV001SchemaProposedContent
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// DSSEV001SchemaSignaturesItems0 a signature of the envelope's payload along with the verification material for the signature
-//
-// swagger:model DSSEV001SchemaSignaturesItems0
-type DSSEV001SchemaSignaturesItems0 struct {
-
- // base64 encoded signature of the payload
- // Required: true
- // Pattern: ^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=|[A-Za-z0-9+\/]{4})$
- Signature *string `json:"signature"`
-
- // verification material that was used to verify the corresponding signature, specified as a base64 encoded string
- // Required: true
- // Format: byte
- Verifier *strfmt.Base64 `json:"verifier"`
-}
-
-// Validate validates this DSSE v001 schema signatures items0
-func (m *DSSEV001SchemaSignaturesItems0) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateSignature(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateVerifier(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *DSSEV001SchemaSignaturesItems0) validateSignature(formats strfmt.Registry) error {
-
- if err := validate.Required("signature", "body", m.Signature); err != nil {
- return err
- }
-
- if err := validate.Pattern("signature", "body", *m.Signature, `^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=|[A-Za-z0-9+\/]{4})$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *DSSEV001SchemaSignaturesItems0) validateVerifier(formats strfmt.Registry) error {
-
- if err := validate.Required("verifier", "body", m.Verifier); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this DSSE v001 schema signatures items0 based on context it is used
-func (m *DSSEV001SchemaSignaturesItems0) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *DSSEV001SchemaSignaturesItems0) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *DSSEV001SchemaSignaturesItems0) UnmarshalBinary(b []byte) error {
- var res DSSEV001SchemaSignaturesItems0
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/error.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/error.go
deleted file mode 100644
index ac14f2026e..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/error.go
+++ /dev/null
@@ -1,69 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
-
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
-)
-
-// Error error
-//
-// swagger:model Error
-type Error struct {
-
- // code
- Code int64 `json:"code,omitempty"`
-
- // message
- Message string `json:"message,omitempty"`
-}
-
-// Validate validates this error
-func (m *Error) Validate(formats strfmt.Registry) error {
- return nil
-}
-
-// ContextValidate validates this error based on context it is used
-func (m *Error) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Error) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Error) UnmarshalBinary(b []byte) error {
- var res Error
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord.go
deleted file mode 100644
index b3e1f8a3bd..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Hashedrekord Hashed Rekord object
-//
-// swagger:model hashedrekord
-type Hashedrekord struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec HashedrekordSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Hashedrekord) Kind() string {
- return "hashedrekord"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Hashedrekord) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Hashedrekord) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec HashedrekordSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Hashedrekord
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Hashedrekord) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec HashedrekordSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this hashedrekord
-func (m *Hashedrekord) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Hashedrekord) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Hashedrekord) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this hashedrekord based on the context it is used
-func (m *Hashedrekord) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Hashedrekord) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Hashedrekord) UnmarshalBinary(b []byte) error {
- var res Hashedrekord
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord_schema.go
deleted file mode 100644
index 56034a579e..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// HashedrekordSchema Hashedrekord Schema
-//
-// # Schema for Hashedrekord objects
-//
-// swagger:model hashedrekordSchema
-type HashedrekordSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord_v001_schema.go
deleted file mode 100644
index 586025c5bb..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/hashedrekord_v001_schema.go
+++ /dev/null
@@ -1,519 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// HashedrekordV001Schema Hashed Rekor v0.0.1 Schema
-//
-// # Schema for Hashed Rekord object
-//
-// swagger:model hashedrekordV001Schema
-type HashedrekordV001Schema struct {
-
- // data
- // Required: true
- Data *HashedrekordV001SchemaData `json:"data"`
-
- // signature
- // Required: true
- Signature *HashedrekordV001SchemaSignature `json:"signature"`
-}
-
-// Validate validates this hashedrekord v001 schema
-func (m *HashedrekordV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateData(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignature(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HashedrekordV001Schema) validateData(formats strfmt.Registry) error {
-
- if err := validate.Required("data", "body", m.Data); err != nil {
- return err
- }
-
- if m.Data != nil {
- if err := m.Data.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *HashedrekordV001Schema) validateSignature(formats strfmt.Registry) error {
-
- if err := validate.Required("signature", "body", m.Signature); err != nil {
- return err
- }
-
- if m.Signature != nil {
- if err := m.Signature.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this hashedrekord v001 schema based on the context it is used
-func (m *HashedrekordV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateData(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateSignature(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HashedrekordV001Schema) contextValidateData(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Data != nil {
-
- if err := m.Data.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *HashedrekordV001Schema) contextValidateSignature(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Signature != nil {
-
- if err := m.Signature.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HashedrekordV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HashedrekordV001Schema) UnmarshalBinary(b []byte) error {
- var res HashedrekordV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HashedrekordV001SchemaData Information about the content associated with the entry
-//
-// swagger:model HashedrekordV001SchemaData
-type HashedrekordV001SchemaData struct {
-
- // hash
- Hash *HashedrekordV001SchemaDataHash `json:"hash,omitempty"`
-}
-
-// Validate validates this hashedrekord v001 schema data
-func (m *HashedrekordV001SchemaData) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HashedrekordV001SchemaData) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this hashedrekord v001 schema data based on the context it is used
-func (m *HashedrekordV001SchemaData) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HashedrekordV001SchemaData) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HashedrekordV001SchemaData) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HashedrekordV001SchemaData) UnmarshalBinary(b []byte) error {
- var res HashedrekordV001SchemaData
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HashedrekordV001SchemaDataHash Specifies the hash algorithm and value for the content
-//
-// swagger:model HashedrekordV001SchemaDataHash
-type HashedrekordV001SchemaDataHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256","sha384","sha512"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the content, as represented by a lower case hexadecimal string
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this hashedrekord v001 schema data hash
-func (m *HashedrekordV001SchemaDataHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var hashedrekordV001SchemaDataHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256","sha384","sha512"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- hashedrekordV001SchemaDataHashTypeAlgorithmPropEnum = append(hashedrekordV001SchemaDataHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // HashedrekordV001SchemaDataHashAlgorithmSha256 captures enum value "sha256"
- HashedrekordV001SchemaDataHashAlgorithmSha256 string = "sha256"
-
- // HashedrekordV001SchemaDataHashAlgorithmSha384 captures enum value "sha384"
- HashedrekordV001SchemaDataHashAlgorithmSha384 string = "sha384"
-
- // HashedrekordV001SchemaDataHashAlgorithmSha512 captures enum value "sha512"
- HashedrekordV001SchemaDataHashAlgorithmSha512 string = "sha512"
-)
-
-// prop value enum
-func (m *HashedrekordV001SchemaDataHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, hashedrekordV001SchemaDataHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *HashedrekordV001SchemaDataHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("data"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *HashedrekordV001SchemaDataHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this hashedrekord v001 schema data hash based on context it is used
-func (m *HashedrekordV001SchemaDataHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HashedrekordV001SchemaDataHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HashedrekordV001SchemaDataHash) UnmarshalBinary(b []byte) error {
- var res HashedrekordV001SchemaDataHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HashedrekordV001SchemaSignature Information about the detached signature associated with the entry
-//
-// swagger:model HashedrekordV001SchemaSignature
-type HashedrekordV001SchemaSignature struct {
-
- // Specifies the content of the signature inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // public key
- PublicKey *HashedrekordV001SchemaSignaturePublicKey `json:"publicKey,omitempty"`
-}
-
-// Validate validates this hashedrekord v001 schema signature
-func (m *HashedrekordV001SchemaSignature) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HashedrekordV001SchemaSignature) validatePublicKey(formats strfmt.Registry) error {
- if swag.IsZero(m.PublicKey) { // not required
- return nil
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature" + "." + "publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature" + "." + "publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this hashedrekord v001 schema signature based on the context it is used
-func (m *HashedrekordV001SchemaSignature) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HashedrekordV001SchemaSignature) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if swag.IsZero(m.PublicKey) { // not required
- return nil
- }
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature" + "." + "publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature" + "." + "publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HashedrekordV001SchemaSignature) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HashedrekordV001SchemaSignature) UnmarshalBinary(b []byte) error {
- var res HashedrekordV001SchemaSignature
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HashedrekordV001SchemaSignaturePublicKey The public key that can verify the signature; this can also be an X509 code signing certificate that contains the raw public key information
-//
-// swagger:model HashedrekordV001SchemaSignaturePublicKey
-type HashedrekordV001SchemaSignaturePublicKey struct {
-
- // Specifies the content of the public key or code signing certificate inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-}
-
-// Validate validates this hashedrekord v001 schema signature public key
-func (m *HashedrekordV001SchemaSignaturePublicKey) Validate(formats strfmt.Registry) error {
- return nil
-}
-
-// ContextValidate validates this hashedrekord v001 schema signature public key based on context it is used
-func (m *HashedrekordV001SchemaSignaturePublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HashedrekordV001SchemaSignaturePublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HashedrekordV001SchemaSignaturePublicKey) UnmarshalBinary(b []byte) error {
- var res HashedrekordV001SchemaSignaturePublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/helm.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/helm.go
deleted file mode 100644
index d19b8bc8c9..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/helm.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Helm Helm chart
-//
-// swagger:model helm
-type Helm struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec HelmSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Helm) Kind() string {
- return "helm"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Helm) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Helm) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec HelmSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
-		/* Just the base type fields. Used for unmarshalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Helm
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Helm) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec HelmSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this helm
-func (m *Helm) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Helm) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Helm) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this helm based on the context it is used
-func (m *Helm) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Helm) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Helm) UnmarshalBinary(b []byte) error {
- var res Helm
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/helm_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/helm_schema.go
deleted file mode 100644
index 0ab87df9ce..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/helm_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// HelmSchema Helm Schema
-//
-// # Schema for Helm objects
-//
-// swagger:model helmSchema
-type HelmSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/helm_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/helm_v001_schema.go
deleted file mode 100644
index 13c00597c6..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/helm_v001_schema.go
+++ /dev/null
@@ -1,662 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// HelmV001Schema Helm v0.0.1 Schema
-//
-// # Schema for Helm object
-//
-// swagger:model helmV001Schema
-type HelmV001Schema struct {
-
- // chart
- // Required: true
- Chart *HelmV001SchemaChart `json:"chart"`
-
- // public key
- // Required: true
- PublicKey *HelmV001SchemaPublicKey `json:"publicKey"`
-}
-
-// Validate validates this helm v001 schema
-func (m *HelmV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateChart(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001Schema) validateChart(formats strfmt.Registry) error {
-
- if err := validate.Required("chart", "body", m.Chart); err != nil {
- return err
- }
-
- if m.Chart != nil {
- if err := m.Chart.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *HelmV001Schema) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this helm v001 schema based on the context it is used
-func (m *HelmV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateChart(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001Schema) contextValidateChart(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Chart != nil {
-
- if err := m.Chart.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *HelmV001Schema) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HelmV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HelmV001Schema) UnmarshalBinary(b []byte) error {
- var res HelmV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HelmV001SchemaChart Information about the Helm chart associated with the entry
-//
-// swagger:model HelmV001SchemaChart
-type HelmV001SchemaChart struct {
-
- // hash
- Hash *HelmV001SchemaChartHash `json:"hash,omitempty"`
-
- // provenance
- // Required: true
- Provenance *HelmV001SchemaChartProvenance `json:"provenance"`
-}
-
-// Validate validates this helm v001 schema chart
-func (m *HelmV001SchemaChart) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateProvenance(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaChart) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *HelmV001SchemaChart) validateProvenance(formats strfmt.Registry) error {
-
- if err := validate.Required("chart"+"."+"provenance", "body", m.Provenance); err != nil {
- return err
- }
-
- if m.Provenance != nil {
- if err := m.Provenance.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart" + "." + "provenance")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart" + "." + "provenance")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this helm v001 schema chart based on the context it is used
-func (m *HelmV001SchemaChart) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateProvenance(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaChart) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *HelmV001SchemaChart) contextValidateProvenance(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Provenance != nil {
-
- if err := m.Provenance.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart" + "." + "provenance")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart" + "." + "provenance")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HelmV001SchemaChart) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HelmV001SchemaChart) UnmarshalBinary(b []byte) error {
- var res HelmV001SchemaChart
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HelmV001SchemaChartHash Specifies the hash algorithm and value for the chart
-//
-// swagger:model HelmV001SchemaChartHash
-type HelmV001SchemaChartHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the chart
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this helm v001 schema chart hash
-func (m *HelmV001SchemaChartHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var helmV001SchemaChartHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- helmV001SchemaChartHashTypeAlgorithmPropEnum = append(helmV001SchemaChartHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // HelmV001SchemaChartHashAlgorithmSha256 captures enum value "sha256"
- HelmV001SchemaChartHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *HelmV001SchemaChartHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, helmV001SchemaChartHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *HelmV001SchemaChartHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("chart"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("chart"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *HelmV001SchemaChartHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("chart"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this helm v001 schema chart hash based on the context it is used
-func (m *HelmV001SchemaChartHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HelmV001SchemaChartHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HelmV001SchemaChartHash) UnmarshalBinary(b []byte) error {
- var res HelmV001SchemaChartHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HelmV001SchemaChartProvenance The provenance entry associated with the signed Helm Chart
-//
-// swagger:model HelmV001SchemaChartProvenance
-type HelmV001SchemaChartProvenance struct {
-
- // Specifies the content of the provenance file inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // signature
- Signature *HelmV001SchemaChartProvenanceSignature `json:"signature,omitempty"`
-}
-
-// Validate validates this helm v001 schema chart provenance
-func (m *HelmV001SchemaChartProvenance) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateSignature(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaChartProvenance) validateSignature(formats strfmt.Registry) error {
- if swag.IsZero(m.Signature) { // not required
- return nil
- }
-
- if m.Signature != nil {
- if err := m.Signature.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart" + "." + "provenance" + "." + "signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart" + "." + "provenance" + "." + "signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this helm v001 schema chart provenance based on the context it is used
-func (m *HelmV001SchemaChartProvenance) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateSignature(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaChartProvenance) contextValidateSignature(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Signature != nil {
-
- if swag.IsZero(m.Signature) { // not required
- return nil
- }
-
- if err := m.Signature.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("chart" + "." + "provenance" + "." + "signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("chart" + "." + "provenance" + "." + "signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HelmV001SchemaChartProvenance) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HelmV001SchemaChartProvenance) UnmarshalBinary(b []byte) error {
- var res HelmV001SchemaChartProvenance
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HelmV001SchemaChartProvenanceSignature Information about the included signature in the provenance file
-//
-// swagger:model HelmV001SchemaChartProvenanceSignature
-type HelmV001SchemaChartProvenanceSignature struct {
-
- // Specifies the signature embedded within the provenance file
- // Required: true
- // Read Only: true
- // Format: byte
- Content strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this helm v001 schema chart provenance signature
-func (m *HelmV001SchemaChartProvenanceSignature) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaChartProvenanceSignature) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("chart"+"."+"provenance"+"."+"signature"+"."+"content", "body", strfmt.Base64(m.Content)); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this helm v001 schema chart provenance signature based on the context it is used
-func (m *HelmV001SchemaChartProvenanceSignature) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateContent(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaChartProvenanceSignature) contextValidateContent(ctx context.Context, formats strfmt.Registry) error {
-
- if err := validate.ReadOnly(ctx, "chart"+"."+"provenance"+"."+"signature"+"."+"content", "body", strfmt.Base64(m.Content)); err != nil {
- return err
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HelmV001SchemaChartProvenanceSignature) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HelmV001SchemaChartProvenanceSignature) UnmarshalBinary(b []byte) error {
- var res HelmV001SchemaChartProvenanceSignature
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// HelmV001SchemaPublicKey The public key that can verify the package signature
-//
-// swagger:model HelmV001SchemaPublicKey
-type HelmV001SchemaPublicKey struct {
-
- // Specifies the content of the public key inline within the document
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this helm v001 schema public key
-func (m *HelmV001SchemaPublicKey) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *HelmV001SchemaPublicKey) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this helm v001 schema public key based on context it is used
-func (m *HelmV001SchemaPublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *HelmV001SchemaPublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *HelmV001SchemaPublicKey) UnmarshalBinary(b []byte) error {
- var res HelmV001SchemaPublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/inactive_shard_log_info.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/inactive_shard_log_info.go
deleted file mode 100644
index c555eb2da6..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/inactive_shard_log_info.go
+++ /dev/null
@@ -1,153 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// InactiveShardLogInfo inactive shard log info
-//
-// swagger:model InactiveShardLogInfo
-type InactiveShardLogInfo struct {
-
- // The current hash value stored at the root of the merkle tree
- // Required: true
- // Pattern: ^[0-9a-fA-F]{64}$
- RootHash *string `json:"rootHash"`
-
- // The current signed tree head
- // Required: true
- SignedTreeHead *string `json:"signedTreeHead"`
-
- // The current treeID
- // Required: true
- // Pattern: ^[0-9]+$
- TreeID *string `json:"treeID"`
-
- // The current number of nodes in the merkle tree
- // Required: true
- // Minimum: 1
- TreeSize *int64 `json:"treeSize"`
-}
-
-// Validate validates this inactive shard log info
-func (m *InactiveShardLogInfo) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateRootHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignedTreeHead(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateTreeID(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateTreeSize(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *InactiveShardLogInfo) validateRootHash(formats strfmt.Registry) error {
-
- if err := validate.Required("rootHash", "body", m.RootHash); err != nil {
- return err
- }
-
- if err := validate.Pattern("rootHash", "body", *m.RootHash, `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *InactiveShardLogInfo) validateSignedTreeHead(formats strfmt.Registry) error {
-
- if err := validate.Required("signedTreeHead", "body", m.SignedTreeHead); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *InactiveShardLogInfo) validateTreeID(formats strfmt.Registry) error {
-
- if err := validate.Required("treeID", "body", m.TreeID); err != nil {
- return err
- }
-
- if err := validate.Pattern("treeID", "body", *m.TreeID, `^[0-9]+$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *InactiveShardLogInfo) validateTreeSize(formats strfmt.Registry) error {
-
- if err := validate.Required("treeSize", "body", m.TreeSize); err != nil {
- return err
- }
-
- if err := validate.MinimumInt("treeSize", "body", *m.TreeSize, 1, false); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this inactive shard log info based on context it is used
-func (m *InactiveShardLogInfo) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *InactiveShardLogInfo) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *InactiveShardLogInfo) UnmarshalBinary(b []byte) error {
- var res InactiveShardLogInfo
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/inclusion_proof.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/inclusion_proof.go
deleted file mode 100644
index 86f0d7b94e..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/inclusion_proof.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// InclusionProof inclusion proof
-//
-// swagger:model InclusionProof
-type InclusionProof struct {
-
- // The checkpoint (signed tree head) that the inclusion proof is based on
- // Required: true
- Checkpoint *string `json:"checkpoint"`
-
- // A list of hashes required to compute the inclusion proof, sorted in order from leaf to root
- // Required: true
- Hashes []string `json:"hashes"`
-
- // The index of the entry in the transparency log
- // Required: true
- // Minimum: 0
- LogIndex *int64 `json:"logIndex"`
-
- // The hash value stored at the root of the merkle tree at the time the proof was generated
- // Required: true
- // Pattern: ^[0-9a-fA-F]{64}$
- RootHash *string `json:"rootHash"`
-
- // The size of the merkle tree at the time the inclusion proof was generated
- // Required: true
- // Minimum: 1
- TreeSize *int64 `json:"treeSize"`
-}
-
-// Validate validates this inclusion proof
-func (m *InclusionProof) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateCheckpoint(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateHashes(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateLogIndex(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateRootHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateTreeSize(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *InclusionProof) validateCheckpoint(formats strfmt.Registry) error {
-
- if err := validate.Required("checkpoint", "body", m.Checkpoint); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *InclusionProof) validateHashes(formats strfmt.Registry) error {
-
- if err := validate.Required("hashes", "body", m.Hashes); err != nil {
- return err
- }
-
- for i := 0; i < len(m.Hashes); i++ {
-
- if err := validate.Pattern("hashes"+"."+strconv.Itoa(i), "body", m.Hashes[i], `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- }
-
- return nil
-}
-
-func (m *InclusionProof) validateLogIndex(formats strfmt.Registry) error {
-
- if err := validate.Required("logIndex", "body", m.LogIndex); err != nil {
- return err
- }
-
- if err := validate.MinimumInt("logIndex", "body", *m.LogIndex, 0, false); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *InclusionProof) validateRootHash(formats strfmt.Registry) error {
-
- if err := validate.Required("rootHash", "body", m.RootHash); err != nil {
- return err
- }
-
- if err := validate.Pattern("rootHash", "body", *m.RootHash, `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *InclusionProof) validateTreeSize(formats strfmt.Registry) error {
-
- if err := validate.Required("treeSize", "body", m.TreeSize); err != nil {
- return err
- }
-
- if err := validate.MinimumInt("treeSize", "body", *m.TreeSize, 1, false); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this inclusion proof based on context it is used
-func (m *InclusionProof) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *InclusionProof) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *InclusionProof) UnmarshalBinary(b []byte) error {
- var res InclusionProof
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto.go
deleted file mode 100644
index 4f208de1d5..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Intoto Intoto object
-//
-// swagger:model intoto
-type Intoto struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec IntotoSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Intoto) Kind() string {
- return "intoto"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Intoto) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Intoto) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec IntotoSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
-		/* Just the base type fields. Used for unmarshalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Intoto
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Intoto) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec IntotoSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this intoto
-func (m *Intoto) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Intoto) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Intoto) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto based on the context it is used
-func (m *Intoto) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Intoto) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Intoto) UnmarshalBinary(b []byte) error {
- var res Intoto
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_schema.go
deleted file mode 100644
index a7fdaa6a6d..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// IntotoSchema Intoto Schema
-//
-// # Intoto for Rekord objects
-//
-// swagger:model intotoSchema
-type IntotoSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_v001_schema.go
deleted file mode 100644
index 6973c72990..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_v001_schema.go
+++ /dev/null
@@ -1,514 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// IntotoV001Schema intoto v0.0.1 Schema
-//
-// # Schema for intoto object
-//
-// swagger:model intotoV001Schema
-type IntotoV001Schema struct {
-
- // content
- // Required: true
- Content *IntotoV001SchemaContent `json:"content"`
-
- // The public key that can verify the signature
- // Required: true
- // Format: byte
- PublicKey *strfmt.Base64 `json:"publicKey"`
-}
-
-// Validate validates this intoto v001 schema
-func (m *IntotoV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV001Schema) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("content", "body", m.Content); err != nil {
- return err
- }
-
- if m.Content != nil {
- if err := m.Content.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV001Schema) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v001 schema based on the context it is used
-func (m *IntotoV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateContent(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV001Schema) contextValidateContent(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Content != nil {
-
- if err := m.Content.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV001Schema) UnmarshalBinary(b []byte) error {
- var res IntotoV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV001SchemaContent intoto v001 schema content
-//
-// swagger:model IntotoV001SchemaContent
-type IntotoV001SchemaContent struct {
-
- // envelope
- Envelope string `json:"envelope,omitempty"`
-
- // hash
- Hash *IntotoV001SchemaContentHash `json:"hash,omitempty"`
-
- // payload hash
- PayloadHash *IntotoV001SchemaContentPayloadHash `json:"payloadHash,omitempty"`
-}
-
-// Validate validates this intoto v001 schema content
-func (m *IntotoV001SchemaContent) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePayloadHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV001SchemaContent) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV001SchemaContent) validatePayloadHash(formats strfmt.Registry) error {
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if m.PayloadHash != nil {
- if err := m.PayloadHash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v001 schema content based on the context it is used
-func (m *IntotoV001SchemaContent) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePayloadHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV001SchemaContent) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV001SchemaContent) contextValidatePayloadHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PayloadHash != nil {
-
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if err := m.PayloadHash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV001SchemaContent) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV001SchemaContent) UnmarshalBinary(b []byte) error {
- var res IntotoV001SchemaContent
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV001SchemaContentHash Specifies the hash algorithm and value encompassing the entire signed envelope; this is computed by the rekor server, client-provided values are ignored
-//
-// swagger:model IntotoV001SchemaContentHash
-type IntotoV001SchemaContentHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the archive
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this intoto v001 schema content hash
-func (m *IntotoV001SchemaContentHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var intotoV001SchemaContentHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- intotoV001SchemaContentHashTypeAlgorithmPropEnum = append(intotoV001SchemaContentHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // IntotoV001SchemaContentHashAlgorithmSha256 captures enum value "sha256"
- IntotoV001SchemaContentHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *IntotoV001SchemaContentHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, intotoV001SchemaContentHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *IntotoV001SchemaContentHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("content"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *IntotoV001SchemaContentHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v001 schema content hash based on the context it is used
-func (m *IntotoV001SchemaContentHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV001SchemaContentHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV001SchemaContentHash) UnmarshalBinary(b []byte) error {
- var res IntotoV001SchemaContentHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV001SchemaContentPayloadHash Specifies the hash algorithm and value covering the payload within the DSSE envelope; this is computed by the rekor server, client-provided values are ignored
-//
-// swagger:model IntotoV001SchemaContentPayloadHash
-type IntotoV001SchemaContentPayloadHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the envelope's payload
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this intoto v001 schema content payload hash
-func (m *IntotoV001SchemaContentPayloadHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var intotoV001SchemaContentPayloadHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- intotoV001SchemaContentPayloadHashTypeAlgorithmPropEnum = append(intotoV001SchemaContentPayloadHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // IntotoV001SchemaContentPayloadHashAlgorithmSha256 captures enum value "sha256"
- IntotoV001SchemaContentPayloadHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *IntotoV001SchemaContentPayloadHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, intotoV001SchemaContentPayloadHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *IntotoV001SchemaContentPayloadHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"payloadHash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("content"+"."+"payloadHash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *IntotoV001SchemaContentPayloadHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"payloadHash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v001 schema content payload hash based on the context it is used
-func (m *IntotoV001SchemaContentPayloadHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV001SchemaContentPayloadHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV001SchemaContentPayloadHash) UnmarshalBinary(b []byte) error {
- var res IntotoV001SchemaContentPayloadHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_v002_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_v002_schema.go
deleted file mode 100644
index 309073a1c7..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/intoto_v002_schema.go
+++ /dev/null
@@ -1,757 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// IntotoV002Schema intoto v0.0.2 Schema
-//
-// # Schema for intoto object
-//
-// swagger:model intotoV002Schema
-type IntotoV002Schema struct {
-
- // content
- // Required: true
- Content *IntotoV002SchemaContent `json:"content"`
-}
-
-// Validate validates this intoto v002 schema
-func (m *IntotoV002Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002Schema) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("content", "body", m.Content); err != nil {
- return err
- }
-
- if m.Content != nil {
- if err := m.Content.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v002 schema based on the context it is used
-func (m *IntotoV002Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateContent(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002Schema) contextValidateContent(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Content != nil {
-
- if err := m.Content.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV002Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV002Schema) UnmarshalBinary(b []byte) error {
- var res IntotoV002Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV002SchemaContent intoto v002 schema content
-//
-// swagger:model IntotoV002SchemaContent
-type IntotoV002SchemaContent struct {
-
- // envelope
- // Required: true
- Envelope *IntotoV002SchemaContentEnvelope `json:"envelope"`
-
- // hash
- Hash *IntotoV002SchemaContentHash `json:"hash,omitempty"`
-
- // payload hash
- PayloadHash *IntotoV002SchemaContentPayloadHash `json:"payloadHash,omitempty"`
-}
-
-// Validate validates this intoto v002 schema content
-func (m *IntotoV002SchemaContent) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateEnvelope(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePayloadHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContent) validateEnvelope(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"envelope", "body", m.Envelope); err != nil {
- return err
- }
-
- if m.Envelope != nil {
- if err := m.Envelope.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "envelope")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "envelope")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContent) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContent) validatePayloadHash(formats strfmt.Registry) error {
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if m.PayloadHash != nil {
- if err := m.PayloadHash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v002 schema content based on the context it is used
-func (m *IntotoV002SchemaContent) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateEnvelope(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePayloadHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContent) contextValidateEnvelope(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Envelope != nil {
-
- if err := m.Envelope.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "envelope")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "envelope")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContent) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContent) contextValidatePayloadHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PayloadHash != nil {
-
- if swag.IsZero(m.PayloadHash) { // not required
- return nil
- }
-
- if err := m.PayloadHash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "payloadHash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "payloadHash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV002SchemaContent) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV002SchemaContent) UnmarshalBinary(b []byte) error {
- var res IntotoV002SchemaContent
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV002SchemaContentEnvelope dsse envelope
-//
-// swagger:model IntotoV002SchemaContentEnvelope
-type IntotoV002SchemaContentEnvelope struct {
-
- // payload of the envelope
- // Format: byte
- Payload strfmt.Base64 `json:"payload,omitempty"`
-
- // type describing the payload
- // Required: true
- PayloadType *string `json:"payloadType"`
-
- // collection of all signatures of the envelope's payload
- // Required: true
- // Min Items: 1
- Signatures []*IntotoV002SchemaContentEnvelopeSignaturesItems0 `json:"signatures"`
-}
-
-// Validate validates this intoto v002 schema content envelope
-func (m *IntotoV002SchemaContentEnvelope) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validatePayloadType(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignatures(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContentEnvelope) validatePayloadType(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"envelope"+"."+"payloadType", "body", m.PayloadType); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContentEnvelope) validateSignatures(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"envelope"+"."+"signatures", "body", m.Signatures); err != nil {
- return err
- }
-
- iSignaturesSize := int64(len(m.Signatures))
-
- if err := validate.MinItems("content"+"."+"envelope"+"."+"signatures", "body", iSignaturesSize, 1); err != nil {
- return err
- }
-
- for i := 0; i < len(m.Signatures); i++ {
- if swag.IsZero(m.Signatures[i]) { // not required
- continue
- }
-
- if m.Signatures[i] != nil {
- if err := m.Signatures[i].Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "envelope" + "." + "signatures" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "envelope" + "." + "signatures" + "." + strconv.Itoa(i))
- }
- return err
- }
- }
-
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v002 schema content envelope based on the context it is used
-func (m *IntotoV002SchemaContentEnvelope) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateSignatures(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContentEnvelope) contextValidateSignatures(ctx context.Context, formats strfmt.Registry) error {
-
- for i := 0; i < len(m.Signatures); i++ {
-
- if m.Signatures[i] != nil {
-
- if swag.IsZero(m.Signatures[i]) { // not required
- return nil
- }
-
- if err := m.Signatures[i].ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("content" + "." + "envelope" + "." + "signatures" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("content" + "." + "envelope" + "." + "signatures" + "." + strconv.Itoa(i))
- }
- return err
- }
- }
-
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV002SchemaContentEnvelope) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV002SchemaContentEnvelope) UnmarshalBinary(b []byte) error {
- var res IntotoV002SchemaContentEnvelope
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV002SchemaContentEnvelopeSignaturesItems0 a signature of the envelope's payload along with the public key for the signature
-//
-// swagger:model IntotoV002SchemaContentEnvelopeSignaturesItems0
-type IntotoV002SchemaContentEnvelopeSignaturesItems0 struct {
-
- // optional id of the key used to create the signature
- Keyid string `json:"keyid,omitempty"`
-
- // public key that corresponds to this signature
- // Required: true
- // Format: byte
- PublicKey *strfmt.Base64 `json:"publicKey"`
-
- // signature of the payload
- // Required: true
- // Format: byte
- Sig *strfmt.Base64 `json:"sig"`
-}
-
-// Validate validates this intoto v002 schema content envelope signatures items0
-func (m *IntotoV002SchemaContentEnvelopeSignaturesItems0) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSig(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContentEnvelopeSignaturesItems0) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContentEnvelopeSignaturesItems0) validateSig(formats strfmt.Registry) error {
-
- if err := validate.Required("sig", "body", m.Sig); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this intoto v002 schema content envelope signatures items0 based on context it is used
-func (m *IntotoV002SchemaContentEnvelopeSignaturesItems0) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV002SchemaContentEnvelopeSignaturesItems0) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV002SchemaContentEnvelopeSignaturesItems0) UnmarshalBinary(b []byte) error {
- var res IntotoV002SchemaContentEnvelopeSignaturesItems0
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV002SchemaContentHash Specifies the hash algorithm and value encompassing the entire signed envelope
-//
-// swagger:model IntotoV002SchemaContentHash
-type IntotoV002SchemaContentHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the archive
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this intoto v002 schema content hash
-func (m *IntotoV002SchemaContentHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var intotoV002SchemaContentHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- intotoV002SchemaContentHashTypeAlgorithmPropEnum = append(intotoV002SchemaContentHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // IntotoV002SchemaContentHashAlgorithmSha256 captures enum value "sha256"
- IntotoV002SchemaContentHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *IntotoV002SchemaContentHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, intotoV002SchemaContentHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContentHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("content"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContentHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v002 schema content hash based on the context it is used
-func (m *IntotoV002SchemaContentHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV002SchemaContentHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV002SchemaContentHash) UnmarshalBinary(b []byte) error {
- var res IntotoV002SchemaContentHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// IntotoV002SchemaContentPayloadHash Specifies the hash algorithm and value covering the payload within the DSSE envelope
-//
-// swagger:model IntotoV002SchemaContentPayloadHash
-type IntotoV002SchemaContentPayloadHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value of the payload
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this intoto v002 schema content payload hash
-func (m *IntotoV002SchemaContentPayloadHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var intotoV002SchemaContentPayloadHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- intotoV002SchemaContentPayloadHashTypeAlgorithmPropEnum = append(intotoV002SchemaContentPayloadHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // IntotoV002SchemaContentPayloadHashAlgorithmSha256 captures enum value "sha256"
- IntotoV002SchemaContentPayloadHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *IntotoV002SchemaContentPayloadHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, intotoV002SchemaContentPayloadHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *IntotoV002SchemaContentPayloadHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"payloadHash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("content"+"."+"payloadHash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *IntotoV002SchemaContentPayloadHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("content"+"."+"payloadHash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this intoto v002 schema content payload hash based on the context it is used
-func (m *IntotoV002SchemaContentPayloadHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *IntotoV002SchemaContentPayloadHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *IntotoV002SchemaContentPayloadHash) UnmarshalBinary(b []byte) error {
- var res IntotoV002SchemaContentPayloadHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/jar.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/jar.go
deleted file mode 100644
index 3df3d21b8a..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/jar.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Jar Java Archive (JAR)
-//
-// swagger:model jar
-type Jar struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec JarSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Jar) Kind() string {
- return "jar"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Jar) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Jar) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec JarSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Jar
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Jar) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec JarSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this jar
-func (m *Jar) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Jar) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Jar) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this jar based on the context it is used
-func (m *Jar) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Jar) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Jar) UnmarshalBinary(b []byte) error {
- var res Jar
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/jar_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/jar_schema.go
deleted file mode 100644
index e7b9a590ed..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/jar_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// JarSchema JAR Schema
-//
-// # Schema for JAR objects
-//
-// swagger:model jarSchema
-type JarSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/jar_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/jar_v001_schema.go
deleted file mode 100644
index 2d741f3c52..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/jar_v001_schema.go
+++ /dev/null
@@ -1,569 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// JarV001Schema JAR v0.0.1 Schema
-//
-// # Schema for JAR entries
-//
-// swagger:model jarV001Schema
-type JarV001Schema struct {
-
- // archive
- // Required: true
- Archive *JarV001SchemaArchive `json:"archive"`
-
- // signature
- Signature *JarV001SchemaSignature `json:"signature,omitempty"`
-}
-
-// Validate validates this jar v001 schema
-func (m *JarV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateArchive(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignature(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001Schema) validateArchive(formats strfmt.Registry) error {
-
- if err := validate.Required("archive", "body", m.Archive); err != nil {
- return err
- }
-
- if m.Archive != nil {
- if err := m.Archive.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("archive")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("archive")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *JarV001Schema) validateSignature(formats strfmt.Registry) error {
- if swag.IsZero(m.Signature) { // not required
- return nil
- }
-
- if m.Signature != nil {
- if err := m.Signature.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this jar v001 schema based on the context it is used
-func (m *JarV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateArchive(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateSignature(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001Schema) contextValidateArchive(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Archive != nil {
-
- if err := m.Archive.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("archive")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("archive")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *JarV001Schema) contextValidateSignature(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Signature != nil {
-
- if swag.IsZero(m.Signature) { // not required
- return nil
- }
-
- if err := m.Signature.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *JarV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *JarV001Schema) UnmarshalBinary(b []byte) error {
- var res JarV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// JarV001SchemaArchive Information about the archive associated with the entry
-//
-// swagger:model JarV001SchemaArchive
-type JarV001SchemaArchive struct {
-
- // Specifies the archive inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // hash
- Hash *JarV001SchemaArchiveHash `json:"hash,omitempty"`
-}
-
-// Validate validates this jar v001 schema archive
-func (m *JarV001SchemaArchive) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001SchemaArchive) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("archive" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("archive" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this jar v001 schema archive based on the context it is used
-func (m *JarV001SchemaArchive) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001SchemaArchive) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("archive" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("archive" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *JarV001SchemaArchive) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *JarV001SchemaArchive) UnmarshalBinary(b []byte) error {
- var res JarV001SchemaArchive
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// JarV001SchemaArchiveHash Specifies the hash algorithm and value encompassing the entire signed archive
-//
-// swagger:model JarV001SchemaArchiveHash
-type JarV001SchemaArchiveHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the archive
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this jar v001 schema archive hash
-func (m *JarV001SchemaArchiveHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var jarV001SchemaArchiveHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- jarV001SchemaArchiveHashTypeAlgorithmPropEnum = append(jarV001SchemaArchiveHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // JarV001SchemaArchiveHashAlgorithmSha256 captures enum value "sha256"
- JarV001SchemaArchiveHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *JarV001SchemaArchiveHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, jarV001SchemaArchiveHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *JarV001SchemaArchiveHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("archive"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("archive"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *JarV001SchemaArchiveHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("archive"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this jar v001 schema archive hash based on context it is used
-func (m *JarV001SchemaArchiveHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *JarV001SchemaArchiveHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *JarV001SchemaArchiveHash) UnmarshalBinary(b []byte) error {
- var res JarV001SchemaArchiveHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// JarV001SchemaSignature Information about the included signature in the JAR file
-//
-// swagger:model JarV001SchemaSignature
-type JarV001SchemaSignature struct {
-
- // Specifies the PKCS7 signature embedded within the JAR file
- // Required: true
- // Read Only: true
- // Format: byte
- Content strfmt.Base64 `json:"content"`
-
- // public key
- // Required: true
- PublicKey *JarV001SchemaSignaturePublicKey `json:"publicKey"`
-}
-
-// Validate validates this jar v001 schema signature
-func (m *JarV001SchemaSignature) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001SchemaSignature) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"content", "body", strfmt.Base64(m.Content)); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *JarV001SchemaSignature) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature" + "." + "publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature" + "." + "publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this jar v001 schema signature based on the context it is used
-func (m *JarV001SchemaSignature) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateContent(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001SchemaSignature) contextValidateContent(ctx context.Context, formats strfmt.Registry) error {
-
- if err := validate.ReadOnly(ctx, "signature"+"."+"content", "body", strfmt.Base64(m.Content)); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *JarV001SchemaSignature) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature" + "." + "publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature" + "." + "publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *JarV001SchemaSignature) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *JarV001SchemaSignature) UnmarshalBinary(b []byte) error {
- var res JarV001SchemaSignature
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// JarV001SchemaSignaturePublicKey The X509 certificate containing the public key JAR which verifies the signature of the JAR
-//
-// swagger:model JarV001SchemaSignaturePublicKey
-type JarV001SchemaSignaturePublicKey struct {
-
- // Specifies the content of the X509 certificate containing the public key used to verify the signature
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this jar v001 schema signature public key
-func (m *JarV001SchemaSignaturePublicKey) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *JarV001SchemaSignaturePublicKey) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"publicKey"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this jar v001 schema signature public key based on the context it is used
-func (m *JarV001SchemaSignaturePublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *JarV001SchemaSignaturePublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *JarV001SchemaSignaturePublicKey) UnmarshalBinary(b []byte) error {
- var res JarV001SchemaSignaturePublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/log_entry.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/log_entry.go
deleted file mode 100644
index ee32ded414..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/log_entry.go
+++ /dev/null
@@ -1,445 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// LogEntry log entry
-//
-// swagger:model LogEntry
-type LogEntry map[string]LogEntryAnon
-
-// Validate validates this log entry
-func (m LogEntry) Validate(formats strfmt.Registry) error {
- var res []error
-
- for k := range m {
-
- if swag.IsZero(m[k]) { // not required
- continue
- }
- if val, ok := m[k]; ok {
- if err := val.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName(k)
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName(k)
- }
- return err
- }
- }
-
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// ContextValidate validate this log entry based on the context it is used
-func (m LogEntry) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- for k := range m {
-
- if val, ok := m[k]; ok {
- if err := val.ContextValidate(ctx, formats); err != nil {
- return err
- }
- }
-
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// LogEntryAnon log entry anon
-//
-// swagger:model LogEntryAnon
-type LogEntryAnon struct {
-
- // attestation
- Attestation *LogEntryAnonAttestation `json:"attestation,omitempty"`
-
- // body
- // Required: true
- Body interface{} `json:"body"`
-
- // The time the entry was added to the log as a Unix timestamp in seconds
- // Required: true
- IntegratedTime *int64 `json:"integratedTime"`
-
- // This is the SHA256 hash of the DER-encoded public key for the log at the time the entry was included in the log
- // Required: true
- // Pattern: ^[0-9a-fA-F]{64}$
- LogID *string `json:"logID"`
-
- // log index
- // Required: true
- // Minimum: 0
- LogIndex *int64 `json:"logIndex"`
-
- // verification
- Verification *LogEntryAnonVerification `json:"verification,omitempty"`
-}
-
-// Validate validates this log entry anon
-func (m *LogEntryAnon) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAttestation(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateBody(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateIntegratedTime(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateLogID(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateLogIndex(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateVerification(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *LogEntryAnon) validateAttestation(formats strfmt.Registry) error {
- if swag.IsZero(m.Attestation) { // not required
- return nil
- }
-
- if m.Attestation != nil {
- if err := m.Attestation.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("attestation")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("attestation")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *LogEntryAnon) validateBody(formats strfmt.Registry) error {
-
- if m.Body == nil {
- return errors.Required("body", "body", nil)
- }
-
- return nil
-}
-
-func (m *LogEntryAnon) validateIntegratedTime(formats strfmt.Registry) error {
-
- if err := validate.Required("integratedTime", "body", m.IntegratedTime); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *LogEntryAnon) validateLogID(formats strfmt.Registry) error {
-
- if err := validate.Required("logID", "body", m.LogID); err != nil {
- return err
- }
-
- if err := validate.Pattern("logID", "body", *m.LogID, `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *LogEntryAnon) validateLogIndex(formats strfmt.Registry) error {
-
- if err := validate.Required("logIndex", "body", m.LogIndex); err != nil {
- return err
- }
-
- if err := validate.MinimumInt("logIndex", "body", *m.LogIndex, 0, false); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *LogEntryAnon) validateVerification(formats strfmt.Registry) error {
- if swag.IsZero(m.Verification) { // not required
- return nil
- }
-
- if m.Verification != nil {
- if err := m.Verification.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("verification")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("verification")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this log entry anon based on the context it is used
-func (m *LogEntryAnon) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateAttestation(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateVerification(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *LogEntryAnon) contextValidateAttestation(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Attestation != nil {
-
- if swag.IsZero(m.Attestation) { // not required
- return nil
- }
-
- if err := m.Attestation.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("attestation")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("attestation")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *LogEntryAnon) contextValidateVerification(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Verification != nil {
-
- if swag.IsZero(m.Verification) { // not required
- return nil
- }
-
- if err := m.Verification.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("verification")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("verification")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *LogEntryAnon) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *LogEntryAnon) UnmarshalBinary(b []byte) error {
- var res LogEntryAnon
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// LogEntryAnonAttestation log entry anon attestation
-//
-// swagger:model LogEntryAnonAttestation
-type LogEntryAnonAttestation struct {
-
- // data
- // Format: byte
- Data strfmt.Base64 `json:"data,omitempty"`
-}
-
-// Validate validates this log entry anon attestation
-func (m *LogEntryAnonAttestation) Validate(formats strfmt.Registry) error {
- return nil
-}
-
-// ContextValidate validates this log entry anon attestation based on context it is used
-func (m *LogEntryAnonAttestation) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *LogEntryAnonAttestation) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *LogEntryAnonAttestation) UnmarshalBinary(b []byte) error {
- var res LogEntryAnonAttestation
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// LogEntryAnonVerification log entry anon verification
-//
-// swagger:model LogEntryAnonVerification
-type LogEntryAnonVerification struct {
-
- // inclusion proof
- InclusionProof *InclusionProof `json:"inclusionProof,omitempty"`
-
- // Signature over the logID, logIndex, body and integratedTime.
- // Format: byte
- SignedEntryTimestamp strfmt.Base64 `json:"signedEntryTimestamp,omitempty"`
-}
-
-// Validate validates this log entry anon verification
-func (m *LogEntryAnonVerification) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateInclusionProof(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *LogEntryAnonVerification) validateInclusionProof(formats strfmt.Registry) error {
- if swag.IsZero(m.InclusionProof) { // not required
- return nil
- }
-
- if m.InclusionProof != nil {
- if err := m.InclusionProof.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("verification" + "." + "inclusionProof")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("verification" + "." + "inclusionProof")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this log entry anon verification based on the context it is used
-func (m *LogEntryAnonVerification) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateInclusionProof(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *LogEntryAnonVerification) contextValidateInclusionProof(ctx context.Context, formats strfmt.Registry) error {
-
- if m.InclusionProof != nil {
-
- if swag.IsZero(m.InclusionProof) { // not required
- return nil
- }
-
- if err := m.InclusionProof.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("verification" + "." + "inclusionProof")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("verification" + "." + "inclusionProof")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *LogEntryAnonVerification) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *LogEntryAnonVerification) UnmarshalBinary(b []byte) error {
- var res LogEntryAnonVerification
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/log_info.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/log_info.go
deleted file mode 100644
index cb57b27f51..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/log_info.go
+++ /dev/null
@@ -1,221 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// LogInfo log info
-//
-// swagger:model LogInfo
-type LogInfo struct {
-
- // inactive shards
- InactiveShards []*InactiveShardLogInfo `json:"inactiveShards"`
-
- // The current hash value stored at the root of the merkle tree
- // Required: true
- // Pattern: ^[0-9a-fA-F]{64}$
- RootHash *string `json:"rootHash"`
-
- // The current signed tree head
- // Required: true
- SignedTreeHead *string `json:"signedTreeHead"`
-
- // The current treeID
- // Required: true
- // Pattern: ^[0-9]+$
- TreeID *string `json:"treeID"`
-
- // The current number of nodes in the merkle tree
- // Required: true
- // Minimum: 1
- TreeSize *int64 `json:"treeSize"`
-}
-
-// Validate validates this log info
-func (m *LogInfo) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateInactiveShards(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateRootHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignedTreeHead(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateTreeID(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateTreeSize(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *LogInfo) validateInactiveShards(formats strfmt.Registry) error {
- if swag.IsZero(m.InactiveShards) { // not required
- return nil
- }
-
- for i := 0; i < len(m.InactiveShards); i++ {
- if swag.IsZero(m.InactiveShards[i]) { // not required
- continue
- }
-
- if m.InactiveShards[i] != nil {
- if err := m.InactiveShards[i].Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("inactiveShards" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("inactiveShards" + "." + strconv.Itoa(i))
- }
- return err
- }
- }
-
- }
-
- return nil
-}
-
-func (m *LogInfo) validateRootHash(formats strfmt.Registry) error {
-
- if err := validate.Required("rootHash", "body", m.RootHash); err != nil {
- return err
- }
-
- if err := validate.Pattern("rootHash", "body", *m.RootHash, `^[0-9a-fA-F]{64}$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *LogInfo) validateSignedTreeHead(formats strfmt.Registry) error {
-
- if err := validate.Required("signedTreeHead", "body", m.SignedTreeHead); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *LogInfo) validateTreeID(formats strfmt.Registry) error {
-
- if err := validate.Required("treeID", "body", m.TreeID); err != nil {
- return err
- }
-
- if err := validate.Pattern("treeID", "body", *m.TreeID, `^[0-9]+$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *LogInfo) validateTreeSize(formats strfmt.Registry) error {
-
- if err := validate.Required("treeSize", "body", m.TreeSize); err != nil {
- return err
- }
-
- if err := validate.MinimumInt("treeSize", "body", *m.TreeSize, 1, false); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this log info based on the context it is used
-func (m *LogInfo) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateInactiveShards(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *LogInfo) contextValidateInactiveShards(ctx context.Context, formats strfmt.Registry) error {
-
- for i := 0; i < len(m.InactiveShards); i++ {
-
- if m.InactiveShards[i] != nil {
-
- if swag.IsZero(m.InactiveShards[i]) { // not required
- return nil
- }
-
- if err := m.InactiveShards[i].ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("inactiveShards" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("inactiveShards" + "." + strconv.Itoa(i))
- }
- return err
- }
- }
-
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *LogInfo) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *LogInfo) UnmarshalBinary(b []byte) error {
- var res LogInfo
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/proposed_entry.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/proposed_entry.go
deleted file mode 100644
index 5b734a5fff..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/proposed_entry.go
+++ /dev/null
@@ -1,195 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
- "io"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/validate"
-)
-
-// ProposedEntry proposed entry
-//
-// swagger:discriminator ProposedEntry kind
-type ProposedEntry interface {
- runtime.Validatable
- runtime.ContextValidatable
-
- // kind
- // Required: true
- Kind() string
- SetKind(string)
-
- // AdditionalProperties in base type shoud be handled just like regular properties
- // At this moment, the base type property is pushed down to the subtype
-}
-
-type proposedEntry struct {
- kindField string
-}
-
-// Kind gets the kind of this polymorphic type
-func (m *proposedEntry) Kind() string {
- return "ProposedEntry"
-}
-
-// SetKind sets the kind of this polymorphic type
-func (m *proposedEntry) SetKind(val string) {
-}
-
-// UnmarshalProposedEntrySlice unmarshals polymorphic slices of ProposedEntry
-func UnmarshalProposedEntrySlice(reader io.Reader, consumer runtime.Consumer) ([]ProposedEntry, error) {
- var elements []json.RawMessage
- if err := consumer.Consume(reader, &elements); err != nil {
- return nil, err
- }
-
- var result []ProposedEntry
- for _, element := range elements {
- obj, err := unmarshalProposedEntry(element, consumer)
- if err != nil {
- return nil, err
- }
- result = append(result, obj)
- }
- return result, nil
-}
-
-// UnmarshalProposedEntry unmarshals polymorphic ProposedEntry
-func UnmarshalProposedEntry(reader io.Reader, consumer runtime.Consumer) (ProposedEntry, error) {
- // we need to read this twice, so first into a buffer
- data, err := io.ReadAll(reader)
- if err != nil {
- return nil, err
- }
- return unmarshalProposedEntry(data, consumer)
-}
-
-func unmarshalProposedEntry(data []byte, consumer runtime.Consumer) (ProposedEntry, error) {
- buf := bytes.NewBuffer(data)
- buf2 := bytes.NewBuffer(data)
-
- // the first time this is read is to fetch the value of the kind property.
- var getType struct {
- Kind string `json:"kind"`
- }
- if err := consumer.Consume(buf, &getType); err != nil {
- return nil, err
- }
-
- if err := validate.RequiredString("kind", "body", getType.Kind); err != nil {
- return nil, err
- }
-
- // The value of kind is used to determine which type to create and unmarshal the data into
- switch getType.Kind {
- case "ProposedEntry":
- var result proposedEntry
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "alpine":
- var result Alpine
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "cose":
- var result Cose
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "dsse":
- var result DSSE
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "hashedrekord":
- var result Hashedrekord
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "helm":
- var result Helm
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "intoto":
- var result Intoto
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "jar":
- var result Jar
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "rekord":
- var result Rekord
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "rfc3161":
- var result Rfc3161
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "rpm":
- var result Rpm
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- case "tuf":
- var result TUF
- if err := consumer.Consume(buf2, &result); err != nil {
- return nil, err
- }
- return &result, nil
- }
- return nil, errors.New(422, "invalid kind value: %q", getType.Kind)
-}
-
-// Validate validates this proposed entry
-func (m *proposedEntry) Validate(formats strfmt.Registry) error {
- return nil
-}
-
-// ContextValidate validates this proposed entry based on context it is used
-func (m *proposedEntry) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord.go
deleted file mode 100644
index 81c8ff0545..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Rekord Rekord object
-//
-// swagger:model rekord
-type Rekord struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec RekordSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Rekord) Kind() string {
- return "rekord"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Rekord) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Rekord) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec RekordSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Rekord
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Rekord) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec RekordSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this rekord
-func (m *Rekord) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Rekord) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Rekord) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this rekord based on the context it is used
-func (m *Rekord) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Rekord) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Rekord) UnmarshalBinary(b []byte) error {
- var res Rekord
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord_schema.go
deleted file mode 100644
index e85442ae97..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// RekordSchema Rekor Schema
-//
-// # Schema for Rekord objects
-//
-// swagger:model rekordSchema
-type RekordSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord_v001_schema.go
deleted file mode 100644
index aaaad9d7b4..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rekord_v001_schema.go
+++ /dev/null
@@ -1,611 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// RekordV001Schema Rekor v0.0.1 Schema
-//
-// # Schema for Rekord object
-//
-// swagger:model rekordV001Schema
-type RekordV001Schema struct {
-
- // data
- // Required: true
- Data *RekordV001SchemaData `json:"data"`
-
- // signature
- // Required: true
- Signature *RekordV001SchemaSignature `json:"signature"`
-}
-
-// Validate validates this rekord v001 schema
-func (m *RekordV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateData(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSignature(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001Schema) validateData(formats strfmt.Registry) error {
-
- if err := validate.Required("data", "body", m.Data); err != nil {
- return err
- }
-
- if m.Data != nil {
- if err := m.Data.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *RekordV001Schema) validateSignature(formats strfmt.Registry) error {
-
- if err := validate.Required("signature", "body", m.Signature); err != nil {
- return err
- }
-
- if m.Signature != nil {
- if err := m.Signature.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this rekord v001 schema based on the context it is used
-func (m *RekordV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateData(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateSignature(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001Schema) contextValidateData(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Data != nil {
-
- if err := m.Data.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *RekordV001Schema) contextValidateSignature(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Signature != nil {
-
- if err := m.Signature.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RekordV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RekordV001Schema) UnmarshalBinary(b []byte) error {
- var res RekordV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RekordV001SchemaData Information about the content associated with the entry
-//
-// swagger:model RekordV001SchemaData
-type RekordV001SchemaData struct {
-
- // Specifies the content inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // hash
- Hash *RekordV001SchemaDataHash `json:"hash,omitempty"`
-}
-
-// Validate validates this rekord v001 schema data
-func (m *RekordV001SchemaData) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001SchemaData) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this rekord v001 schema data based on the context it is used
-func (m *RekordV001SchemaData) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001SchemaData) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("data" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("data" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RekordV001SchemaData) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RekordV001SchemaData) UnmarshalBinary(b []byte) error {
- var res RekordV001SchemaData
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RekordV001SchemaDataHash Specifies the hash algorithm and value for the content
-//
-// swagger:model RekordV001SchemaDataHash
-type RekordV001SchemaDataHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the content
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this rekord v001 schema data hash
-func (m *RekordV001SchemaDataHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var rekordV001SchemaDataHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- rekordV001SchemaDataHashTypeAlgorithmPropEnum = append(rekordV001SchemaDataHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // RekordV001SchemaDataHashAlgorithmSha256 captures enum value "sha256"
- RekordV001SchemaDataHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *RekordV001SchemaDataHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, rekordV001SchemaDataHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *RekordV001SchemaDataHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("data"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *RekordV001SchemaDataHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("data"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validate this rekord v001 schema data hash based on the context it is used
-func (m *RekordV001SchemaDataHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RekordV001SchemaDataHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RekordV001SchemaDataHash) UnmarshalBinary(b []byte) error {
- var res RekordV001SchemaDataHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RekordV001SchemaSignature Information about the detached signature associated with the entry
-//
-// swagger:model RekordV001SchemaSignature
-type RekordV001SchemaSignature struct {
-
- // Specifies the content of the signature inline within the document
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-
- // Specifies the format of the signature
- // Required: true
- // Enum: ["pgp","minisign","x509","ssh"]
- Format *string `json:"format"`
-
- // public key
- // Required: true
- PublicKey *RekordV001SchemaSignaturePublicKey `json:"publicKey"`
-}
-
-// Validate validates this rekord v001 schema signature
-func (m *RekordV001SchemaSignature) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateFormat(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001SchemaSignature) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-var rekordV001SchemaSignatureTypeFormatPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["pgp","minisign","x509","ssh"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- rekordV001SchemaSignatureTypeFormatPropEnum = append(rekordV001SchemaSignatureTypeFormatPropEnum, v)
- }
-}
-
-const (
-
- // RekordV001SchemaSignatureFormatPgp captures enum value "pgp"
- RekordV001SchemaSignatureFormatPgp string = "pgp"
-
- // RekordV001SchemaSignatureFormatMinisign captures enum value "minisign"
- RekordV001SchemaSignatureFormatMinisign string = "minisign"
-
- // RekordV001SchemaSignatureFormatX509 captures enum value "x509"
- RekordV001SchemaSignatureFormatX509 string = "x509"
-
- // RekordV001SchemaSignatureFormatSSH captures enum value "ssh"
- RekordV001SchemaSignatureFormatSSH string = "ssh"
-)
-
-// prop value enum
-func (m *RekordV001SchemaSignature) validateFormatEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, rekordV001SchemaSignatureTypeFormatPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *RekordV001SchemaSignature) validateFormat(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"format", "body", m.Format); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateFormatEnum("signature"+"."+"format", "body", *m.Format); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *RekordV001SchemaSignature) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature" + "." + "publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature" + "." + "publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this rekord v001 schema signature based on the context it is used
-func (m *RekordV001SchemaSignature) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001SchemaSignature) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("signature" + "." + "publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("signature" + "." + "publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RekordV001SchemaSignature) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RekordV001SchemaSignature) UnmarshalBinary(b []byte) error {
- var res RekordV001SchemaSignature
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RekordV001SchemaSignaturePublicKey The public key that can verify the signature
-//
-// swagger:model RekordV001SchemaSignaturePublicKey
-type RekordV001SchemaSignaturePublicKey struct {
-
- // Specifies the content of the public key inline within the document
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this rekord v001 schema signature public key
-func (m *RekordV001SchemaSignaturePublicKey) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RekordV001SchemaSignaturePublicKey) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("signature"+"."+"publicKey"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this rekord v001 schema signature public key based on context it is used
-func (m *RekordV001SchemaSignaturePublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RekordV001SchemaSignaturePublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RekordV001SchemaSignaturePublicKey) UnmarshalBinary(b []byte) error {
- var res RekordV001SchemaSignaturePublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161.go
deleted file mode 100644
index ef8d42e7a2..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Rfc3161 RFC3161 Timestamp
-//
-// swagger:model rfc3161
-type Rfc3161 struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec Rfc3161Schema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Rfc3161) Kind() string {
- return "rfc3161"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Rfc3161) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Rfc3161) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec Rfc3161Schema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Rfc3161
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Rfc3161) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec Rfc3161Schema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this rfc3161
-func (m *Rfc3161) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Rfc3161) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Rfc3161) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this rfc3161 based on the context it is used
-func (m *Rfc3161) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Rfc3161) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Rfc3161) UnmarshalBinary(b []byte) error {
- var res Rfc3161
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161_schema.go
deleted file mode 100644
index 826013a28d..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// Rfc3161Schema Timestamp Schema
-//
-// # Schema for RFC 3161 timestamp objects
-//
-// swagger:model rfc3161Schema
-type Rfc3161Schema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161_v001_schema.go
deleted file mode 100644
index c3a50c8492..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rfc3161_v001_schema.go
+++ /dev/null
@@ -1,183 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Rfc3161V001Schema Timestamp v0.0.1 Schema
-//
-// # Schema for RFC3161 entries
-//
-// swagger:model rfc3161V001Schema
-type Rfc3161V001Schema struct {
-
- // tsr
- // Required: true
- Tsr *Rfc3161V001SchemaTsr `json:"tsr"`
-}
-
-// Validate validates this rfc3161 v001 schema
-func (m *Rfc3161V001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateTsr(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Rfc3161V001Schema) validateTsr(formats strfmt.Registry) error {
-
- if err := validate.Required("tsr", "body", m.Tsr); err != nil {
- return err
- }
-
- if m.Tsr != nil {
- if err := m.Tsr.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("tsr")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("tsr")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this rfc3161 v001 schema based on the context it is used
-func (m *Rfc3161V001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateTsr(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Rfc3161V001Schema) contextValidateTsr(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Tsr != nil {
-
- if err := m.Tsr.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("tsr")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("tsr")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Rfc3161V001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Rfc3161V001Schema) UnmarshalBinary(b []byte) error {
- var res Rfc3161V001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// Rfc3161V001SchemaTsr Information about the tsr file associated with the entry
-//
-// swagger:model Rfc3161V001SchemaTsr
-type Rfc3161V001SchemaTsr struct {
-
- // Specifies the tsr file content inline within the document
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this rfc3161 v001 schema tsr
-func (m *Rfc3161V001SchemaTsr) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Rfc3161V001SchemaTsr) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("tsr"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this rfc3161 v001 schema tsr based on context it is used
-func (m *Rfc3161V001SchemaTsr) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Rfc3161V001SchemaTsr) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Rfc3161V001SchemaTsr) UnmarshalBinary(b []byte) error {
- var res Rfc3161V001SchemaTsr
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm.go
deleted file mode 100644
index 8b1f10c77e..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// Rpm RPM package
-//
-// swagger:model rpm
-type Rpm struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec RpmSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *Rpm) Kind() string {
- return "rpm"
-}
-
-// SetKind sets the kind of this subtype
-func (m *Rpm) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *Rpm) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec RpmSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result Rpm
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m Rpm) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec RpmSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this rpm
-func (m *Rpm) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *Rpm) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *Rpm) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this rpm based on the context it is used
-func (m *Rpm) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *Rpm) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *Rpm) UnmarshalBinary(b []byte) error {
- var res Rpm
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
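
The generated Rpm model above resolves its polymorphic type in two passes: it first decodes only the "kind" discriminator, rejects anything that is not "rpm", and only then unmarshals the full body. A minimal stand-alone sketch of that pattern (rpmEntry and decodeRpm are hypothetical names for illustration, not the vendored code itself):

package main

import (
	"encoding/json"
	"fmt"
)

type rpmEntry struct {
	APIVersion string          `json:"apiVersion"`
	Spec       json.RawMessage `json:"spec"`
}

func decodeRpm(raw []byte) (*rpmEntry, error) {
	// First pass: peek at the discriminator only, mirroring the two-pass decode above.
	var base struct {
		Kind string `json:"kind"`
	}
	if err := json.Unmarshal(raw, &base); err != nil {
		return nil, err
	}
	if base.Kind != "rpm" {
		// Not the type we're looking for.
		return nil, fmt.Errorf("invalid kind value: %q", base.Kind)
	}
	// Second pass: unmarshal the full body now that the kind is confirmed.
	var e rpmEntry
	if err := json.Unmarshal(raw, &e); err != nil {
		return nil, err
	}
	return &e, nil
}

func main() {
	e, err := decodeRpm([]byte(`{"kind":"rpm","apiVersion":"0.0.1","spec":{}}`))
	fmt.Println(e, err)
}
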
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm_schema.go
deleted file mode 100644
index 5cb378366f..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// RpmSchema RPM Schema
-//
-// # Schema for RPM objects
-//
-// swagger:model rpmSchema
-type RpmSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm_v001_schema.go
deleted file mode 100644
index 394eece414..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/rpm_v001_schema.go
+++ /dev/null
@@ -1,450 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// RpmV001Schema RPM v0.0.1 Schema
-//
-// # Schema for RPM entries
-//
-// swagger:model rpmV001Schema
-type RpmV001Schema struct {
-
- // package
- // Required: true
- Package *RpmV001SchemaPackage `json:"package"`
-
- // public key
- // Required: true
- PublicKey *RpmV001SchemaPublicKey `json:"publicKey"`
-}
-
-// Validate validates this rpm v001 schema
-func (m *RpmV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validatePackage(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RpmV001Schema) validatePackage(formats strfmt.Registry) error {
-
- if err := validate.Required("package", "body", m.Package); err != nil {
- return err
- }
-
- if m.Package != nil {
- if err := m.Package.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *RpmV001Schema) validatePublicKey(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey", "body", m.PublicKey); err != nil {
- return err
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this rpm v001 schema based on the context it is used
-func (m *RpmV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidatePackage(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RpmV001Schema) contextValidatePackage(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Package != nil {
-
- if err := m.Package.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *RpmV001Schema) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RpmV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RpmV001Schema) UnmarshalBinary(b []byte) error {
- var res RpmV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RpmV001SchemaPackage Information about the package associated with the entry
-//
-// swagger:model RpmV001SchemaPackage
-type RpmV001SchemaPackage struct {
-
- // Specifies the package inline within the document
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // hash
- Hash *RpmV001SchemaPackageHash `json:"hash,omitempty"`
-
- // Values of the RPM headers
- // Read Only: true
- Headers map[string]string `json:"headers,omitempty"`
-}
-
-// Validate validates this rpm v001 schema package
-func (m *RpmV001SchemaPackage) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RpmV001SchemaPackage) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if m.Hash != nil {
- if err := m.Hash.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this rpm v001 schema package based on the context it is used
-func (m *RpmV001SchemaPackage) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateHash(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateHeaders(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RpmV001SchemaPackage) contextValidateHash(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Hash != nil {
-
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := m.Hash.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("package" + "." + "hash")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("package" + "." + "hash")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *RpmV001SchemaPackage) contextValidateHeaders(ctx context.Context, formats strfmt.Registry) error {
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RpmV001SchemaPackage) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RpmV001SchemaPackage) UnmarshalBinary(b []byte) error {
- var res RpmV001SchemaPackage
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RpmV001SchemaPackageHash Specifies the hash algorithm and value for the package
-//
-// swagger:model RpmV001SchemaPackageHash
-type RpmV001SchemaPackageHash struct {
-
- // The hashing function used to compute the hash value
- // Required: true
- // Enum: ["sha256"]
- Algorithm *string `json:"algorithm"`
-
- // The hash value for the package
- // Required: true
- Value *string `json:"value"`
-}
-
-// Validate validates this rpm v001 schema package hash
-func (m *RpmV001SchemaPackageHash) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAlgorithm(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateValue(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var rpmV001SchemaPackageHashTypeAlgorithmPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["sha256"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- rpmV001SchemaPackageHashTypeAlgorithmPropEnum = append(rpmV001SchemaPackageHashTypeAlgorithmPropEnum, v)
- }
-}
-
-const (
-
- // RpmV001SchemaPackageHashAlgorithmSha256 captures enum value "sha256"
- RpmV001SchemaPackageHashAlgorithmSha256 string = "sha256"
-)
-
-// prop value enum
-func (m *RpmV001SchemaPackageHash) validateAlgorithmEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, rpmV001SchemaPackageHashTypeAlgorithmPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *RpmV001SchemaPackageHash) validateAlgorithm(formats strfmt.Registry) error {
-
- if err := validate.Required("package"+"."+"hash"+"."+"algorithm", "body", m.Algorithm); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateAlgorithmEnum("package"+"."+"hash"+"."+"algorithm", "body", *m.Algorithm); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *RpmV001SchemaPackageHash) validateValue(formats strfmt.Registry) error {
-
- if err := validate.Required("package"+"."+"hash"+"."+"value", "body", m.Value); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this rpm v001 schema package hash based on context it is used
-func (m *RpmV001SchemaPackageHash) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RpmV001SchemaPackageHash) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RpmV001SchemaPackageHash) UnmarshalBinary(b []byte) error {
- var res RpmV001SchemaPackageHash
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// RpmV001SchemaPublicKey The PGP public key that can verify the RPM signature
-//
-// swagger:model RpmV001SchemaPublicKey
-type RpmV001SchemaPublicKey struct {
-
- // Specifies the content of the public key inline within the document
- // Required: true
- // Format: byte
- Content *strfmt.Base64 `json:"content"`
-}
-
-// Validate validates this rpm v001 schema public key
-func (m *RpmV001SchemaPublicKey) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *RpmV001SchemaPublicKey) validateContent(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey"+"."+"content", "body", m.Content); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this rpm v001 schema public key based on context it is used
-func (m *RpmV001SchemaPublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *RpmV001SchemaPublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *RpmV001SchemaPublicKey) UnmarshalBinary(b []byte) error {
- var res RpmV001SchemaPublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/search_index.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/search_index.go
deleted file mode 100644
index 0f66abb5b6..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/search_index.go
+++ /dev/null
@@ -1,341 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// SearchIndex search index
-//
-// swagger:model SearchIndex
-type SearchIndex struct {
-
- // email
- // Format: email
- Email strfmt.Email `json:"email,omitempty"`
-
- // hash
- // Pattern: ^(sha512:)?[0-9a-fA-F]{128}$|^(sha256:)?[0-9a-fA-F]{64}$|^(sha1:)?[0-9a-fA-F]{40}$
- Hash string `json:"hash,omitempty"`
-
- // operator
- // Enum: ["and","or"]
- Operator string `json:"operator,omitempty"`
-
- // public key
- PublicKey *SearchIndexPublicKey `json:"publicKey,omitempty"`
-}
-
-// Validate validates this search index
-func (m *SearchIndex) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateEmail(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateHash(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateOperator(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validatePublicKey(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *SearchIndex) validateEmail(formats strfmt.Registry) error {
- if swag.IsZero(m.Email) { // not required
- return nil
- }
-
- if err := validate.FormatOf("email", "body", "email", m.Email.String(), formats); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *SearchIndex) validateHash(formats strfmt.Registry) error {
- if swag.IsZero(m.Hash) { // not required
- return nil
- }
-
- if err := validate.Pattern("hash", "body", m.Hash, `^(sha512:)?[0-9a-fA-F]{128}$|^(sha256:)?[0-9a-fA-F]{64}$|^(sha1:)?[0-9a-fA-F]{40}$`); err != nil {
- return err
- }
-
- return nil
-}
-
-var searchIndexTypeOperatorPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["and","or"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- searchIndexTypeOperatorPropEnum = append(searchIndexTypeOperatorPropEnum, v)
- }
-}
-
-const (
-
- // SearchIndexOperatorAnd captures enum value "and"
- SearchIndexOperatorAnd string = "and"
-
- // SearchIndexOperatorOr captures enum value "or"
- SearchIndexOperatorOr string = "or"
-)
-
-// prop value enum
-func (m *SearchIndex) validateOperatorEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, searchIndexTypeOperatorPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *SearchIndex) validateOperator(formats strfmt.Registry) error {
- if swag.IsZero(m.Operator) { // not required
- return nil
- }
-
- // value enum
- if err := m.validateOperatorEnum("operator", "body", m.Operator); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *SearchIndex) validatePublicKey(formats strfmt.Registry) error {
- if swag.IsZero(m.PublicKey) { // not required
- return nil
- }
-
- if m.PublicKey != nil {
- if err := m.PublicKey.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this search index based on the context it is used
-func (m *SearchIndex) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidatePublicKey(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *SearchIndex) contextValidatePublicKey(ctx context.Context, formats strfmt.Registry) error {
-
- if m.PublicKey != nil {
-
- if swag.IsZero(m.PublicKey) { // not required
- return nil
- }
-
- if err := m.PublicKey.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("publicKey")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("publicKey")
- }
- return err
- }
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *SearchIndex) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *SearchIndex) UnmarshalBinary(b []byte) error {
- var res SearchIndex
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// SearchIndexPublicKey search index public key
-//
-// swagger:model SearchIndexPublicKey
-type SearchIndexPublicKey struct {
-
- // content
- // Format: byte
- Content strfmt.Base64 `json:"content,omitempty"`
-
- // format
- // Required: true
- // Enum: ["pgp","x509","minisign","ssh","tuf"]
- Format *string `json:"format"`
-
- // url
- // Format: uri
- URL strfmt.URI `json:"url,omitempty"`
-}
-
-// Validate validates this search index public key
-func (m *SearchIndexPublicKey) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateFormat(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateURL(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-var searchIndexPublicKeyTypeFormatPropEnum []interface{}
-
-func init() {
- var res []string
- if err := json.Unmarshal([]byte(`["pgp","x509","minisign","ssh","tuf"]`), &res); err != nil {
- panic(err)
- }
- for _, v := range res {
- searchIndexPublicKeyTypeFormatPropEnum = append(searchIndexPublicKeyTypeFormatPropEnum, v)
- }
-}
-
-const (
-
- // SearchIndexPublicKeyFormatPgp captures enum value "pgp"
- SearchIndexPublicKeyFormatPgp string = "pgp"
-
- // SearchIndexPublicKeyFormatX509 captures enum value "x509"
- SearchIndexPublicKeyFormatX509 string = "x509"
-
- // SearchIndexPublicKeyFormatMinisign captures enum value "minisign"
- SearchIndexPublicKeyFormatMinisign string = "minisign"
-
- // SearchIndexPublicKeyFormatSSH captures enum value "ssh"
- SearchIndexPublicKeyFormatSSH string = "ssh"
-
- // SearchIndexPublicKeyFormatTUF captures enum value "tuf"
- SearchIndexPublicKeyFormatTUF string = "tuf"
-)
-
-// prop value enum
-func (m *SearchIndexPublicKey) validateFormatEnum(path, location string, value string) error {
- if err := validate.EnumCase(path, location, value, searchIndexPublicKeyTypeFormatPropEnum, true); err != nil {
- return err
- }
- return nil
-}
-
-func (m *SearchIndexPublicKey) validateFormat(formats strfmt.Registry) error {
-
- if err := validate.Required("publicKey"+"."+"format", "body", m.Format); err != nil {
- return err
- }
-
- // value enum
- if err := m.validateFormatEnum("publicKey"+"."+"format", "body", *m.Format); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *SearchIndexPublicKey) validateURL(formats strfmt.Registry) error {
- if swag.IsZero(m.URL) { // not required
- return nil
- }
-
- if err := validate.FormatOf("publicKey"+"."+"url", "body", "uri", m.URL.String(), formats); err != nil {
- return err
- }
-
- return nil
-}
-
-// ContextValidate validates this search index public key based on context it is used
-func (m *SearchIndexPublicKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *SearchIndexPublicKey) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *SearchIndexPublicKey) UnmarshalBinary(b []byte) error {
- var res SearchIndexPublicKey
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
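
The SearchIndex model above constrains its hash field to an optionally prefixed sha1, sha256, or sha512 hex digest. The same pattern can be exercised directly; hashRe is just an illustrative name for this sketch:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same pattern as the SearchIndex hash field: optional "sha512:"/"sha256:"/"sha1:"
// prefix followed by a hex digest of the matching length.
var hashRe = regexp.MustCompile(`^(sha512:)?[0-9a-fA-F]{128}$|^(sha256:)?[0-9a-fA-F]{64}$|^(sha1:)?[0-9a-fA-F]{40}$`)

func main() {
	fmt.Println(hashRe.MatchString("sha256:" + strings.Repeat("ab", 32))) // true: 64 hex chars
	fmt.Println(hashRe.MatchString("not-a-digest"))                       // false
}
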
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/search_log_query.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/search_log_query.go
deleted file mode 100644
index 425ec8b348..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/search_log_query.go
+++ /dev/null
@@ -1,297 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
- "io"
- "strconv"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/runtime"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// SearchLogQuery search log query
-//
-// swagger:model SearchLogQuery
-type SearchLogQuery struct {
- entriesField []ProposedEntry
-
- // entry u UI ds
- // Max Items: 10
- // Min Items: 1
- EntryUUIDs []string `json:"entryUUIDs"`
-
- // log indexes
- // Max Items: 10
- // Min Items: 1
- LogIndexes []*int64 `json:"logIndexes"`
-}
-
-// Entries gets the entries of this base type
-func (m *SearchLogQuery) Entries() []ProposedEntry {
- return m.entriesField
-}
-
-// SetEntries sets the entries of this base type
-func (m *SearchLogQuery) SetEntries(val []ProposedEntry) {
- m.entriesField = val
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *SearchLogQuery) UnmarshalJSON(raw []byte) error {
- var data struct {
- Entries json.RawMessage `json:"entries"`
-
- EntryUUIDs []string `json:"entryUUIDs"`
-
- LogIndexes []*int64 `json:"logIndexes"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var propEntries []ProposedEntry
- if string(data.Entries) != "null" {
- entries, err := UnmarshalProposedEntrySlice(bytes.NewBuffer(data.Entries), runtime.JSONConsumer())
- if err != nil && err != io.EOF {
- return err
- }
- propEntries = entries
- }
-
- var result SearchLogQuery
-
- // entries
- result.entriesField = propEntries
-
- // entryUUIDs
- result.EntryUUIDs = data.EntryUUIDs
-
- // logIndexes
- result.LogIndexes = data.LogIndexes
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m SearchLogQuery) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
- EntryUUIDs []string `json:"entryUUIDs"`
-
- LogIndexes []*int64 `json:"logIndexes"`
- }{
-
- EntryUUIDs: m.EntryUUIDs,
-
- LogIndexes: m.LogIndexes,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Entries []ProposedEntry `json:"entries"`
- }{
-
- Entries: m.entriesField,
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this search log query
-func (m *SearchLogQuery) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateEntries(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateEntryUUIDs(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateLogIndexes(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *SearchLogQuery) validateEntries(formats strfmt.Registry) error {
- if swag.IsZero(m.Entries()) { // not required
- return nil
- }
-
- iEntriesSize := int64(len(m.Entries()))
-
- if err := validate.MinItems("entries", "body", iEntriesSize, 1); err != nil {
- return err
- }
-
- if err := validate.MaxItems("entries", "body", iEntriesSize, 10); err != nil {
- return err
- }
-
- for i := 0; i < len(m.Entries()); i++ {
-
- if err := m.entriesField[i].Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("entries" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("entries" + "." + strconv.Itoa(i))
- }
- return err
- }
-
- }
-
- return nil
-}
-
-func (m *SearchLogQuery) validateEntryUUIDs(formats strfmt.Registry) error {
- if swag.IsZero(m.EntryUUIDs) { // not required
- return nil
- }
-
- iEntryUUIDsSize := int64(len(m.EntryUUIDs))
-
- if err := validate.MinItems("entryUUIDs", "body", iEntryUUIDsSize, 1); err != nil {
- return err
- }
-
- if err := validate.MaxItems("entryUUIDs", "body", iEntryUUIDsSize, 10); err != nil {
- return err
- }
-
- for i := 0; i < len(m.EntryUUIDs); i++ {
-
- if err := validate.Pattern("entryUUIDs"+"."+strconv.Itoa(i), "body", m.EntryUUIDs[i], `^([0-9a-fA-F]{64}|[0-9a-fA-F]{80})$`); err != nil {
- return err
- }
-
- }
-
- return nil
-}
-
-func (m *SearchLogQuery) validateLogIndexes(formats strfmt.Registry) error {
- if swag.IsZero(m.LogIndexes) { // not required
- return nil
- }
-
- iLogIndexesSize := int64(len(m.LogIndexes))
-
- if err := validate.MinItems("logIndexes", "body", iLogIndexesSize, 1); err != nil {
- return err
- }
-
- if err := validate.MaxItems("logIndexes", "body", iLogIndexesSize, 10); err != nil {
- return err
- }
-
- for i := 0; i < len(m.LogIndexes); i++ {
- if swag.IsZero(m.LogIndexes[i]) { // not required
- continue
- }
-
- if err := validate.MinimumInt("logIndexes"+"."+strconv.Itoa(i), "body", *m.LogIndexes[i], 0, false); err != nil {
- return err
- }
-
- }
-
- return nil
-}
-
-// ContextValidate validate this search log query based on the context it is used
-func (m *SearchLogQuery) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateEntries(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *SearchLogQuery) contextValidateEntries(ctx context.Context, formats strfmt.Registry) error {
-
- for i := 0; i < len(m.Entries()); i++ {
-
- if swag.IsZero(m.entriesField[i]) { // not required
- return nil
- }
-
- if err := m.entriesField[i].ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("entries" + "." + strconv.Itoa(i))
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("entries" + "." + strconv.Itoa(i))
- }
- return err
- }
-
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *SearchLogQuery) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *SearchLogQuery) UnmarshalBinary(b []byte) error {
- var res SearchLogQuery
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
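
SearchLogQuery above bounds entryUUIDs to between 1 and 10 items and requires each item to be a 64- or 80-character hex identifier. A stand-alone sketch of the same constraints (validateEntryUUIDs is a hypothetical helper, not part of the vendored API; the vendored code additionally skips the check entirely when the slice is empty):

package main

import (
	"fmt"
	"regexp"
)

// Same per-item pattern as the vendored validateEntryUUIDs.
var entryUUIDRe = regexp.MustCompile(`^([0-9a-fA-F]{64}|[0-9a-fA-F]{80})$`)

func validateEntryUUIDs(ids []string) error {
	// MinItems: 1, MaxItems: 10, as in the generated model.
	if len(ids) < 1 || len(ids) > 10 {
		return fmt.Errorf("entryUUIDs must contain between 1 and 10 items, got %d", len(ids))
	}
	for i, id := range ids {
		if !entryUUIDRe.MatchString(id) {
			return fmt.Errorf("entryUUIDs.%d: %q is not a valid entry UUID", i, id)
		}
	}
	return nil
}

func main() {
	fmt.Println(validateEntryUUIDs([]string{"deadbeef"})) // error: not 64/80 hex chars
}
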
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf.go
deleted file mode 100644
index a5f6eff0f7..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf.go
+++ /dev/null
@@ -1,210 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "bytes"
- "context"
- "encoding/json"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// TUF TUF metadata
-//
-// swagger:model tuf
-type TUF struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec TUFSchema `json:"spec"`
-}
-
-// Kind gets the kind of this subtype
-func (m *TUF) Kind() string {
- return "tuf"
-}
-
-// SetKind sets the kind of this subtype
-func (m *TUF) SetKind(val string) {
-}
-
-// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
-func (m *TUF) UnmarshalJSON(raw []byte) error {
- var data struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec TUFSchema `json:"spec"`
- }
- buf := bytes.NewBuffer(raw)
- dec := json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&data); err != nil {
- return err
- }
-
- var base struct {
- /* Just the base type fields. Used for unmashalling polymorphic types.*/
-
- Kind string `json:"kind"`
- }
- buf = bytes.NewBuffer(raw)
- dec = json.NewDecoder(buf)
- dec.UseNumber()
-
- if err := dec.Decode(&base); err != nil {
- return err
- }
-
- var result TUF
-
- if base.Kind != result.Kind() {
- /* Not the type we're looking for. */
- return errors.New(422, "invalid kind value: %q", base.Kind)
- }
-
- result.APIVersion = data.APIVersion
- result.Spec = data.Spec
-
- *m = result
-
- return nil
-}
-
-// MarshalJSON marshals this object with a polymorphic type to a JSON structure
-func (m TUF) MarshalJSON() ([]byte, error) {
- var b1, b2, b3 []byte
- var err error
- b1, err = json.Marshal(struct {
-
- // api version
- // Required: true
- // Pattern: ^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$
- APIVersion *string `json:"apiVersion"`
-
- // spec
- // Required: true
- Spec TUFSchema `json:"spec"`
- }{
-
- APIVersion: m.APIVersion,
-
- Spec: m.Spec,
- })
- if err != nil {
- return nil, err
- }
- b2, err = json.Marshal(struct {
- Kind string `json:"kind"`
- }{
-
- Kind: m.Kind(),
- })
- if err != nil {
- return nil, err
- }
-
- return swag.ConcatJSON(b1, b2, b3), nil
-}
-
-// Validate validates this tuf
-func (m *TUF) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateAPIVersion(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateSpec(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *TUF) validateAPIVersion(formats strfmt.Registry) error {
-
- if err := validate.Required("apiVersion", "body", m.APIVersion); err != nil {
- return err
- }
-
- if err := validate.Pattern("apiVersion", "body", *m.APIVersion, `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`); err != nil {
- return err
- }
-
- return nil
-}
-
-func (m *TUF) validateSpec(formats strfmt.Registry) error {
-
- if m.Spec == nil {
- return errors.Required("spec", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validate this tuf based on the context it is used
-func (m *TUF) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *TUF) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *TUF) UnmarshalBinary(b []byte) error {
- var res TUF
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf_schema.go
deleted file mode 100644
index 37dca8b68e..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf_schema.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-// TUFSchema TUF Schema
-//
-// # Schema for TUF metadata objects
-//
-// swagger:model tufSchema
-type TUFSchema interface{}
diff --git a/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf_v001_schema.go b/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf_v001_schema.go
deleted file mode 100644
index 021e0ce7d3..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/generated/models/tuf_v001_schema.go
+++ /dev/null
@@ -1,304 +0,0 @@
-// Code generated by go-swagger; DO NOT EDIT.
-
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package models
-
-// This file was generated by the swagger tool.
-// Editing this file might prove futile when you re-run the swagger generate command
-
-import (
- "context"
-
- "github.com/go-openapi/errors"
- "github.com/go-openapi/strfmt"
- "github.com/go-openapi/swag"
- "github.com/go-openapi/validate"
-)
-
-// TUFV001Schema TUF v0.0.1 Schema
-//
-// # Schema for TUF metadata entries
-//
-// swagger:model tufV001Schema
-type TUFV001Schema struct {
-
- // metadata
- // Required: true
- Metadata *TUFV001SchemaMetadata `json:"metadata"`
-
- // root
- // Required: true
- Root *TUFV001SchemaRoot `json:"root"`
-
- // TUF specification version
- // Read Only: true
- SpecVersion string `json:"spec_version,omitempty"`
-}
-
-// Validate validates this tuf v001 schema
-func (m *TUFV001Schema) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateMetadata(formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.validateRoot(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *TUFV001Schema) validateMetadata(formats strfmt.Registry) error {
-
- if err := validate.Required("metadata", "body", m.Metadata); err != nil {
- return err
- }
-
- if m.Metadata != nil {
- if err := m.Metadata.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("metadata")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("metadata")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *TUFV001Schema) validateRoot(formats strfmt.Registry) error {
-
- if err := validate.Required("root", "body", m.Root); err != nil {
- return err
- }
-
- if m.Root != nil {
- if err := m.Root.Validate(formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("root")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("root")
- }
- return err
- }
- }
-
- return nil
-}
-
-// ContextValidate validate this tuf v001 schema based on the context it is used
-func (m *TUFV001Schema) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- var res []error
-
- if err := m.contextValidateMetadata(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateRoot(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if err := m.contextValidateSpecVersion(ctx, formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *TUFV001Schema) contextValidateMetadata(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Metadata != nil {
-
- if err := m.Metadata.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("metadata")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("metadata")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *TUFV001Schema) contextValidateRoot(ctx context.Context, formats strfmt.Registry) error {
-
- if m.Root != nil {
-
- if err := m.Root.ContextValidate(ctx, formats); err != nil {
- if ve, ok := err.(*errors.Validation); ok {
- return ve.ValidateName("root")
- } else if ce, ok := err.(*errors.CompositeError); ok {
- return ce.ValidateName("root")
- }
- return err
- }
- }
-
- return nil
-}
-
-func (m *TUFV001Schema) contextValidateSpecVersion(ctx context.Context, formats strfmt.Registry) error {
-
- if err := validate.ReadOnly(ctx, "spec_version", "body", string(m.SpecVersion)); err != nil {
- return err
- }
-
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *TUFV001Schema) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *TUFV001Schema) UnmarshalBinary(b []byte) error {
- var res TUFV001Schema
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// TUFV001SchemaMetadata TUF metadata
-//
-// swagger:model TUFV001SchemaMetadata
-type TUFV001SchemaMetadata struct {
-
- // Specifies the metadata inline within the document
- // Required: true
- Content interface{} `json:"content"`
-}
-
-// Validate validates this TUF v001 schema metadata
-func (m *TUFV001SchemaMetadata) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *TUFV001SchemaMetadata) validateContent(formats strfmt.Registry) error {
-
- if m.Content == nil {
- return errors.Required("metadata"+"."+"content", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validates this TUF v001 schema metadata based on context it is used
-func (m *TUFV001SchemaMetadata) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *TUFV001SchemaMetadata) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *TUFV001SchemaMetadata) UnmarshalBinary(b []byte) error {
- var res TUFV001SchemaMetadata
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
-
-// TUFV001SchemaRoot root metadata containing about the public keys used to sign the manifest
-//
-// swagger:model TUFV001SchemaRoot
-type TUFV001SchemaRoot struct {
-
- // Specifies the metadata inline within the document
- // Required: true
- Content interface{} `json:"content"`
-}
-
-// Validate validates this TUF v001 schema root
-func (m *TUFV001SchemaRoot) Validate(formats strfmt.Registry) error {
- var res []error
-
- if err := m.validateContent(formats); err != nil {
- res = append(res, err)
- }
-
- if len(res) > 0 {
- return errors.CompositeValidationError(res...)
- }
- return nil
-}
-
-func (m *TUFV001SchemaRoot) validateContent(formats strfmt.Registry) error {
-
- if m.Content == nil {
- return errors.Required("root"+"."+"content", "body", nil)
- }
-
- return nil
-}
-
-// ContextValidate validates this TUF v001 schema root based on context it is used
-func (m *TUFV001SchemaRoot) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
- return nil
-}
-
-// MarshalBinary interface implementation
-func (m *TUFV001SchemaRoot) MarshalBinary() ([]byte, error) {
- if m == nil {
- return nil, nil
- }
- return swag.WriteJSON(m)
-}
-
-// UnmarshalBinary interface implementation
-func (m *TUFV001SchemaRoot) UnmarshalBinary(b []byte) error {
- var res TUFV001SchemaRoot
- if err := swag.ReadJSON(b, &res); err != nil {
- return err
- }
- *m = res
- return nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/util/checkpoint.go b/vendor/github.com/sigstore/rekor/pkg/util/checkpoint.go
deleted file mode 100644
index 27b246d799..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/util/checkpoint.go
+++ /dev/null
@@ -1,165 +0,0 @@
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package util
-
-import (
- "bytes"
- "context"
- "encoding/base64"
- "errors"
- "fmt"
- "strconv"
- "strings"
-
- "github.com/sigstore/sigstore/pkg/signature"
- "github.com/sigstore/sigstore/pkg/signature/options"
-)
-
-// heavily borrowed from https://github.com/transparency-dev/formats/blob/main/log/checkpoint.go
-
-type Checkpoint struct {
- // Origin is the unique identifier/version string
- Origin string
- // Size is the number of entries in the log at this checkpoint.
- Size uint64
- // Hash is the hash which commits to the contents of the entire log.
- Hash []byte
- // OtherContent is any additional data to be included in the signed payload; each element is assumed to be one line
- OtherContent []string
-}
-
-// String returns the String representation of the Checkpoint
-func (c Checkpoint) String() string {
- var b strings.Builder
- fmt.Fprintf(&b, "%s\n%d\n%s\n", c.Origin, c.Size, base64.StdEncoding.EncodeToString(c.Hash))
- for _, line := range c.OtherContent {
- fmt.Fprintf(&b, "%s\n", line)
- }
- return b.String()
-}
-
-// MarshalCheckpoint returns the common format representation of this Checkpoint.
-func (c Checkpoint) MarshalCheckpoint() ([]byte, error) {
- return []byte(c.String()), nil
-}
-
-// UnmarshalCheckpoint parses the common formatted checkpoint data and stores the result
-// in the Checkpoint.
-//
-// The supplied data is expected to begin with the following 3 lines of text,
-// each followed by a newline:
-// <ecosystem/version string>
-// <decimal representation of log size>
-// <base64 representation of log root hash>
-// <optional non-empty line of other content>...
-// <optional non-empty line of other content>...
-//
-// This will discard any content found after the checkpoint (including signatures)
-func (c *Checkpoint) UnmarshalCheckpoint(data []byte) error {
- l := bytes.Split(data, []byte("\n"))
- if len(l) < 4 {
- return errors.New("invalid checkpoint - too few newlines")
- }
- origin := string(l[0])
- if len(origin) == 0 {
- return errors.New("invalid checkpoint - empty ecosystem")
- }
- size, err := strconv.ParseUint(string(l[1]), 10, 64)
- if err != nil {
- return fmt.Errorf("invalid checkpoint - size invalid: %w", err)
- }
- h, err := base64.StdEncoding.DecodeString(string(l[2]))
- if err != nil {
- return fmt.Errorf("invalid checkpoint - invalid hash: %w", err)
- }
- *c = Checkpoint{
- Origin: origin,
- Size: size,
- Hash: h,
- }
- if len(l) >= 3 {
- for _, line := range l[3:] {
- if len(line) == 0 {
- break
- }
- c.OtherContent = append(c.OtherContent, string(line))
- }
- }
- return nil
-}
-
-type SignedCheckpoint struct {
- Checkpoint
- SignedNote
-}
-
-func CreateSignedCheckpoint(c Checkpoint) (*SignedCheckpoint, error) {
- text, err := c.MarshalCheckpoint()
- if err != nil {
- return nil, err
- }
- return &SignedCheckpoint{
- Checkpoint: c,
- SignedNote: SignedNote{Note: string(text)},
- }, nil
-}
-
-func SignedCheckpointValidator(strToValidate string) bool {
- s := SignedNote{}
- if err := s.UnmarshalText([]byte(strToValidate)); err != nil {
- return false
- }
- c := &Checkpoint{}
- return c.UnmarshalCheckpoint([]byte(s.Note)) == nil
-}
-
-func CheckpointValidator(strToValidate string) bool {
- c := &Checkpoint{}
- return c.UnmarshalCheckpoint([]byte(strToValidate)) == nil
-}
-
-func (r *SignedCheckpoint) UnmarshalText(data []byte) error {
- s := SignedNote{}
- if err := s.UnmarshalText([]byte(data)); err != nil {
- return fmt.Errorf("unmarshalling signed note: %w", err)
- }
- c := Checkpoint{}
- if err := c.UnmarshalCheckpoint([]byte(s.Note)); err != nil {
- return fmt.Errorf("unmarshalling checkpoint: %w", err)
- }
- *r = SignedCheckpoint{Checkpoint: c, SignedNote: s}
- return nil
-}
-
-// CreateAndSignCheckpoint creates a signed checkpoint as a commitment to the current root hash
-func CreateAndSignCheckpoint(ctx context.Context, hostname string, treeID int64, treeSize uint64, rootHash []byte, signer signature.Signer) ([]byte, error) {
- sth, err := CreateSignedCheckpoint(Checkpoint{
- Origin: fmt.Sprintf("%s - %d", hostname, treeID),
- Size: treeSize,
- Hash: rootHash,
- })
- if err != nil {
- return nil, fmt.Errorf("error creating checkpoint: %w", err)
- }
- if _, err := sth.Sign(hostname, signer, options.WithContext(ctx)); err != nil {
- return nil, fmt.Errorf("error signing checkpoint: %w", err)
- }
- scBytes, err := sth.MarshalText()
- if err != nil {
- return nil, fmt.Errorf("error marshalling checkpoint: %w", err)
- }
- return scBytes, nil
-}
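
The Checkpoint type above serializes to origin, decimal size, and base64 root hash, one per line, followed by any extra content lines. A minimal round trip of that text format (marshalCheckpoint and parseCheckpoint are illustrative stand-ins for the removed MarshalCheckpoint/UnmarshalCheckpoint, not the vendored functions):

package main

import (
	"encoding/base64"
	"fmt"
	"strconv"
	"strings"
)

func marshalCheckpoint(origin string, size uint64, hash []byte, other []string) string {
	// "origin\nsize\nbase64(hash)\n" plus one line per extra content entry.
	var b strings.Builder
	fmt.Fprintf(&b, "%s\n%d\n%s\n", origin, size, base64.StdEncoding.EncodeToString(hash))
	for _, line := range other {
		fmt.Fprintf(&b, "%s\n", line)
	}
	return b.String()
}

func parseCheckpoint(data string) (origin string, size uint64, hash []byte, err error) {
	lines := strings.Split(data, "\n")
	if len(lines) < 4 {
		return "", 0, nil, fmt.Errorf("invalid checkpoint - too few newlines")
	}
	size, err = strconv.ParseUint(lines[1], 10, 64)
	if err != nil {
		return "", 0, nil, err
	}
	hash, err = base64.StdEncoding.DecodeString(lines[2])
	return lines[0], size, hash, err
}

func main() {
	body := marshalCheckpoint("rekor.example - 123", 42, []byte("0123456789abcdef0123456789abcdef"), nil)
	fmt.Println(parseCheckpoint(body))
}
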
diff --git a/vendor/github.com/sigstore/rekor/pkg/util/fetch.go b/vendor/github.com/sigstore/rekor/pkg/util/fetch.go
deleted file mode 100644
index 7f8e93fb04..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/util/fetch.go
+++ /dev/null
@@ -1,49 +0,0 @@
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package util
-
-import (
- "bytes"
- "context"
- "fmt"
- "io"
- "net/http"
-)
-
-// FileOrURLReadCloser Note: caller is responsible for closing ReadCloser returned from method!
-func FileOrURLReadCloser(ctx context.Context, url string, content []byte) (io.ReadCloser, error) {
- var dataReader io.ReadCloser
- if url != "" {
- //TODO: set timeout here, SSL settings?
- client := &http.Client{}
- req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
- if err != nil {
- return nil, err
- }
- resp, err := client.Do(req)
- if err != nil {
- return nil, err
- }
- if resp.StatusCode < 200 || resp.StatusCode > 299 {
- return nil, fmt.Errorf("error received while fetching artifact '%v': %v", url, resp.Status)
- }
-
- dataReader = resp.Body
- } else {
- dataReader = io.NopCloser(bytes.NewReader(content))
- }
- return dataReader, nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/util/sha.go b/vendor/github.com/sigstore/rekor/pkg/util/sha.go
deleted file mode 100644
index 07b8fb1b53..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/util/sha.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2022 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package util
-
-import (
- "crypto"
- "fmt"
- "strings"
-)
-
-// PrefixSHA sets the prefix of a sha hash to match how it is stored based on the length.
-func PrefixSHA(sha string) string {
- var prefix string
- var components = strings.Split(sha, ":")
-
- if len(components) == 2 {
- return sha
- }
-
- switch len(sha) {
- case 40:
- prefix = "sha1:"
- case 64:
- prefix = "sha256:"
- case 96:
- prefix = "sha384:"
- case 128:
- prefix = "sha512:"
- }
-
- return fmt.Sprintf("%v%v", prefix, sha)
-}
-
-func UnprefixSHA(sha string) (crypto.Hash, string) {
- components := strings.Split(sha, ":")
-
- if len(components) == 2 {
- prefix := components[0]
- sha = components[1]
-
- switch prefix {
- case "sha1":
- return crypto.SHA1, sha
- case "sha256":
- return crypto.SHA256, sha
- case "sha384":
- return crypto.SHA384, sha
- case "sha512":
- return crypto.SHA512, sha
- default:
- return crypto.Hash(0), ""
- }
- }
-
- switch len(sha) {
- case 40:
- return crypto.SHA1, sha
- case 64:
- return crypto.SHA256, sha
- case 96:
- return crypto.SHA384, sha
- case 128:
- return crypto.SHA512, sha
- }
-
- return crypto.Hash(0), ""
-}
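
PrefixSHA above infers the digest algorithm from the hex length alone: 40 characters means sha1, 64 sha256, 96 sha384, 128 sha512, and a value that already carries an "algo:" prefix is returned unchanged. The same mapping in isolation (prefixSHA is a hypothetical re-implementation for illustration, not the vendored function):

package main

import (
	"fmt"
	"strings"
)

func prefixSHA(sha string) string {
	if strings.Contains(sha, ":") {
		return sha // already prefixed, e.g. "sha256:..."
	}
	// Length of the hex digest determines the algorithm prefix.
	prefixes := map[int]string{40: "sha1:", 64: "sha256:", 96: "sha384:", 128: "sha512:"}
	return prefixes[len(sha)] + sha
}

func main() {
	fmt.Println(prefixSHA(strings.Repeat("a", 64))) // sha256:aaaa...
}
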
diff --git a/vendor/github.com/sigstore/rekor/pkg/util/signed_note.go b/vendor/github.com/sigstore/rekor/pkg/util/signed_note.go
deleted file mode 100644
index 4c9c8f8a70..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/util/signed_note.go
+++ /dev/null
@@ -1,211 +0,0 @@
-//
-// Copyright 2021 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package util
-
-import (
- "bufio"
- "bytes"
- "crypto"
- "crypto/ecdsa"
- "crypto/ed25519"
- "crypto/rsa"
- "crypto/sha256"
- "crypto/x509"
- "encoding/base64"
- "encoding/binary"
- "errors"
- "fmt"
- "strings"
-
- "github.com/sigstore/sigstore/pkg/signature"
- "github.com/sigstore/sigstore/pkg/signature/options"
- "golang.org/x/mod/sumdb/note"
-)
-
-type SignedNote struct {
- // Textual representation of a note to sign.
- Note string
- // Signatures are one or more signature lines covering the payload
- Signatures []note.Signature
-}
-
-// Sign adds a signature to a SignedCheckpoint object
-// The signature is added to the signature array as well as being directly returned to the caller
-func (s *SignedNote) Sign(identity string, signer signature.Signer, opts signature.SignOption) (*note.Signature, error) {
- sig, err := signer.SignMessage(bytes.NewReader([]byte(s.Note)), opts)
- if err != nil {
- return nil, fmt.Errorf("signing note: %w", err)
- }
-
- pk, err := signer.PublicKey()
- if err != nil {
- return nil, fmt.Errorf("retrieving public key: %w", err)
- }
- pkHash, err := getPublicKeyHash(pk)
- if err != nil {
- return nil, err
- }
-
- signature := note.Signature{
- Name: identity,
- Hash: pkHash,
- Base64: base64.StdEncoding.EncodeToString(sig),
- }
-
- s.Signatures = append(s.Signatures, signature)
- return &signature, nil
-}
-
-// Verify checks that one of the signatures can be successfully verified using
-// the supplied public key
-func (s SignedNote) Verify(verifier signature.Verifier) bool {
- if len(s.Signatures) == 0 {
- return false
- }
-
- msg := []byte(s.Note)
- digest := sha256.Sum256(msg)
-
- pk, err := verifier.PublicKey()
- if err != nil {
- return false
- }
- verifierPkHash, err := getPublicKeyHash(pk)
- if err != nil {
- return false
- }
-
- for _, s := range s.Signatures {
- sigBytes, err := base64.StdEncoding.DecodeString(s.Base64)
- if err != nil {
- return false
- }
-
- if s.Hash != verifierPkHash {
- return false
- }
-
- opts := []signature.VerifyOption{}
- switch pk.(type) {
- case *rsa.PublicKey, *ecdsa.PublicKey:
- opts = append(opts, options.WithDigest(digest[:]))
- case ed25519.PublicKey:
- break
- default:
- return false
- }
- if err := verifier.VerifySignature(bytes.NewReader(sigBytes), bytes.NewReader(msg), opts...); err != nil {
- return false
- }
- }
- return true
-}
-
-// MarshalText returns the common format representation of this SignedNote.
-func (s SignedNote) MarshalText() ([]byte, error) {
- return []byte(s.String()), nil
-}
-
-// String returns the String representation of the SignedNote
-func (s SignedNote) String() string {
- var b strings.Builder
- b.WriteString(s.Note)
- b.WriteRune('\n')
- for _, sig := range s.Signatures {
- var hbuf [4]byte
- binary.BigEndian.PutUint32(hbuf[:], sig.Hash)
- sigBytes, _ := base64.StdEncoding.DecodeString(sig.Base64)
- b64 := base64.StdEncoding.EncodeToString(append(hbuf[:], sigBytes...))
- fmt.Fprintf(&b, "%c %s %s\n", '\u2014', sig.Name, b64)
- }
-
- return b.String()
-}
-
-// UnmarshalText parses the common formatted signed note data and stores the result
-// in the SignedNote. THIS DOES NOT VERIFY SIGNATURES INSIDE THE CONTENT!
-//
-// The supplied data is expected to contain a single Note, followed by a single
-// line with no comment, followed by one or more lines with the following format:
-//
-// \u2014 name signature
-//
-// - name is the string associated with the signer
-// - signature is a base64 encoded string; the first 4 bytes of the decoded value is a
-// hint to the public key; it is a big-endian encoded uint32 representing the first
-// 4 bytes of the SHA256 hash of the public key
-func (s *SignedNote) UnmarshalText(data []byte) error {
- sigSplit := []byte("\n\n")
- // Must end with signature block preceded by blank line.
- split := bytes.LastIndex(data, sigSplit)
- if split < 0 {
- return errors.New("malformed note")
- }
- text, data := data[:split+1], data[split+2:]
- if len(data) == 0 || data[len(data)-1] != '\n' {
- return errors.New("malformed note")
- }
-
- sn := SignedNote{
- Note: string(text),
- }
-
- b := bufio.NewScanner(bytes.NewReader(data))
- for b.Scan() {
- var name, signature string
- if _, err := fmt.Fscanf(strings.NewReader(b.Text()), "\u2014 %s %s\n", &name, &signature); err != nil {
- return fmt.Errorf("parsing signature: %w", err)
- }
-
- sigBytes, err := base64.StdEncoding.DecodeString(signature)
- if err != nil {
- return fmt.Errorf("decoding signature: %w", err)
- }
- if len(sigBytes) < 5 {
- return errors.New("signature is too small")
- }
-
- sig := note.Signature{
- Name: name,
- Hash: binary.BigEndian.Uint32(sigBytes[0:4]),
- Base64: base64.StdEncoding.EncodeToString(sigBytes[4:]),
- }
- sn.Signatures = append(sn.Signatures, sig)
-
- }
- if len(sn.Signatures) == 0 {
- return errors.New("no signatures found in input")
- }
-
- // copy sc to s
- *s = sn
- return nil
-}
-
-func SignedNoteValidator(strToValidate string) bool {
- s := SignedNote{}
- return s.UnmarshalText([]byte(strToValidate)) == nil
-}
-
-func getPublicKeyHash(publicKey crypto.PublicKey) (uint32, error) {
- pubKeyBytes, err := x509.MarshalPKIXPublicKey(publicKey)
- if err != nil {
- return 0, fmt.Errorf("marshalling public key: %w", err)
- }
- pkSha := sha256.Sum256(pubKeyBytes)
- hash := binary.BigEndian.Uint32(pkSha[:])
- return hash, nil
-}
diff --git a/vendor/github.com/sigstore/rekor/pkg/util/util.go b/vendor/github.com/sigstore/rekor/pkg/util/util.go
deleted file mode 100644
index 78c1a0f513..0000000000
--- a/vendor/github.com/sigstore/rekor/pkg/util/util.go
+++ /dev/null
@@ -1,446 +0,0 @@
-//
-// Copyright 2022 The Sigstore Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-//go:build e2e
-
-package util
-
-import (
- "bytes"
- "encoding/base64"
- "fmt"
- "io/ioutil"
- "math/rand"
- "os"
- "os/exec"
- "path"
- "path/filepath"
- "strings"
- "testing"
- "time"
-
- "golang.org/x/crypto/openpgp"
-)
-
-var (
- cli = "rekor-cli"
- server = "rekor-server"
- keys openpgp.EntityList
-)
-
-type GetOut struct {
- Attestation string
- AttestationType string
- Body interface{}
- LogIndex int
- IntegratedTime int64
-}
-
-// This was generated with gpg --gen-key, and all defaults.
-// The email is "test@rekor.dev", and the name is Rekor Test.
-// It should only be used for test purposes.
-
-var secretKey = `-----BEGIN PGP PRIVATE KEY BLOCK-----
-
-lQVYBF/11g0BDADciiDQKYjWjIZYTFC55kzaf3H7VcjKb7AdBSyHsN8OIZvLkbgx
-1M5x+JPVXCiBEJMjp7YCVJeTQYixic4Ep+YeC8zIdP8ZcvLD9bgFumws+TBJMY7w
-2cy3oPv/uVW4TRFv42PwKjO/sXpRg1gJx3EX2FJV+aYAPd8Z6pHxuOk6J49wLY1E
-3hl1ZrPGUGsF4l7tVHniZG8IzTCgJGC6qrlsg1VGrIkactesr7U6+Xs4VJgNIdCs
-2/7RqwWAtkSHumAKBe1hNY2ddt3p42jEM0P2g7Uwao7/ziSiS/N96dkEAdWCT99/
-e0qLC4q6VisrFvdmfDQrY73eadL6Jf38H2IUpNrcHgVZtEBGhD6dOcjs2YBZNfX3
-wfDJooRk0efcLlSFT1YVZhxez/zZTd+7nReKPmsOxiaUmP/bQSB4FZZ4ZxsfxH2t
-wgX4dtwRV28JGHeA/ISJiWMQKrci1PRhRWF32EaE6dF+2VJwGi9mssEkAA+YHh1O
-HjPgosqFp16rb1MAEQEAAQAL+gMzi2+6H/RirH3nowcFIs8hKSRphbVP6Gc4xgFf
-kz1Um5BZmH+QrpZ/nJXCSrbk6LM3IgXn+HNOG4/dh5IQZd9rHcPjKY4oWax33/36
-oMteVVHFWGUtTt1zhspFhHWybghebVBKgd8h0ma7LgdQ+oFKxeyIPTKlCJy1slH8
-nytq8O1t8S5eEvyIoHTGghHfIVr3Q6BXrjebKD41iPnstIMGElzTmwHj8jbdg2yh
-u8+A2twwm3jcO1dhJilM0V3Zr2L5upsrb20vdD0DMAKZyEcD20VkCt8sxFtTYfGw
-q72aylHxooObicswblfgWXJMEjQ+3CJzPEfkPCEZpUb87QGRsBHSuToVfICkL6ZN
-3TE1RznrItpwXGgWTwyoahXHkMmKLuDlf2PdOcGJd8YOiMFqSyJfh3nvAI2u83dS
-/wzMZqzl77QEUo5YcmXY5LpLco6P/xQcTTgJ7VT0M2mXr/LneffbjbaxNS6q7rl4
-uiGpPcpdevXqhf/VGS+e3JliUQYA5ny7nLYQOEN34O5AKHpfIYoqvGZJkLCp9BDx
-fPGn/b7mGeB/quTb1y/7G28Ovkj7tDz3SGFfSaNeMVpLbkxcZhq05dasb13q2go+
-la0pcv49lHnVIjGcQh+AqoEASm9+ZIyj9vTt6KQ60FDJ78Xkbe1iAOj/dggTe+wj
-udYtyvmpYvK/fz5rzg10oh20afbYPTnIubVcSB8RD1muFIrHTAPSrJ4OsXt1nFgT
-rvbIjBX5Q//cKDiCd/xHJOwDvIwtBgD084KdBPr8YAtQVThF2MULHeGp11nqo0Gb
-dsOkxe8cixK7JjyDfGbK8H82fI1Fd47lcp9h1VLL5A0XnJgDGHNW/IWIdBfvhvjS
-AnF0wPaN0ohpUvkfVAErG+n+RcLricL+afX/1+YoJZTNGW+fclbTBQCfWyFYBh49
-YTxa6qH131Lj8VWbCuSdfo1jN5nUuVeutkW9VnMLuo0VCt+Phw8ok3SP8rdBMFRW
-3eYmCCRw+XvLQT0vL3K0D4udts+nmX8F/30jPprjz09hyreERUWcqvQcUO3E5uc6
-xQUOmMrIg5jVK6PdFRtUMNip+EMOfewoUDtNf2VOQ0WdSboopZyXXGG0SW+7FC5V
-m/mFkffnxqHbj8odOI8l9xiK6ejeVMc3aKIL3tTAGZxNniKr4SfEFkT+nC4rNpLF
-tM6PBxxaffTpG5G2GW2sy9A5jEygcuDz5wTjS5KnKoXlI8qaDrfeIiB/hBZMDtAM
-KmFvCQ2AO3xDtxzHPphEhPZx793S7pqru+egtBtSZWtvciBUZXN0IDx0ZXN0QHJl
-a29yLmRldj6JAdQEEwEIAD4WIQRpDIZrWp/rSB21PSTYo+vASJM64gUCX/XWDQIb
-AwUJA8JnAAULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRDYo+vASJM64j/9C/4q
-2iKsQBcOofjH7PklIlV1asTJP8Uxp2giPXnwgcfWYDGs+e/oHMjkmWwyXUkE0ki7
-N4SB4m6ztfljkTsOPUFVcDtcjj2ScOx5lsrDW8wPMwiJpFM62HkJfg7mrAqDquTB
-iue5X+9OFbxOBSRti9w+H5Uiw/jaChxUKpaDW5qtZiYEkgKpbEK03jFkewtu8SWD
-zoFt2gMKSHg6btz+hqdrqA1R/n4Z5LbBuWk+hf+N7FGO9clQWoZrRr5qorSfOpQO
-/7S4U5UN4w/IL2OtuPfajHb91aH9q81eddclutOS5kAzYLHgytHSVHIw8QJiqsbe
-YqudCcYHo7aNRlpbIXnE6+FQqa7+hZd5Cv8IQgQngDiAi+C0khYKo3riTwORvlam
-CqC30lzlNWxkFJzfW0E88B4j3rOFeqaXhIohPtxKr68vGVsuIMCnCOsbYfyoUiZm
-RGc4tVAbCuwWJe+OoZEKsS0m6tY6CjT0ugpb+oxqQvyj2eB1cK0i0aiBrAuQCZWd
-BVgEX/XWDQEMAKjSmPaQJdE9+4c+uuZ3plwfwodEY5nPG1qIQWj7LmvCtYQWwex/
-rOPE0ec/6UdlrUSjiAQ0mV5JkdN2QRoxRGy8JsrLAnXadXeO3HI9SpuZaKvsUg5d
-apvdJqcDWlzz/LoA2rl+Z/wo3q2Wx9rh/RHqPLWBUiJSkIlANsshatu9N2Mj5ody
-defGn8gnj6b0JZRpUskyg4/9Wzns/w4OWql3CVm0BIGn6Tt/EplI7eCZg4VvujWN
-T0gydK75hkbGkHE1Z45kBZU26Uge+YEyJ0PFcaXE/kCNetPOtsUz/tO+h6ZLJECI
-lZlnG5/KxOGhoS3fG9F/XfyE3DNQE6qx7CuC6cWm+92wLlPz/Ir0iKTV0tPZLCgu
-5rSNuSJyjTy71nMksFaVJxjb7PZHMbQPXEIbcIX4AvEGV0Icwsh+e6/yXlTgxux9
-RszqyS1LHydtQLvx5X84d9iENkoGGNfVH99i2P1CrTbZ2v3KCnhvy+cTVLjW82XV
-WploktfbdC55TQARAQABAAv+KR1e8N9ywlaK0SmDGZlGq/V1Kf3LFvykMARyj6dq
-qwZYsBJdyKPgfnki2KONQ9zcmZSNDd8kgdy/dcU9PiyE+klJVkaiMwMQ7BzgDbdl
-Ged+4S303vg7vDlcDj0oDu7B3CfUnOvO1c+7SYHo6uLyP+BwyBB2aRL8Dd0UaxyY
-mmrm2A94d4C1+8w5AiU2XEXl+BK9fW/+r/zXMJCKHkl7JX3uykin906mI94C8M9c
-1X/1krP+4MdpKU9WcP2miMqXIhm09rF09YDY1qLRBhvKWnaDDDjBSmIxIAc2AyCe
-JbmFzLVXynduhxhplmOMDD2aIQNfxfiw2E+jq4MLgIGhrNV+yMGOInzMwT0qguB4
-bJllfk7f7ikqwBva9hdC3pUx4zOogJyTkcBH/ETm7b1L26DyJkxlln/Je2Qr64aX
-t5bhx/Y8rC7jVxYYwtIPKtn3zppwNFL3Vysg47BpYM6aAz0AZSKm+Y6jAi2/tWtV
-jhFvQWRPBaDuMS7dzcnb4TY5BgDJ/lG27MpNMEYU5zqWQ7capmYTk8AV6nH+r5cm
-QpoWld5p0gFw6qnjeJ1Q3XZs7QlPq0RQrXzjT3Drhu5XNjqeqQGDH6YY39OQrTSS
-/1BhFhiWUMBpyqv4lc8ytJjbkgg0daNubrIKynwZ/H8Gy3vRe2rHjqaApcwQ5Fwc
-Iy8FPeQI95rnw34b/0dohkxjz6ULJahdksVggI0NS312awjg6TlQx1V3Lv7hbuOE
-Qv1p3kedwr4MgnVe0fZw6Y3ehukGANX13UKtkw6sHjO7h87F9qR5Wb47Rnb12oDa
-fZHmn2jLDAr8Sius1mHFJie9nlXRvBxtVpjyliJxjg0hYc04PLdVKvGFP2a4WQep
-WM+r3fU/Snuhn3VAI2ibMXgFUHW9ofxmhGhdDWImFnU7lvh4U+yoD8vqe9FPFMhu
-zCrGSTo7Qy8PTKCzCf3frSPt3TorFrUOa5PBpq1/fOhLAQzpVC7F+hXZ/kIAWTVm
-wSIilPk7TSVJdd07bsfNQt88xtJoxQX+OgRb8yK+pSluQxii6IgVwFWslOxuZn/O
-Eg9nPh4VAlVGYCh/oleRTLZH+a73p9VQwUzmPjXUDkUFcdM0zysU4HmTZbwTZCQJ
-608IqC+p9D6u289bdsBsCDzA6LAhEgU4vj6Zfm0N3MqEWBDuBOt9McwY1Mbo8jbp
-slVnkz2B6Rw9UkMzQNVxRFCHfIWhPvbiWeiLQPD31Bs6hdBCzn44k75/+0qyBX0a
-Jk8Wmv4z2vR7dh4ABRm4pfZx4IsFbWBS4sSJAbwEGAEIACYWIQRpDIZrWp/rSB21
-PSTYo+vASJM64gUCX/XWDQIbDAUJA8JnAAAKCRDYo+vASJM64mceDACSkr9gsNRc
-OOcnzglYJtmvtAG27ziVS6/ywGPxyZtyIwfEg8JVnIXuB0Fog1/uuZDdjiz4QO3j
-Os9E8z8i6AUKdJgPjxlcr585lSLtKiz7TTPTDmKCF8aga2Gc6+yfjI92F0fEuGh5
-GjdQu76x6hLPYT6+pjrvjmXq8gF030jTOiQ2n6o9oH7aQhehEIFsrQdtKh9ZrhWN
-QWa1P4iPlzPf+Y7sG7irZqcm4wa/U+qxQPNVcA9FUziymPtbMGlqN4x2Z3Jr3VUP
-QFhwXF6U8BM3ldZDNPmmB9OKlsDCR/7+AvwJ52hRxAzIm/lhuXj1xPj5JFuUErAX
-aBIJN0iaJaXVB+JFbzXT1DLhqCR1T37zZSKnLMSKtvIe9UOO6Jy4mgX6CDjPM/Vu
-9aJhzqmaVUbZOYwJh5ojrWLzswv1K9CdcmDEaK4X/u1z+eWiNjsHE3pzUiq4DJhb
-T4CBiqLxHYsQ9n8dT95t+poqJ10PVFkehb+8kh05e3ENd4xpkkdTfIY=
-=CwjQ
------END PGP PRIVATE KEY BLOCK-----`
-var PrivateKey = `-----BEGIN PGP PRIVATE KEY BLOCK-----
-
-lQVYBF/11g0BDADciiDQKYjWjIZYTFC55kzaf3H7VcjKb7AdBSyHsN8OIZvLkbgx
-1M5x+JPVXCiBEJMjp7YCVJeTQYixic4Ep+YeC8zIdP8ZcvLD9bgFumws+TBJMY7w
-2cy3oPv/uVW4TRFv42PwKjO/sXpRg1gJx3EX2FJV+aYAPd8Z6pHxuOk6J49wLY1E
-3hl1ZrPGUGsF4l7tVHniZG8IzTCgJGC6qrlsg1VGrIkactesr7U6+Xs4VJgNIdCs
-2/7RqwWAtkSHumAKBe1hNY2ddt3p42jEM0P2g7Uwao7/ziSiS/N96dkEAdWCT99/
-e0qLC4q6VisrFvdmfDQrY73eadL6Jf38H2IUpNrcHgVZtEBGhD6dOcjs2YBZNfX3
-wfDJooRk0efcLlSFT1YVZhxez/zZTd+7nReKPmsOxiaUmP/bQSB4FZZ4ZxsfxH2t
-wgX4dtwRV28JGHeA/ISJiWMQKrci1PRhRWF32EaE6dF+2VJwGi9mssEkAA+YHh1O
-HjPgosqFp16rb1MAEQEAAQAL+gMzi2+6H/RirH3nowcFIs8hKSRphbVP6Gc4xgFf
-kz1Um5BZmH+QrpZ/nJXCSrbk6LM3IgXn+HNOG4/dh5IQZd9rHcPjKY4oWax33/36
-oMteVVHFWGUtTt1zhspFhHWybghebVBKgd8h0ma7LgdQ+oFKxeyIPTKlCJy1slH8
-nytq8O1t8S5eEvyIoHTGghHfIVr3Q6BXrjebKD41iPnstIMGElzTmwHj8jbdg2yh
-u8+A2twwm3jcO1dhJilM0V3Zr2L5upsrb20vdD0DMAKZyEcD20VkCt8sxFtTYfGw
-q72aylHxooObicswblfgWXJMEjQ+3CJzPEfkPCEZpUb87QGRsBHSuToVfICkL6ZN
-3TE1RznrItpwXGgWTwyoahXHkMmKLuDlf2PdOcGJd8YOiMFqSyJfh3nvAI2u83dS
-/wzMZqzl77QEUo5YcmXY5LpLco6P/xQcTTgJ7VT0M2mXr/LneffbjbaxNS6q7rl4
-uiGpPcpdevXqhf/VGS+e3JliUQYA5ny7nLYQOEN34O5AKHpfIYoqvGZJkLCp9BDx
-fPGn/b7mGeB/quTb1y/7G28Ovkj7tDz3SGFfSaNeMVpLbkxcZhq05dasb13q2go+
-la0pcv49lHnVIjGcQh+AqoEASm9+ZIyj9vTt6KQ60FDJ78Xkbe1iAOj/dggTe+wj
-udYtyvmpYvK/fz5rzg10oh20afbYPTnIubVcSB8RD1muFIrHTAPSrJ4OsXt1nFgT
-rvbIjBX5Q//cKDiCd/xHJOwDvIwtBgD084KdBPr8YAtQVThF2MULHeGp11nqo0Gb
-dsOkxe8cixK7JjyDfGbK8H82fI1Fd47lcp9h1VLL5A0XnJgDGHNW/IWIdBfvhvjS
-AnF0wPaN0ohpUvkfVAErG+n+RcLricL+afX/1+YoJZTNGW+fclbTBQCfWyFYBh49
-YTxa6qH131Lj8VWbCuSdfo1jN5nUuVeutkW9VnMLuo0VCt+Phw8ok3SP8rdBMFRW
-3eYmCCRw+XvLQT0vL3K0D4udts+nmX8F/30jPprjz09hyreERUWcqvQcUO3E5uc6
-xQUOmMrIg5jVK6PdFRtUMNip+EMOfewoUDtNf2VOQ0WdSboopZyXXGG0SW+7FC5V
-m/mFkffnxqHbj8odOI8l9xiK6ejeVMc3aKIL3tTAGZxNniKr4SfEFkT+nC4rNpLF
-tM6PBxxaffTpG5G2GW2sy9A5jEygcuDz5wTjS5KnKoXlI8qaDrfeIiB/hBZMDtAM
-KmFvCQ2AO3xDtxzHPphEhPZx793S7pqru+egtBtSZWtvciBUZXN0IDx0ZXN0QHJl
-a29yLmRldj6JAdQEEwEIAD4WIQRpDIZrWp/rSB21PSTYo+vASJM64gUCX/XWDQIb
-AwUJA8JnAAULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRDYo+vASJM64j/9C/4q
-2iKsQBcOofjH7PklIlV1asTJP8Uxp2giPXnwgcfWYDGs+e/oHMjkmWwyXUkE0ki7
-N4SB4m6ztfljkTsOPUFVcDtcjj2ScOx5lsrDW8wPMwiJpFM62HkJfg7mrAqDquTB
-iue5X+9OFbxOBSRti9w+H5Uiw/jaChxUKpaDW5qtZiYEkgKpbEK03jFkewtu8SWD
-zoFt2gMKSHg6btz+hqdrqA1R/n4Z5LbBuWk+hf+N7FGO9clQWoZrRr5qorSfOpQO
-/7S4U5UN4w/IL2OtuPfajHb91aH9q81eddclutOS5kAzYLHgytHSVHIw8QJiqsbe
-YqudCcYHo7aNRlpbIXnE6+FQqa7+hZd5Cv8IQgQngDiAi+C0khYKo3riTwORvlam
-CqC30lzlNWxkFJzfW0E88B4j3rOFeqaXhIohPtxKr68vGVsuIMCnCOsbYfyoUiZm
-RGc4tVAbCuwWJe+OoZEKsS0m6tY6CjT0ugpb+oxqQvyj2eB1cK0i0aiBrAuQCZWd
-BVgEX/XWDQEMAKjSmPaQJdE9+4c+uuZ3plwfwodEY5nPG1qIQWj7LmvCtYQWwex/
-rOPE0ec/6UdlrUSjiAQ0mV5JkdN2QRoxRGy8JsrLAnXadXeO3HI9SpuZaKvsUg5d
-apvdJqcDWlzz/LoA2rl+Z/wo3q2Wx9rh/RHqPLWBUiJSkIlANsshatu9N2Mj5ody
-defGn8gnj6b0JZRpUskyg4/9Wzns/w4OWql3CVm0BIGn6Tt/EplI7eCZg4VvujWN
-T0gydK75hkbGkHE1Z45kBZU26Uge+YEyJ0PFcaXE/kCNetPOtsUz/tO+h6ZLJECI
-lZlnG5/KxOGhoS3fG9F/XfyE3DNQE6qx7CuC6cWm+92wLlPz/Ir0iKTV0tPZLCgu
-5rSNuSJyjTy71nMksFaVJxjb7PZHMbQPXEIbcIX4AvEGV0Icwsh+e6/yXlTgxux9
-RszqyS1LHydtQLvx5X84d9iENkoGGNfVH99i2P1CrTbZ2v3KCnhvy+cTVLjW82XV
-WploktfbdC55TQARAQABAAv+KR1e8N9ywlaK0SmDGZlGq/V1Kf3LFvykMARyj6dq
-qwZYsBJdyKPgfnki2KONQ9zcmZSNDd8kgdy/dcU9PiyE+klJVkaiMwMQ7BzgDbdl
-Ged+4S303vg7vDlcDj0oDu7B3CfUnOvO1c+7SYHo6uLyP+BwyBB2aRL8Dd0UaxyY
-mmrm2A94d4C1+8w5AiU2XEXl+BK9fW/+r/zXMJCKHkl7JX3uykin906mI94C8M9c
-1X/1krP+4MdpKU9WcP2miMqXIhm09rF09YDY1qLRBhvKWnaDDDjBSmIxIAc2AyCe
-JbmFzLVXynduhxhplmOMDD2aIQNfxfiw2E+jq4MLgIGhrNV+yMGOInzMwT0qguB4
-bJllfk7f7ikqwBva9hdC3pUx4zOogJyTkcBH/ETm7b1L26DyJkxlln/Je2Qr64aX
-t5bhx/Y8rC7jVxYYwtIPKtn3zppwNFL3Vysg47BpYM6aAz0AZSKm+Y6jAi2/tWtV
-jhFvQWRPBaDuMS7dzcnb4TY5BgDJ/lG27MpNMEYU5zqWQ7capmYTk8AV6nH+r5cm
-QpoWld5p0gFw6qnjeJ1Q3XZs7QlPq0RQrXzjT3Drhu5XNjqeqQGDH6YY39OQrTSS
-/1BhFhiWUMBpyqv4lc8ytJjbkgg0daNubrIKynwZ/H8Gy3vRe2rHjqaApcwQ5Fwc
-Iy8FPeQI95rnw34b/0dohkxjz6ULJahdksVggI0NS312awjg6TlQx1V3Lv7hbuOE
-Qv1p3kedwr4MgnVe0fZw6Y3ehukGANX13UKtkw6sHjO7h87F9qR5Wb47Rnb12oDa
-fZHmn2jLDAr8Sius1mHFJie9nlXRvBxtVpjyliJxjg0hYc04PLdVKvGFP2a4WQep
-WM+r3fU/Snuhn3VAI2ibMXgFUHW9ofxmhGhdDWImFnU7lvh4U+yoD8vqe9FPFMhu
-zCrGSTo7Qy8PTKCzCf3frSPt3TorFrUOa5PBpq1/fOhLAQzpVC7F+hXZ/kIAWTVm
-wSIilPk7TSVJdd07bsfNQt88xtJoxQX+OgRb8yK+pSluQxii6IgVwFWslOxuZn/O
-Eg9nPh4VAlVGYCh/oleRTLZH+a73p9VQwUzmPjXUDkUFcdM0zysU4HmTZbwTZCQJ
-608IqC+p9D6u289bdsBsCDzA6LAhEgU4vj6Zfm0N3MqEWBDuBOt9McwY1Mbo8jbp
-slVnkz2B6Rw9UkMzQNVxRFCHfIWhPvbiWeiLQPD31Bs6hdBCzn44k75/+0qyBX0a
-Jk8Wmv4z2vR7dh4ABRm4pfZx4IsFbWBS4sSJAbwEGAEIACYWIQRpDIZrWp/rSB21
-PSTYo+vASJM64gUCX/XWDQIbDAUJA8JnAAAKCRDYo+vASJM64mceDACSkr9gsNRc
-OOcnzglYJtmvtAG27ziVS6/ywGPxyZtyIwfEg8JVnIXuB0Fog1/uuZDdjiz4QO3j
-Os9E8z8i6AUKdJgPjxlcr585lSLtKiz7TTPTDmKCF8aga2Gc6+yfjI92F0fEuGh5
-GjdQu76x6hLPYT6+pjrvjmXq8gF030jTOiQ2n6o9oH7aQhehEIFsrQdtKh9ZrhWN
-QWa1P4iPlzPf+Y7sG7irZqcm4wa/U+qxQPNVcA9FUziymPtbMGlqN4x2Z3Jr3VUP
-QFhwXF6U8BM3ldZDNPmmB9OKlsDCR/7+AvwJ52hRxAzIm/lhuXj1xPj5JFuUErAX
-aBIJN0iaJaXVB+JFbzXT1DLhqCR1T37zZSKnLMSKtvIe9UOO6Jy4mgX6CDjPM/Vu
-9aJhzqmaVUbZOYwJh5ojrWLzswv1K9CdcmDEaK4X/u1z+eWiNjsHE3pzUiq4DJhb
-T4CBiqLxHYsQ9n8dT95t+poqJ10PVFkehb+8kh05e3ENd4xpkkdTfIY=
-=CwjQ
------END PGP PRIVATE KEY BLOCK-----`
-
-var PubKey = `-----BEGIN PGP PUBLIC KEY BLOCK-----
-
-mQGNBF/11g0BDADciiDQKYjWjIZYTFC55kzaf3H7VcjKb7AdBSyHsN8OIZvLkbgx
-1M5x+JPVXCiBEJMjp7YCVJeTQYixic4Ep+YeC8zIdP8ZcvLD9bgFumws+TBJMY7w
-2cy3oPv/uVW4TRFv42PwKjO/sXpRg1gJx3EX2FJV+aYAPd8Z6pHxuOk6J49wLY1E
-3hl1ZrPGUGsF4l7tVHniZG8IzTCgJGC6qrlsg1VGrIkactesr7U6+Xs4VJgNIdCs
-2/7RqwWAtkSHumAKBe1hNY2ddt3p42jEM0P2g7Uwao7/ziSiS/N96dkEAdWCT99/
-e0qLC4q6VisrFvdmfDQrY73eadL6Jf38H2IUpNrcHgVZtEBGhD6dOcjs2YBZNfX3
-wfDJooRk0efcLlSFT1YVZhxez/zZTd+7nReKPmsOxiaUmP/bQSB4FZZ4ZxsfxH2t
-wgX4dtwRV28JGHeA/ISJiWMQKrci1PRhRWF32EaE6dF+2VJwGi9mssEkAA+YHh1O
-HjPgosqFp16rb1MAEQEAAbQbUmVrb3IgVGVzdCA8dGVzdEByZWtvci5kZXY+iQHU
-BBMBCAA+FiEEaQyGa1qf60gdtT0k2KPrwEiTOuIFAl/11g0CGwMFCQPCZwAFCwkI
-BwIGFQoJCAsCBBYCAwECHgECF4AACgkQ2KPrwEiTOuI//Qv+KtoirEAXDqH4x+z5
-JSJVdWrEyT/FMadoIj158IHH1mAxrPnv6BzI5JlsMl1JBNJIuzeEgeJus7X5Y5E7
-Dj1BVXA7XI49knDseZbKw1vMDzMIiaRTOth5CX4O5qwKg6rkwYrnuV/vThW8TgUk
-bYvcPh+VIsP42gocVCqWg1uarWYmBJICqWxCtN4xZHsLbvElg86BbdoDCkh4Om7c
-/oana6gNUf5+GeS2wblpPoX/jexRjvXJUFqGa0a+aqK0nzqUDv+0uFOVDeMPyC9j
-rbj32ox2/dWh/avNXnXXJbrTkuZAM2Cx4MrR0lRyMPECYqrG3mKrnQnGB6O2jUZa
-WyF5xOvhUKmu/oWXeQr/CEIEJ4A4gIvgtJIWCqN64k8Dkb5Wpgqgt9Jc5TVsZBSc
-31tBPPAeI96zhXqml4SKIT7cSq+vLxlbLiDApwjrG2H8qFImZkRnOLVQGwrsFiXv
-jqGRCrEtJurWOgo09LoKW/qMakL8o9ngdXCtItGogawLkAmVuQGNBF/11g0BDACo
-0pj2kCXRPfuHPrrmd6ZcH8KHRGOZzxtaiEFo+y5rwrWEFsHsf6zjxNHnP+lHZa1E
-o4gENJleSZHTdkEaMURsvCbKywJ12nV3jtxyPUqbmWir7FIOXWqb3SanA1pc8/y6
-ANq5fmf8KN6tlsfa4f0R6jy1gVIiUpCJQDbLIWrbvTdjI+aHcnXnxp/IJ4+m9CWU
-aVLJMoOP/Vs57P8ODlqpdwlZtASBp+k7fxKZSO3gmYOFb7o1jU9IMnSu+YZGxpBx
-NWeOZAWVNulIHvmBMidDxXGlxP5AjXrTzrbFM/7TvoemSyRAiJWZZxufysThoaEt
-3xvRf138hNwzUBOqsewrgunFpvvdsC5T8/yK9Iik1dLT2SwoLua0jbkico08u9Zz
-JLBWlScY2+z2RzG0D1xCG3CF+ALxBldCHMLIfnuv8l5U4MbsfUbM6sktSx8nbUC7
-8eV/OHfYhDZKBhjX1R/fYtj9Qq022dr9ygp4b8vnE1S41vNl1VqZaJLX23QueU0A
-EQEAAYkBvAQYAQgAJhYhBGkMhmtan+tIHbU9JNij68BIkzriBQJf9dYNAhsMBQkD
-wmcAAAoJENij68BIkzriZx4MAJKSv2Cw1Fw45yfOCVgm2a+0AbbvOJVLr/LAY/HJ
-m3IjB8SDwlWche4HQWiDX+65kN2OLPhA7eM6z0TzPyLoBQp0mA+PGVyvnzmVIu0q
-LPtNM9MOYoIXxqBrYZzr7J+Mj3YXR8S4aHkaN1C7vrHqEs9hPr6mOu+OZeryAXTf
-SNM6JDafqj2gftpCF6EQgWytB20qH1muFY1BZrU/iI+XM9/5juwbuKtmpybjBr9T
-6rFA81VwD0VTOLKY+1swaWo3jHZncmvdVQ9AWHBcXpTwEzeV1kM0+aYH04qWwMJH
-/v4C/AnnaFHEDMib+WG5ePXE+PkkW5QSsBdoEgk3SJolpdUH4kVvNdPUMuGoJHVP
-fvNlIqcsxIq28h71Q47onLiaBfoIOM8z9W71omHOqZpVRtk5jAmHmiOtYvOzC/Ur
-0J1yYMRorhf+7XP55aI2OwcTenNSKrgMmFtPgIGKovEdixD2fx1P3m36mionXQ9U
-WR6Fv7ySHTl7cQ13jGmSR1N8hg==
-=Fen+
------END PGP PUBLIC KEY BLOCK-----`
-
-func init() {
- p := os.Getenv("REKORTMPDIR")
- if p != "" {
- cli = path.Join(p, cli)
- server = path.Join(p, server)
- }
- var err error
- keys, err = openpgp.ReadArmoredKeyRing(strings.NewReader(secretKey))
- if err != nil {
- panic(err)
- }
-}
-
-func OutputContains(t *testing.T, output, sub string) {
- t.Helper()
- if !strings.Contains(output, sub) {
- t.Errorf("Expected [%s] in response, got %s", sub, output)
- }
-}
-
-func Run(t *testing.T, stdin, cmd string, arg ...string) string {
- t.Helper()
- arg = append([]string{coverageFlag()}, arg...)
- c := exec.Command(cmd, arg...)
- if stdin != "" {
- c.Stdin = strings.NewReader(stdin)
- }
- if os.Getenv("REKORTMPDIR") != "" {
- // ensure that we use a clean state.json file for each Run
- c.Env = append(c.Env, "HOME="+os.Getenv("REKORTMPDIR"))
- }
- b, err := c.CombinedOutput()
- if err != nil {
- t.Log(string(b))
- t.Fatal(err)
- }
- return stripCoverageOutput(string(b))
-}
-
-func RunCli(t *testing.T, arg ...string) string {
- t.Helper()
- arg = append(arg, rekorServerFlag())
- // use a blank config file to ensure no collision
- if os.Getenv("REKORTMPDIR") != "" {
- arg = append(arg, "--config="+os.Getenv("REKORTMPDIR")+".rekor.yaml")
- }
- return Run(t, "", cli, arg...)
-}
-
-func RunCliErr(t *testing.T, arg ...string) string {
- t.Helper()
- arg = append([]string{coverageFlag()}, arg...)
- arg = append(arg, rekorServerFlag())
- // use a blank config file to ensure no collision
- if os.Getenv("REKORTMPDIR") != "" {
- arg = append(arg, "--config="+os.Getenv("REKORTMPDIR")+".rekor.yaml")
- }
- cmd := exec.Command(cli, arg...)
- b, err := cmd.CombinedOutput()
- if err == nil {
- t.Log(string(b))
- t.Fatalf("expected error, got %s", string(b))
- }
- return stripCoverageOutput(string(b))
-}
-
-func rekorServerFlag() string {
- return fmt.Sprintf("--rekor_server=%s", rekorServer())
-}
-
-func rekorServer() string {
- if s := os.Getenv("REKOR_SERVER"); s != "" {
- return s
- }
- return "http://localhost:3000"
-}
-
-func coverageFlag() string {
- return "-test.coverprofile=/tmp/pkg-rekor-cli." + RandomSuffix(8) + ".cov"
-}
-
-func stripCoverageOutput(out string) string {
- return strings.Split(strings.Split(out, "PASS")[0], "FAIL")[0]
-}
-
-// RandomSuffix returns a random string of the given length.
-func RandomSuffix(n int) string {
- const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
-
- b := make([]byte, n)
- for i := range b {
- b[i] = letterBytes[rand.Intn(len(letterBytes))]
- }
- return string(b)
-}
-
-// RandomData returns a random byte slice of the given size.
-func RandomData(t *testing.T, n int) []byte {
- t.Helper()
- rand.Seed(time.Now().UnixNano())
- data := make([]byte, n)
- if _, err := rand.Read(data[:]); err != nil {
- t.Fatal(err)
- }
- return data
-}
-
-func CreateArtifact(t *testing.T, artifactPath string) string {
- t.Helper()
- // First let's generate some random data so we don't have to worry about dupes.
- data := RandomData(t, 100)
-
- artifact := base64.StdEncoding.EncodeToString(data[:])
- // Write this to a file
- write(t, artifact, artifactPath)
- return artifact
-}
-
-func write(t *testing.T, data string, path string) {
- t.Helper()
- if err := ioutil.WriteFile(path, []byte(data), 0644); err != nil {
- t.Fatal(err)
- }
-}
-
-func GetUUIDFromUploadOutput(t *testing.T, out string) string {
- t.Helper()
- // Output looks like "Artifact timestamped at ...\m Wrote response \n Created entry at index X, available at $URL/UUID", so grab the UUID:
- urlTokens := strings.Split(strings.TrimSpace(out), " ")
- url := urlTokens[len(urlTokens)-1]
- splitUrl := strings.Split(url, "/")
- return splitUrl[len(splitUrl)-1]
-}
-func SignPGP(b []byte) ([]byte, error) {
- var buf bytes.Buffer
- if err := openpgp.DetachSign(&buf, keys[0], bytes.NewReader(b), nil); err != nil {
- return nil, err
- }
- return buf.Bytes(), nil
-}
-func Write(t *testing.T, data string, path string) {
- t.Helper()
- if err := ioutil.WriteFile(path, []byte(data), 0644); err != nil {
- t.Fatal(err)
- }
-}
-
-// CreatedPGPSignedArtifact gets the test dir setup correctly with some random artifacts and keys.
-func CreatedPGPSignedArtifact(t *testing.T, artifactPath, sigPath string) {
- t.Helper()
- artifact := CreateArtifact(t, artifactPath)
-
- // Sign it with our key and write that to a file
- signature, err := SignPGP([]byte(artifact))
- if err != nil {
- t.Fatal(err)
- }
- if err := ioutil.WriteFile(sigPath, signature, 0644); err != nil {
- t.Fatal(err)
- }
-}
-
-func GetUUIDFromTimestampOutput(t *testing.T, out string) string {
- t.Helper()
- // Output looks like "Created entry at index X, available at $URL/UUID", so grab the UUID:
- urlTokens := strings.Split(strings.TrimSpace(out), "\n")
- return GetUUIDFromUploadOutput(t, urlTokens[len(urlTokens)-1])
-}
-
-// SetupTestData is a helper function to setups the test data
-func SetupTestData(t *testing.T) {
- // create a temp directory
- artifactPath := filepath.Join(t.TempDir(), "artifact")
- // create a temp file
- sigPath := filepath.Join(t.TempDir(), "signature.asc")
- CreatedPGPSignedArtifact(t, artifactPath, sigPath)
-
- // Write the public key to a file
- pubPath := filepath.Join(t.TempDir(), "pubKey.asc")
- if err := ioutil.WriteFile(pubPath, []byte(PubKey), 0644); err != nil { //nolint:gosec
- t.Fatal(err)
- }
-
- // Now upload to rekor!
- out := RunCli(t, "upload", "--artifact", artifactPath, "--signature", sigPath, "--public-key", pubPath)
- OutputContains(t, out, "Created entry at")
-}
diff --git a/vendor/go.etcd.io/bbolt/.go-version b/vendor/go.etcd.io/bbolt/.go-version
index 63f23d2af5..b6773170a5 100644
--- a/vendor/go.etcd.io/bbolt/.go-version
+++ b/vendor/go.etcd.io/bbolt/.go-version
@@ -1 +1 @@
-1.23.9
+1.23.10
diff --git a/vendor/go.etcd.io/bbolt/bolt_aix.go b/vendor/go.etcd.io/bbolt/bolt_aix.go
index 4b424ed4c4..596e540602 100644
--- a/vendor/go.etcd.io/bbolt/bolt_aix.go
+++ b/vendor/go.etcd.io/bbolt/bolt_aix.go
@@ -9,6 +9,8 @@ import (
"unsafe"
"golang.org/x/sys/unix"
+
+ "go.etcd.io/bbolt/internal/common"
)
// flock acquires an advisory lock on a file descriptor.
@@ -69,7 +71,7 @@ func mmap(db *DB, sz int) error {
// Save the original byte slice and convert to a byte array pointer.
db.dataref = b
- db.data = (*[maxMapSize]byte)(unsafe.Pointer(&b[0]))
+ db.data = (*[common.MaxMapSize]byte)(unsafe.Pointer(&b[0]))
db.datasz = sz
return nil
}
diff --git a/vendor/go.etcd.io/bbolt/bolt_android.go b/vendor/go.etcd.io/bbolt/bolt_android.go
index 11890f0d70..ac64fcf5b2 100644
--- a/vendor/go.etcd.io/bbolt/bolt_android.go
+++ b/vendor/go.etcd.io/bbolt/bolt_android.go
@@ -7,6 +7,8 @@ import (
"unsafe"
"golang.org/x/sys/unix"
+
+ "go.etcd.io/bbolt/internal/common"
)
// flock acquires an advisory lock on a file descriptor.
@@ -69,7 +71,7 @@ func mmap(db *DB, sz int) error {
// Save the original byte slice and convert to a byte array pointer.
db.dataref = b
- db.data = (*[maxMapSize]byte)(unsafe.Pointer(&b[0]))
+ db.data = (*[common.MaxMapSize]byte)(unsafe.Pointer(&b[0]))
db.datasz = sz
return nil
}
diff --git a/vendor/go.etcd.io/bbolt/bolt_solaris.go b/vendor/go.etcd.io/bbolt/bolt_solaris.go
index babad65786..56b2ccab47 100644
--- a/vendor/go.etcd.io/bbolt/bolt_solaris.go
+++ b/vendor/go.etcd.io/bbolt/bolt_solaris.go
@@ -7,6 +7,8 @@ import (
"unsafe"
"golang.org/x/sys/unix"
+
+ "go.etcd.io/bbolt/internal/common"
)
// flock acquires an advisory lock on a file descriptor.
@@ -67,7 +69,7 @@ func mmap(db *DB, sz int) error {
// Save the original byte slice and convert to a byte array pointer.
db.dataref = b
- db.data = (*[maxMapSize]byte)(unsafe.Pointer(&b[0]))
+ db.data = (*[common.MaxMapSize]byte)(unsafe.Pointer(&b[0]))
db.datasz = sz
return nil
}
diff --git a/vendor/go.etcd.io/bbolt/tx.go b/vendor/go.etcd.io/bbolt/tx.go
index 5eb383c4b8..7123ded8fe 100644
--- a/vendor/go.etcd.io/bbolt/tx.go
+++ b/vendor/go.etcd.io/bbolt/tx.go
@@ -561,10 +561,13 @@ func (tx *Tx) writeMeta() error {
tx.meta.Write(p)
// Write the meta page to file.
+ tx.db.metalock.Lock()
if _, err := tx.db.ops.writeAt(buf, int64(p.Id())*int64(tx.db.pageSize)); err != nil {
+ tx.db.metalock.Unlock()
lg.Errorf("writeAt failed, pgid: %d, pageSize: %d, error: %v", p.Id(), tx.db.pageSize, err)
return err
}
+ tx.db.metalock.Unlock()
if !tx.db.NoSync || common.IgnoreNoSync {
// gofail: var beforeSyncMetaPage struct{}
if err := fdatasync(tx.db); err != nil {
diff --git a/vendor/go.mongodb.org/mongo-driver/LICENSE b/vendor/go.mongodb.org/mongo-driver/LICENSE
deleted file mode 100644
index 261eeb9e9f..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bson.go b/vendor/go.mongodb.org/mongo-driver/bson/bson.go
deleted file mode 100644
index a0d8185826..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bson.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-//
-// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer
-// See THIRD-PARTY-NOTICES for original license terms.
-
-package bson // import "go.mongodb.org/mongo-driver/bson"
-
-import (
- "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-// Zeroer allows custom struct types to implement a report of zero
-// state. All struct types that don't implement Zeroer or where IsZero
-// returns false are considered to be not zero.
-type Zeroer interface {
- IsZero() bool
-}
-
-// D is an ordered representation of a BSON document. This type should be used when the order of the elements matters,
-// such as MongoDB command documents. If the order of the elements does not matter, an M should be used instead.
-//
-// A D should not be constructed with duplicate key names, as that can cause undefined server behavior.
-//
-// Example usage:
-//
-// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
-type D = primitive.D
-
-// E represents a BSON element for a D. It is usually used inside a D.
-type E = primitive.E
-
-// M is an unordered representation of a BSON document. This type should be used when the order of the elements does not
-// matter. This type is handled as a regular map[string]interface{} when encoding and decoding. Elements will be
-// serialized in an undefined, random order. If the order of the elements matters, a D should be used instead.
-//
-// Example usage:
-//
-// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
-type M = primitive.M
-
-// An A is an ordered representation of a BSON array.
-//
-// Example usage:
-//
-// bson.A{"bar", "world", 3.14159, bson.D{{"qux", 12345}}}
-type A = primitive.A
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go
deleted file mode 100644
index 652aa48b85..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
-)
-
-// ArrayCodec is the Codec used for bsoncore.Array values.
-//
-// Deprecated: ArrayCodec will not be directly accessible in Go Driver 2.0.
-type ArrayCodec struct{}
-
-var defaultArrayCodec = NewArrayCodec()
-
-// NewArrayCodec returns an ArrayCodec.
-//
-// Deprecated: NewArrayCodec will not be available in Go Driver 2.0. See
-// [ArrayCodec] for more details.
-func NewArrayCodec() *ArrayCodec {
- return &ArrayCodec{}
-}
-
-// EncodeValue is the ValueEncoder for bsoncore.Array values.
-func (ac *ArrayCodec) EncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tCoreArray {
- return ValueEncoderError{Name: "CoreArrayEncodeValue", Types: []reflect.Type{tCoreArray}, Received: val}
- }
-
- arr := val.Interface().(bsoncore.Array)
- return bsonrw.Copier{}.CopyArrayFromBytes(vw, arr)
-}
-
-// DecodeValue is the ValueDecoder for bsoncore.Array values.
-func (ac *ArrayCodec) DecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tCoreArray {
- return ValueDecoderError{Name: "CoreArrayDecodeValue", Types: []reflect.Type{tCoreArray}, Received: val}
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeSlice(val.Type(), 0, 0))
- }
-
- val.SetLen(0)
- arr, err := bsonrw.Copier{}.AppendArrayBytes(val.Interface().(bsoncore.Array), vr)
- val.Set(reflect.ValueOf(arr))
- return err
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go
deleted file mode 100644
index 0693bd432f..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go
+++ /dev/null
@@ -1,382 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec // import "go.mongodb.org/mongo-driver/bson/bsoncodec"
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-var (
- emptyValue = reflect.Value{}
-)
-
-// Marshaler is an interface implemented by types that can marshal themselves
-// into a BSON document represented as bytes. The bytes returned must be a valid
-// BSON document if the error is nil.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Marshaler] instead.
-type Marshaler interface {
- MarshalBSON() ([]byte, error)
-}
-
-// ValueMarshaler is an interface implemented by types that can marshal
-// themselves into a BSON value as bytes. The type must be the valid type for
-// the bytes returned. The bytes and byte type together must be valid if the
-// error is nil.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.ValueMarshaler] instead.
-type ValueMarshaler interface {
- MarshalBSONValue() (bsontype.Type, []byte, error)
-}
-
-// Unmarshaler is an interface implemented by types that can unmarshal a BSON
-// document representation of themselves. The BSON bytes can be assumed to be
-// valid. UnmarshalBSON must copy the BSON bytes if it wishes to retain the data
-// after returning.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Unmarshaler] instead.
-type Unmarshaler interface {
- UnmarshalBSON([]byte) error
-}
-
-// ValueUnmarshaler is an interface implemented by types that can unmarshal a
-// BSON value representation of themselves. The BSON bytes and type can be
-// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
-// wishes to retain the data after returning.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.ValueUnmarshaler] instead.
-type ValueUnmarshaler interface {
- UnmarshalBSONValue(bsontype.Type, []byte) error
-}
-
-// ValueEncoderError is an error returned from a ValueEncoder when the provided value can't be
-// encoded by the ValueEncoder.
-type ValueEncoderError struct {
- Name string
- Types []reflect.Type
- Kinds []reflect.Kind
- Received reflect.Value
-}
-
-func (vee ValueEncoderError) Error() string {
- typeKinds := make([]string, 0, len(vee.Types)+len(vee.Kinds))
- for _, t := range vee.Types {
- typeKinds = append(typeKinds, t.String())
- }
- for _, k := range vee.Kinds {
- if k == reflect.Map {
- typeKinds = append(typeKinds, "map[string]*")
- continue
- }
- typeKinds = append(typeKinds, k.String())
- }
- received := vee.Received.Kind().String()
- if vee.Received.IsValid() {
- received = vee.Received.Type().String()
- }
- return fmt.Sprintf("%s can only encode valid %s, but got %s", vee.Name, strings.Join(typeKinds, ", "), received)
-}
-
-// ValueDecoderError is an error returned from a ValueDecoder when the provided value can't be
-// decoded by the ValueDecoder.
-type ValueDecoderError struct {
- Name string
- Types []reflect.Type
- Kinds []reflect.Kind
- Received reflect.Value
-}
-
-func (vde ValueDecoderError) Error() string {
- typeKinds := make([]string, 0, len(vde.Types)+len(vde.Kinds))
- for _, t := range vde.Types {
- typeKinds = append(typeKinds, t.String())
- }
- for _, k := range vde.Kinds {
- if k == reflect.Map {
- typeKinds = append(typeKinds, "map[string]*")
- continue
- }
- typeKinds = append(typeKinds, k.String())
- }
- received := vde.Received.Kind().String()
- if vde.Received.IsValid() {
- received = vde.Received.Type().String()
- }
- return fmt.Sprintf("%s can only decode valid and settable %s, but got %s", vde.Name, strings.Join(typeKinds, ", "), received)
-}
-
-// EncodeContext is the contextual information required for a Codec to encode a
-// value.
-type EncodeContext struct {
- *Registry
-
- // MinSize causes the Encoder to marshal Go integer values (int, int8, int16, int32, int64,
- // uint, uint8, uint16, uint32, or uint64) as the minimum BSON int size (either 32 or 64 bits)
- // that can represent the integer value.
- //
- // Deprecated: Use bson.Encoder.IntMinSize instead.
- MinSize bool
-
- errorOnInlineDuplicates bool
- stringifyMapKeysWithFmt bool
- nilMapAsEmpty bool
- nilSliceAsEmpty bool
- nilByteSliceAsEmpty bool
- omitZeroStruct bool
- useJSONStructTags bool
-}
-
-// ErrorOnInlineDuplicates causes the Encoder to return an error if there is a duplicate field in
-// the marshaled BSON when the "inline" struct tag option is set.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.ErrorOnInlineDuplicates] instead.
-func (ec *EncodeContext) ErrorOnInlineDuplicates() {
- ec.errorOnInlineDuplicates = true
-}
-
-// StringifyMapKeysWithFmt causes the Encoder to convert Go map keys to BSON document field name
-// strings using fmt.Sprintf() instead of the default string conversion logic.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.StringifyMapKeysWithFmt] instead.
-func (ec *EncodeContext) StringifyMapKeysWithFmt() {
- ec.stringifyMapKeysWithFmt = true
-}
-
-// NilMapAsEmpty causes the Encoder to marshal nil Go maps as empty BSON documents instead of BSON
-// null.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.NilMapAsEmpty] instead.
-func (ec *EncodeContext) NilMapAsEmpty() {
- ec.nilMapAsEmpty = true
-}
-
-// NilSliceAsEmpty causes the Encoder to marshal nil Go slices as empty BSON arrays instead of BSON
-// null.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.NilSliceAsEmpty] instead.
-func (ec *EncodeContext) NilSliceAsEmpty() {
- ec.nilSliceAsEmpty = true
-}
-
-// NilByteSliceAsEmpty causes the Encoder to marshal nil Go byte slices as empty BSON binary values
-// instead of BSON null.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.NilByteSliceAsEmpty] instead.
-func (ec *EncodeContext) NilByteSliceAsEmpty() {
- ec.nilByteSliceAsEmpty = true
-}
-
-// OmitZeroStruct causes the Encoder to consider the zero value for a struct (e.g. MyStruct{})
-// as empty and omit it from the marshaled BSON when the "omitempty" struct tag option is set.
-//
-// Note that the Encoder only examines exported struct fields when determining if a struct is the
-// zero value. It considers pointers to a zero struct value (e.g. &MyStruct{}) not empty.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.OmitZeroStruct] instead.
-func (ec *EncodeContext) OmitZeroStruct() {
- ec.omitZeroStruct = true
-}
-
-// UseJSONStructTags causes the Encoder to fall back to using the "json" struct tag if a "bson"
-// struct tag is not specified.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.UseJSONStructTags] instead.
-func (ec *EncodeContext) UseJSONStructTags() {
- ec.useJSONStructTags = true
-}
-
-// DecodeContext is the contextual information required for a Codec to decode a
-// value.
-type DecodeContext struct {
- *Registry
-
- // Truncate, if true, instructs decoders to to truncate the fractional part of BSON "double"
- // values when attempting to unmarshal them into a Go integer (int, int8, int16, int32, int64,
- // uint, uint8, uint16, uint32, or uint64) struct field. The truncation logic does not apply to
- // BSON "decimal128" values.
- //
- // Deprecated: Use bson.Decoder.AllowTruncatingDoubles instead.
- Truncate bool
-
- // Ancestor is the type of a containing document. This is mainly used to determine what type
- // should be used when decoding an embedded document into an empty interface. For example, if
- // Ancestor is a bson.M, BSON embedded document values being decoded into an empty interface
- // will be decoded into a bson.M.
- //
- // Deprecated: Use bson.Decoder.DefaultDocumentM or bson.Decoder.DefaultDocumentD instead.
- Ancestor reflect.Type
-
- // defaultDocumentType specifies the Go type to decode top-level and nested BSON documents into. In particular, the
- // usage for this field is restricted to data typed as "interface{}" or "map[string]interface{}". If DocumentType is
- // set to a type that a BSON document cannot be unmarshaled into (e.g. "string"), unmarshalling will result in an
- // error. DocumentType overrides the Ancestor field.
- defaultDocumentType reflect.Type
-
- binaryAsSlice bool
- useJSONStructTags bool
- useLocalTimeZone bool
- zeroMaps bool
- zeroStructs bool
-}
-
-// BinaryAsSlice causes the Decoder to unmarshal BSON binary field values that are the "Generic" or
-// "Old" BSON binary subtype as a Go byte slice instead of a primitive.Binary.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.BinaryAsSlice] instead.
-func (dc *DecodeContext) BinaryAsSlice() {
- dc.binaryAsSlice = true
-}
-
-// UseJSONStructTags causes the Decoder to fall back to using the "json" struct tag if a "bson"
-// struct tag is not specified.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.UseJSONStructTags] instead.
-func (dc *DecodeContext) UseJSONStructTags() {
- dc.useJSONStructTags = true
-}
-
-// UseLocalTimeZone causes the Decoder to unmarshal time.Time values in the local timezone instead
-// of the UTC timezone.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.UseLocalTimeZone] instead.
-func (dc *DecodeContext) UseLocalTimeZone() {
- dc.useLocalTimeZone = true
-}
-
-// ZeroMaps causes the Decoder to delete any existing values from Go maps in the destination value
-// passed to Decode before unmarshaling BSON documents into them.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.ZeroMaps] instead.
-func (dc *DecodeContext) ZeroMaps() {
- dc.zeroMaps = true
-}
-
-// ZeroStructs causes the Decoder to delete any existing values from Go structs in the destination
-// value passed to Decode before unmarshaling BSON documents into them.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.ZeroStructs] instead.
-func (dc *DecodeContext) ZeroStructs() {
- dc.zeroStructs = true
-}
-
-// DefaultDocumentM causes the Decoder to always unmarshal documents into the primitive.M type. This
-// behavior is restricted to data typed as "interface{}" or "map[string]interface{}".
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.DefaultDocumentM] instead.
-func (dc *DecodeContext) DefaultDocumentM() {
- dc.defaultDocumentType = reflect.TypeOf(primitive.M{})
-}
-
-// DefaultDocumentD causes the Decoder to always unmarshal documents into the primitive.D type. This
-// behavior is restricted to data typed as "interface{}" or "map[string]interface{}".
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.DefaultDocumentD] instead.
-func (dc *DecodeContext) DefaultDocumentD() {
- dc.defaultDocumentType = reflect.TypeOf(primitive.D{})
-}
-
-// ValueCodec is an interface for encoding and decoding a reflect.Value.
-// values.
-//
-// Deprecated: Use [ValueEncoder] and [ValueDecoder] instead.
-type ValueCodec interface {
- ValueEncoder
- ValueDecoder
-}
-
-// ValueEncoder is the interface implemented by types that can handle the encoding of a value.
-type ValueEncoder interface {
- EncodeValue(EncodeContext, bsonrw.ValueWriter, reflect.Value) error
-}
-
-// ValueEncoderFunc is an adapter function that allows a function with the correct signature to be
-// used as a ValueEncoder.
-type ValueEncoderFunc func(EncodeContext, bsonrw.ValueWriter, reflect.Value) error
-
-// EncodeValue implements the ValueEncoder interface.
-func (fn ValueEncoderFunc) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- return fn(ec, vw, val)
-}
-
-// ValueDecoder is the interface implemented by types that can handle the decoding of a value.
-type ValueDecoder interface {
- DecodeValue(DecodeContext, bsonrw.ValueReader, reflect.Value) error
-}
-
-// ValueDecoderFunc is an adapter function that allows a function with the correct signature to be
-// used as a ValueDecoder.
-type ValueDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) error
-
-// DecodeValue implements the ValueDecoder interface.
-func (fn ValueDecoderFunc) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- return fn(dc, vr, val)
-}
-
-// typeDecoder is the interface implemented by types that can handle the decoding of a value given its type.
-type typeDecoder interface {
- decodeType(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error)
-}
-
-// typeDecoderFunc is an adapter function that allows a function with the correct signature to be used as a typeDecoder.
-type typeDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error)
-
-func (fn typeDecoderFunc) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- return fn(dc, vr, t)
-}
-
-// decodeAdapter allows two functions with the correct signatures to be used as both a ValueDecoder and typeDecoder.
-type decodeAdapter struct {
- ValueDecoderFunc
- typeDecoderFunc
-}
-
-var _ ValueDecoder = decodeAdapter{}
-var _ typeDecoder = decodeAdapter{}
-
-// decodeTypeOrValue calls decoder.decodeType is decoder is a typeDecoder. Otherwise, it allocates a new element of type
-// t and calls decoder.DecodeValue on it.
-func decodeTypeOrValue(decoder ValueDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- td, _ := decoder.(typeDecoder)
- return decodeTypeOrValueWithInfo(decoder, td, dc, vr, t, true)
-}
-
-func decodeTypeOrValueWithInfo(vd ValueDecoder, td typeDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type, convert bool) (reflect.Value, error) {
- if td != nil {
- val, err := td.decodeType(dc, vr, t)
- if err == nil && convert && val.Type() != t {
- // This conversion step is necessary for slices and maps. If a user declares variables like:
- //
- // type myBool bool
- // var m map[string]myBool
- //
- // and tries to decode BSON bytes into the map, the decoding will fail if this conversion is not present
- // because we'll try to assign a value of type bool to one of type myBool.
- val = val.Convert(t)
- }
- return val, err
- }
-
- val := reflect.New(t).Elem()
- err := vd.DecodeValue(dc, vr, val)
- return val, err
-}
-
-// CodecZeroer is the interface implemented by Codecs that can also determine if
-// a value of the type that would be encoded is zero.
-//
-// Deprecated: Defining custom rules for the zero/empty value will not be supported in Go Driver
-// 2.0. Users who want to omit empty complex values should use a pointer field and set the value to
-// nil instead.
-type CodecZeroer interface {
- IsTypeZero(interface{}) bool
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go
deleted file mode 100644
index 0134b5a94b..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go
+++ /dev/null
@@ -1,138 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "fmt"
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-// ByteSliceCodec is the Codec used for []byte values.
-//
-// Deprecated: ByteSliceCodec will not be directly configurable in Go Driver
-// 2.0. To configure the byte slice encode and decode behavior, use the
-// configuration methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the byte slice
-// encode and decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to encode nil byte slices as empty
-// BSON binary values, use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// NilByteSliceAsEmpty: true,
-// })
-//
-// See the deprecation notice for each field in ByteSliceCodec for the
-// corresponding settings.
-type ByteSliceCodec struct {
- // EncodeNilAsEmpty causes EncodeValue to marshal nil Go byte slices as empty BSON binary values
- // instead of BSON null.
- //
- // Deprecated: Use bson.Encoder.NilByteSliceAsEmpty or options.BSONOptions.NilByteSliceAsEmpty
- // instead.
- EncodeNilAsEmpty bool
-}
-
-var (
- defaultByteSliceCodec = NewByteSliceCodec()
-
- // Assert that defaultByteSliceCodec satisfies the typeDecoder interface, which allows it to be
- // used by collection type decoders (e.g. map, slice, etc) to set individual values in a
- // collection.
- _ typeDecoder = defaultByteSliceCodec
-)
-
-// NewByteSliceCodec returns a ByteSliceCodec with options opts.
-//
-// Deprecated: NewByteSliceCodec will not be available in Go Driver 2.0. See
-// [ByteSliceCodec] for more details.
-func NewByteSliceCodec(opts ...*bsonoptions.ByteSliceCodecOptions) *ByteSliceCodec {
- byteSliceOpt := bsonoptions.MergeByteSliceCodecOptions(opts...)
- codec := ByteSliceCodec{}
- if byteSliceOpt.EncodeNilAsEmpty != nil {
- codec.EncodeNilAsEmpty = *byteSliceOpt.EncodeNilAsEmpty
- }
- return &codec
-}
-
-// EncodeValue is the ValueEncoder for []byte.
-func (bsc *ByteSliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tByteSlice {
- return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
- }
- if val.IsNil() && !bsc.EncodeNilAsEmpty && !ec.nilByteSliceAsEmpty {
- return vw.WriteNull()
- }
- return vw.WriteBinary(val.Interface().([]byte))
-}
-
-func (bsc *ByteSliceCodec) decodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tByteSlice {
- return emptyValue, ValueDecoderError{
- Name: "ByteSliceDecodeValue",
- Types: []reflect.Type{tByteSlice},
- Received: reflect.Zero(t),
- }
- }
-
- var data []byte
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.String:
- str, err := vr.ReadString()
- if err != nil {
- return emptyValue, err
- }
- data = []byte(str)
- case bsontype.Symbol:
- sym, err := vr.ReadSymbol()
- if err != nil {
- return emptyValue, err
- }
- data = []byte(sym)
- case bsontype.Binary:
- var subtype byte
- data, subtype, err = vr.ReadBinary()
- if err != nil {
- return emptyValue, err
- }
- if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
- return emptyValue, decodeBinaryError{subtype: subtype, typeName: "[]byte"}
- }
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a []byte", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(data), nil
-}
-
-// DecodeValue is the ValueDecoder for []byte.
-func (bsc *ByteSliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tByteSlice {
- return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
- }
-
- elem, err := bsc.decodeType(dc, vr, tByteSlice)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/codec_cache.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/codec_cache.go
deleted file mode 100644
index 844b50299f..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/codec_cache.go
+++ /dev/null
@@ -1,166 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "reflect"
- "sync"
- "sync/atomic"
-)
-
-// Runtime check that the kind encoder and decoder caches can store any valid
-// reflect.Kind constant.
-func init() {
- if s := reflect.Kind(len(kindEncoderCache{}.entries)).String(); s != "kind27" {
- panic("The capacity of kindEncoderCache is too small.\n" +
- "This is due to a new type being added to reflect.Kind.")
- }
-}
-
-// statically assert array size
-var _ = (kindEncoderCache{}).entries[reflect.UnsafePointer]
-var _ = (kindDecoderCache{}).entries[reflect.UnsafePointer]
-
-type typeEncoderCache struct {
- cache sync.Map // map[reflect.Type]ValueEncoder
-}
-
-func (c *typeEncoderCache) Store(rt reflect.Type, enc ValueEncoder) {
- c.cache.Store(rt, enc)
-}
-
-func (c *typeEncoderCache) Load(rt reflect.Type) (ValueEncoder, bool) {
- if v, _ := c.cache.Load(rt); v != nil {
- return v.(ValueEncoder), true
- }
- return nil, false
-}
-
-func (c *typeEncoderCache) LoadOrStore(rt reflect.Type, enc ValueEncoder) ValueEncoder {
- if v, loaded := c.cache.LoadOrStore(rt, enc); loaded {
- enc = v.(ValueEncoder)
- }
- return enc
-}
-
-func (c *typeEncoderCache) Clone() *typeEncoderCache {
- cc := new(typeEncoderCache)
- c.cache.Range(func(k, v interface{}) bool {
- if k != nil && v != nil {
- cc.cache.Store(k, v)
- }
- return true
- })
- return cc
-}
-
-type typeDecoderCache struct {
- cache sync.Map // map[reflect.Type]ValueDecoder
-}
-
-func (c *typeDecoderCache) Store(rt reflect.Type, dec ValueDecoder) {
- c.cache.Store(rt, dec)
-}
-
-func (c *typeDecoderCache) Load(rt reflect.Type) (ValueDecoder, bool) {
- if v, _ := c.cache.Load(rt); v != nil {
- return v.(ValueDecoder), true
- }
- return nil, false
-}
-
-func (c *typeDecoderCache) LoadOrStore(rt reflect.Type, dec ValueDecoder) ValueDecoder {
- if v, loaded := c.cache.LoadOrStore(rt, dec); loaded {
- dec = v.(ValueDecoder)
- }
- return dec
-}
-
-func (c *typeDecoderCache) Clone() *typeDecoderCache {
- cc := new(typeDecoderCache)
- c.cache.Range(func(k, v interface{}) bool {
- if k != nil && v != nil {
- cc.cache.Store(k, v)
- }
- return true
- })
- return cc
-}
-
-// atomic.Value requires that all calls to Store() have the same concrete type
-// so we wrap the ValueEncoder with a kindEncoderCacheEntry to ensure the type
-// is always the same (since different concrete types may implement the
-// ValueEncoder interface).
-type kindEncoderCacheEntry struct {
- enc ValueEncoder
-}
-
-type kindEncoderCache struct {
- entries [reflect.UnsafePointer + 1]atomic.Value // *kindEncoderCacheEntry
-}
-
-func (c *kindEncoderCache) Store(rt reflect.Kind, enc ValueEncoder) {
- if enc != nil && rt < reflect.Kind(len(c.entries)) {
- c.entries[rt].Store(&kindEncoderCacheEntry{enc: enc})
- }
-}
-
-func (c *kindEncoderCache) Load(rt reflect.Kind) (ValueEncoder, bool) {
- if rt < reflect.Kind(len(c.entries)) {
- if ent, ok := c.entries[rt].Load().(*kindEncoderCacheEntry); ok {
- return ent.enc, ent.enc != nil
- }
- }
- return nil, false
-}
-
-func (c *kindEncoderCache) Clone() *kindEncoderCache {
- cc := new(kindEncoderCache)
- for i, v := range c.entries {
- if val := v.Load(); val != nil {
- cc.entries[i].Store(val)
- }
- }
- return cc
-}
-
-// atomic.Value requires that all calls to Store() have the same concrete type
-// so we wrap the ValueDecoder with a kindDecoderCacheEntry to ensure the type
-// is always the same (since different concrete types may implement the
-// ValueDecoder interface).
-type kindDecoderCacheEntry struct {
- dec ValueDecoder
-}
-
-type kindDecoderCache struct {
- entries [reflect.UnsafePointer + 1]atomic.Value // *kindDecoderCacheEntry
-}
-
-func (c *kindDecoderCache) Store(rt reflect.Kind, dec ValueDecoder) {
- if rt < reflect.Kind(len(c.entries)) {
- c.entries[rt].Store(&kindDecoderCacheEntry{dec: dec})
- }
-}
-
-func (c *kindDecoderCache) Load(rt reflect.Kind) (ValueDecoder, bool) {
- if rt < reflect.Kind(len(c.entries)) {
- if ent, ok := c.entries[rt].Load().(*kindDecoderCacheEntry); ok {
- return ent.dec, ent.dec != nil
- }
- }
- return nil, false
-}
-
-func (c *kindDecoderCache) Clone() *kindDecoderCache {
- cc := new(kindDecoderCache)
- for i, v := range c.entries {
- if val := v.Load(); val != nil {
- cc.entries[i].Store(val)
- }
- }
- return cc
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go
deleted file mode 100644
index cb8180f25c..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonrw"
-)
-
-// condAddrEncoder is the encoder used when a pointer to the encoding value has an encoder.
-type condAddrEncoder struct {
- canAddrEnc ValueEncoder
- elseEnc ValueEncoder
-}
-
-var _ ValueEncoder = (*condAddrEncoder)(nil)
-
-// newCondAddrEncoder returns a condAddrEncoder.
-func newCondAddrEncoder(canAddrEnc, elseEnc ValueEncoder) *condAddrEncoder {
- encoder := condAddrEncoder{canAddrEnc: canAddrEnc, elseEnc: elseEnc}
- return &encoder
-}
-
-// EncodeValue is the ValueEncoderFunc for a value that may be addressable.
-func (cae *condAddrEncoder) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if val.CanAddr() {
- return cae.canAddrEnc.EncodeValue(ec, vw, val)
- }
- if cae.elseEnc != nil {
- return cae.elseEnc.EncodeValue(ec, vw, val)
- }
- return ErrNoEncoder{Type: val.Type()}
-}
-
-// condAddrDecoder is the decoder used when a pointer to the value has a decoder.
-type condAddrDecoder struct {
- canAddrDec ValueDecoder
- elseDec ValueDecoder
-}
-
-var _ ValueDecoder = (*condAddrDecoder)(nil)
-
-// newCondAddrDecoder returns a condAddrDecoder.
-func newCondAddrDecoder(canAddrDec, elseDec ValueDecoder) *condAddrDecoder {
- decoder := condAddrDecoder{canAddrDec: canAddrDec, elseDec: elseDec}
- return &decoder
-}
-
-// DecodeValue is the ValueDecoderFunc for a value that may be addressable.
-func (cad *condAddrDecoder) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if val.CanAddr() {
- return cad.canAddrDec.DecodeValue(dc, vr, val)
- }
- if cad.elseDec != nil {
- return cad.elseDec.DecodeValue(dc, vr, val)
- }
- return ErrNoDecoder{Type: val.Type()}
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go
deleted file mode 100644
index 8702d6d39e..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go
+++ /dev/null
@@ -1,1819 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "math"
- "net/url"
- "reflect"
- "strconv"
- "time"
-
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
- "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
-)
-
-var (
- defaultValueDecoders DefaultValueDecoders
- errCannotTruncate = errors.New("float64 can only be truncated to a lower precision type when truncation is enabled")
-)
-
-type decodeBinaryError struct {
- subtype byte
- typeName string
-}
-
-func (d decodeBinaryError) Error() string {
- return fmt.Sprintf("only binary values with subtype 0x00 or 0x02 can be decoded into %s, but got subtype %v", d.typeName, d.subtype)
-}
-
-func newDefaultStructCodec() *StructCodec {
- codec, err := NewStructCodec(DefaultStructTagParser)
- if err != nil {
- // This function is called from the codec registration path, so errors can't be propagated. If there's an error
- // constructing the StructCodec, we panic to avoid losing it.
- panic(fmt.Errorf("error creating default StructCodec: %w", err))
- }
- return codec
-}
-
-// DefaultValueDecoders is a namespace type for the default ValueDecoders used
-// when creating a registry.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-type DefaultValueDecoders struct{}
-
-// RegisterDefaultDecoders will register the decoder methods attached to DefaultValueDecoders with
-// the provided RegistryBuilder.
-//
-// There is no support for decoding map[string]interface{} because there is no decoder for
-// interface{}, so users must either register this decoder themselves or use the
-// EmptyInterfaceDecoder available in the bson package.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) RegisterDefaultDecoders(rb *RegistryBuilder) {
- if rb == nil {
- panic(errors.New("argument to RegisterDefaultDecoders must not be nil"))
- }
-
- intDecoder := decodeAdapter{dvd.IntDecodeValue, dvd.intDecodeType}
- floatDecoder := decodeAdapter{dvd.FloatDecodeValue, dvd.floatDecodeType}
-
- rb.
- RegisterTypeDecoder(tD, ValueDecoderFunc(dvd.DDecodeValue)).
- RegisterTypeDecoder(tBinary, decodeAdapter{dvd.BinaryDecodeValue, dvd.binaryDecodeType}).
- RegisterTypeDecoder(tUndefined, decodeAdapter{dvd.UndefinedDecodeValue, dvd.undefinedDecodeType}).
- RegisterTypeDecoder(tDateTime, decodeAdapter{dvd.DateTimeDecodeValue, dvd.dateTimeDecodeType}).
- RegisterTypeDecoder(tNull, decodeAdapter{dvd.NullDecodeValue, dvd.nullDecodeType}).
- RegisterTypeDecoder(tRegex, decodeAdapter{dvd.RegexDecodeValue, dvd.regexDecodeType}).
- RegisterTypeDecoder(tDBPointer, decodeAdapter{dvd.DBPointerDecodeValue, dvd.dBPointerDecodeType}).
- RegisterTypeDecoder(tTimestamp, decodeAdapter{dvd.TimestampDecodeValue, dvd.timestampDecodeType}).
- RegisterTypeDecoder(tMinKey, decodeAdapter{dvd.MinKeyDecodeValue, dvd.minKeyDecodeType}).
- RegisterTypeDecoder(tMaxKey, decodeAdapter{dvd.MaxKeyDecodeValue, dvd.maxKeyDecodeType}).
- RegisterTypeDecoder(tJavaScript, decodeAdapter{dvd.JavaScriptDecodeValue, dvd.javaScriptDecodeType}).
- RegisterTypeDecoder(tSymbol, decodeAdapter{dvd.SymbolDecodeValue, dvd.symbolDecodeType}).
- RegisterTypeDecoder(tByteSlice, defaultByteSliceCodec).
- RegisterTypeDecoder(tTime, defaultTimeCodec).
- RegisterTypeDecoder(tEmpty, defaultEmptyInterfaceCodec).
- RegisterTypeDecoder(tCoreArray, defaultArrayCodec).
- RegisterTypeDecoder(tOID, decodeAdapter{dvd.ObjectIDDecodeValue, dvd.objectIDDecodeType}).
- RegisterTypeDecoder(tDecimal, decodeAdapter{dvd.Decimal128DecodeValue, dvd.decimal128DecodeType}).
- RegisterTypeDecoder(tJSONNumber, decodeAdapter{dvd.JSONNumberDecodeValue, dvd.jsonNumberDecodeType}).
- RegisterTypeDecoder(tURL, decodeAdapter{dvd.URLDecodeValue, dvd.urlDecodeType}).
- RegisterTypeDecoder(tCoreDocument, ValueDecoderFunc(dvd.CoreDocumentDecodeValue)).
- RegisterTypeDecoder(tCodeWithScope, decodeAdapter{dvd.CodeWithScopeDecodeValue, dvd.codeWithScopeDecodeType}).
- RegisterDefaultDecoder(reflect.Bool, decodeAdapter{dvd.BooleanDecodeValue, dvd.booleanDecodeType}).
- RegisterDefaultDecoder(reflect.Int, intDecoder).
- RegisterDefaultDecoder(reflect.Int8, intDecoder).
- RegisterDefaultDecoder(reflect.Int16, intDecoder).
- RegisterDefaultDecoder(reflect.Int32, intDecoder).
- RegisterDefaultDecoder(reflect.Int64, intDecoder).
- RegisterDefaultDecoder(reflect.Uint, defaultUIntCodec).
- RegisterDefaultDecoder(reflect.Uint8, defaultUIntCodec).
- RegisterDefaultDecoder(reflect.Uint16, defaultUIntCodec).
- RegisterDefaultDecoder(reflect.Uint32, defaultUIntCodec).
- RegisterDefaultDecoder(reflect.Uint64, defaultUIntCodec).
- RegisterDefaultDecoder(reflect.Float32, floatDecoder).
- RegisterDefaultDecoder(reflect.Float64, floatDecoder).
- RegisterDefaultDecoder(reflect.Array, ValueDecoderFunc(dvd.ArrayDecodeValue)).
- RegisterDefaultDecoder(reflect.Map, defaultMapCodec).
- RegisterDefaultDecoder(reflect.Slice, defaultSliceCodec).
- RegisterDefaultDecoder(reflect.String, defaultStringCodec).
- RegisterDefaultDecoder(reflect.Struct, newDefaultStructCodec()).
- RegisterDefaultDecoder(reflect.Ptr, NewPointerCodec()).
- RegisterTypeMapEntry(bsontype.Double, tFloat64).
- RegisterTypeMapEntry(bsontype.String, tString).
- RegisterTypeMapEntry(bsontype.Array, tA).
- RegisterTypeMapEntry(bsontype.Binary, tBinary).
- RegisterTypeMapEntry(bsontype.Undefined, tUndefined).
- RegisterTypeMapEntry(bsontype.ObjectID, tOID).
- RegisterTypeMapEntry(bsontype.Boolean, tBool).
- RegisterTypeMapEntry(bsontype.DateTime, tDateTime).
- RegisterTypeMapEntry(bsontype.Regex, tRegex).
- RegisterTypeMapEntry(bsontype.DBPointer, tDBPointer).
- RegisterTypeMapEntry(bsontype.JavaScript, tJavaScript).
- RegisterTypeMapEntry(bsontype.Symbol, tSymbol).
- RegisterTypeMapEntry(bsontype.CodeWithScope, tCodeWithScope).
- RegisterTypeMapEntry(bsontype.Int32, tInt32).
- RegisterTypeMapEntry(bsontype.Int64, tInt64).
- RegisterTypeMapEntry(bsontype.Timestamp, tTimestamp).
- RegisterTypeMapEntry(bsontype.Decimal128, tDecimal).
- RegisterTypeMapEntry(bsontype.MinKey, tMinKey).
- RegisterTypeMapEntry(bsontype.MaxKey, tMaxKey).
- RegisterTypeMapEntry(bsontype.Type(0), tD).
- RegisterTypeMapEntry(bsontype.EmbeddedDocument, tD).
- RegisterHookDecoder(tValueUnmarshaler, ValueDecoderFunc(dvd.ValueUnmarshalerDecodeValue)).
- RegisterHookDecoder(tUnmarshaler, ValueDecoderFunc(dvd.UnmarshalerDecodeValue))
-}
-
-// DDecodeValue is the ValueDecoderFunc for primitive.D instances.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) DDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.IsValid() || !val.CanSet() || val.Type() != tD {
- return ValueDecoderError{Name: "DDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
- }
-
- switch vrType := vr.Type(); vrType {
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- dc.Ancestor = tD
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- default:
- return fmt.Errorf("cannot decode %v into a primitive.D", vrType)
- }
-
- dr, err := vr.ReadDocument()
- if err != nil {
- return err
- }
-
- decoder, err := dc.LookupDecoder(tEmpty)
- if err != nil {
- return err
- }
- tEmptyTypeDecoder, _ := decoder.(typeDecoder)
-
-	// Use the elements in the provided value if it's non-nil. Otherwise, allocate a new D instance.
- var elems primitive.D
- if !val.IsNil() {
- val.SetLen(0)
- elems = val.Interface().(primitive.D)
- } else {
- elems = make(primitive.D, 0)
- }
-
- for {
- key, elemVr, err := dr.ReadElement()
- if errors.Is(err, bsonrw.ErrEOD) {
- break
- } else if err != nil {
- return err
- }
-
- // Pass false for convert because we don't need to call reflect.Value.Convert for tEmpty.
- elem, err := decodeTypeOrValueWithInfo(decoder, tEmptyTypeDecoder, dc, elemVr, tEmpty, false)
- if err != nil {
- return err
- }
-
- elems = append(elems, primitive.E{Key: key, Value: elem.Interface()})
- }
-
- val.Set(reflect.ValueOf(elems))
- return nil
-}
-
-func (dvd DefaultValueDecoders) booleanDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t.Kind() != reflect.Bool {
- return emptyValue, ValueDecoderError{
- Name: "BooleanDecodeValue",
- Kinds: []reflect.Kind{reflect.Bool},
- Received: reflect.Zero(t),
- }
- }
-
- var b bool
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Int32:
- i32, err := vr.ReadInt32()
- if err != nil {
- return emptyValue, err
- }
- b = (i32 != 0)
- case bsontype.Int64:
- i64, err := vr.ReadInt64()
- if err != nil {
- return emptyValue, err
- }
- b = (i64 != 0)
- case bsontype.Double:
- f64, err := vr.ReadDouble()
- if err != nil {
- return emptyValue, err
- }
- b = (f64 != 0)
- case bsontype.Boolean:
- b, err = vr.ReadBoolean()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a boolean", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(b), nil
-}
-
-// BooleanDecodeValue is the ValueDecoderFunc for bool types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) BooleanDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.IsValid() || !val.CanSet() || val.Kind() != reflect.Bool {
- return ValueDecoderError{Name: "BooleanDecodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val}
- }
-
- elem, err := dvd.booleanDecodeType(dctx, vr, val.Type())
- if err != nil {
- return err
- }
-
- val.SetBool(elem.Bool())
- return nil
-}
-
-func (DefaultValueDecoders) intDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- var i64 int64
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Int32:
- i32, err := vr.ReadInt32()
- if err != nil {
- return emptyValue, err
- }
- i64 = int64(i32)
- case bsontype.Int64:
- i64, err = vr.ReadInt64()
- if err != nil {
- return emptyValue, err
- }
- case bsontype.Double:
- f64, err := vr.ReadDouble()
- if err != nil {
- return emptyValue, err
- }
- if !dc.Truncate && math.Floor(f64) != f64 {
- return emptyValue, errCannotTruncate
- }
- if f64 > float64(math.MaxInt64) {
- return emptyValue, fmt.Errorf("%g overflows int64", f64)
- }
- i64 = int64(f64)
- case bsontype.Boolean:
- b, err := vr.ReadBoolean()
- if err != nil {
- return emptyValue, err
- }
- if b {
- i64 = 1
- }
- case bsontype.Null:
- if err = vr.ReadNull(); err != nil {
- return emptyValue, err
- }
- case bsontype.Undefined:
- if err = vr.ReadUndefined(); err != nil {
- return emptyValue, err
- }
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType)
- }
-
- switch t.Kind() {
- case reflect.Int8:
- if i64 < math.MinInt8 || i64 > math.MaxInt8 {
- return emptyValue, fmt.Errorf("%d overflows int8", i64)
- }
-
- return reflect.ValueOf(int8(i64)), nil
- case reflect.Int16:
- if i64 < math.MinInt16 || i64 > math.MaxInt16 {
- return emptyValue, fmt.Errorf("%d overflows int16", i64)
- }
-
- return reflect.ValueOf(int16(i64)), nil
- case reflect.Int32:
- if i64 < math.MinInt32 || i64 > math.MaxInt32 {
- return emptyValue, fmt.Errorf("%d overflows int32", i64)
- }
-
- return reflect.ValueOf(int32(i64)), nil
- case reflect.Int64:
- return reflect.ValueOf(i64), nil
- case reflect.Int:
- if i64 > math.MaxInt { // Can we fit this inside of an int
- return emptyValue, fmt.Errorf("%d overflows int", i64)
- }
-
- return reflect.ValueOf(int(i64)), nil
- default:
- return emptyValue, ValueDecoderError{
- Name: "IntDecodeValue",
- Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int},
- Received: reflect.Zero(t),
- }
- }
-}
-
-// IntDecodeValue is the ValueDecoderFunc for int types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) IntDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() {
- return ValueDecoderError{
- Name: "IntDecodeValue",
- Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int},
- Received: val,
- }
- }
-
- elem, err := dvd.intDecodeType(dc, vr, val.Type())
- if err != nil {
- return err
- }
-
- val.SetInt(elem.Int())
- return nil
-}
-
-// UintDecodeValue is the ValueDecoderFunc for uint types.
-//
-// Deprecated: UintDecodeValue is not registered by default. Use UintCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) UintDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- var i64 int64
- var err error
- switch vr.Type() {
- case bsontype.Int32:
- i32, err := vr.ReadInt32()
- if err != nil {
- return err
- }
- i64 = int64(i32)
- case bsontype.Int64:
- i64, err = vr.ReadInt64()
- if err != nil {
- return err
- }
- case bsontype.Double:
- f64, err := vr.ReadDouble()
- if err != nil {
- return err
- }
- if !dc.Truncate && math.Floor(f64) != f64 {
- return errors.New("UintDecodeValue can only truncate float64 to an integer type when truncation is enabled")
- }
- if f64 > float64(math.MaxInt64) {
- return fmt.Errorf("%g overflows int64", f64)
- }
- i64 = int64(f64)
- case bsontype.Boolean:
- b, err := vr.ReadBoolean()
- if err != nil {
- return err
- }
- if b {
- i64 = 1
- }
- default:
- return fmt.Errorf("cannot decode %v into an integer type", vr.Type())
- }
-
- if !val.CanSet() {
- return ValueDecoderError{
- Name: "UintDecodeValue",
- Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
- Received: val,
- }
- }
-
- switch val.Kind() {
- case reflect.Uint8:
- if i64 < 0 || i64 > math.MaxUint8 {
- return fmt.Errorf("%d overflows uint8", i64)
- }
- case reflect.Uint16:
- if i64 < 0 || i64 > math.MaxUint16 {
- return fmt.Errorf("%d overflows uint16", i64)
- }
- case reflect.Uint32:
- if i64 < 0 || i64 > math.MaxUint32 {
- return fmt.Errorf("%d overflows uint32", i64)
- }
- case reflect.Uint64:
- if i64 < 0 {
- return fmt.Errorf("%d overflows uint64", i64)
- }
- case reflect.Uint:
- if i64 < 0 || uint64(i64) > uint64(math.MaxUint) { // Can we fit this inside of an uint
- return fmt.Errorf("%d overflows uint", i64)
- }
- default:
- return ValueDecoderError{
- Name: "UintDecodeValue",
- Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
- Received: val,
- }
- }
-
- val.SetUint(uint64(i64))
- return nil
-}
-
-func (dvd DefaultValueDecoders) floatDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- var f float64
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Int32:
- i32, err := vr.ReadInt32()
- if err != nil {
- return emptyValue, err
- }
- f = float64(i32)
- case bsontype.Int64:
- i64, err := vr.ReadInt64()
- if err != nil {
- return emptyValue, err
- }
- f = float64(i64)
- case bsontype.Double:
- f, err = vr.ReadDouble()
- if err != nil {
- return emptyValue, err
- }
- case bsontype.Boolean:
- b, err := vr.ReadBoolean()
- if err != nil {
- return emptyValue, err
- }
- if b {
- f = 1
- }
- case bsontype.Null:
- if err = vr.ReadNull(); err != nil {
- return emptyValue, err
- }
- case bsontype.Undefined:
- if err = vr.ReadUndefined(); err != nil {
- return emptyValue, err
- }
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a float32 or float64 type", vrType)
- }
-
- switch t.Kind() {
- case reflect.Float32:
- if !dc.Truncate && float64(float32(f)) != f {
- return emptyValue, errCannotTruncate
- }
-
- return reflect.ValueOf(float32(f)), nil
- case reflect.Float64:
- return reflect.ValueOf(f), nil
- default:
- return emptyValue, ValueDecoderError{
- Name: "FloatDecodeValue",
- Kinds: []reflect.Kind{reflect.Float32, reflect.Float64},
- Received: reflect.Zero(t),
- }
- }
-}
-
-// FloatDecodeValue is the ValueDecoderFunc for float types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) FloatDecodeValue(ec DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() {
- return ValueDecoderError{
- Name: "FloatDecodeValue",
- Kinds: []reflect.Kind{reflect.Float32, reflect.Float64},
- Received: val,
- }
- }
-
- elem, err := dvd.floatDecodeType(ec, vr, val.Type())
- if err != nil {
- return err
- }
-
- val.SetFloat(elem.Float())
- return nil
-}
-
-// StringDecodeValue is the ValueDecoderFunc for string types.
-//
-// Deprecated: StringDecodeValue is not registered by default. Use StringCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) StringDecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- var str string
- var err error
- switch vr.Type() {
- // TODO(GODRIVER-577): Handle JavaScript and Symbol BSON types when allowed.
- case bsontype.String:
- str, err = vr.ReadString()
- if err != nil {
- return err
- }
- default:
- return fmt.Errorf("cannot decode %v into a string type", vr.Type())
- }
- if !val.CanSet() || val.Kind() != reflect.String {
- return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val}
- }
-
- val.SetString(str)
- return nil
-}
-
-func (DefaultValueDecoders) javaScriptDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tJavaScript {
- return emptyValue, ValueDecoderError{
- Name: "JavaScriptDecodeValue",
- Types: []reflect.Type{tJavaScript},
- Received: reflect.Zero(t),
- }
- }
-
- var js string
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.JavaScript:
- js, err = vr.ReadJavascript()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a primitive.JavaScript", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.JavaScript(js)), nil
-}
-
-// JavaScriptDecodeValue is the ValueDecoderFunc for the primitive.JavaScript type.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) JavaScriptDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tJavaScript {
- return ValueDecoderError{Name: "JavaScriptDecodeValue", Types: []reflect.Type{tJavaScript}, Received: val}
- }
-
- elem, err := dvd.javaScriptDecodeType(dctx, vr, tJavaScript)
- if err != nil {
- return err
- }
-
- val.SetString(elem.String())
- return nil
-}
-
-func (DefaultValueDecoders) symbolDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tSymbol {
- return emptyValue, ValueDecoderError{
- Name: "SymbolDecodeValue",
- Types: []reflect.Type{tSymbol},
- Received: reflect.Zero(t),
- }
- }
-
- var symbol string
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.String:
- symbol, err = vr.ReadString()
- case bsontype.Symbol:
- symbol, err = vr.ReadSymbol()
- case bsontype.Binary:
- data, subtype, err := vr.ReadBinary()
- if err != nil {
- return emptyValue, err
- }
-
- if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
- return emptyValue, decodeBinaryError{subtype: subtype, typeName: "primitive.Symbol"}
- }
- symbol = string(data)
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a primitive.Symbol", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.Symbol(symbol)), nil
-}
-
-// SymbolDecodeValue is the ValueDecoderFunc for the primitive.Symbol type.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) SymbolDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tSymbol {
- return ValueDecoderError{Name: "SymbolDecodeValue", Types: []reflect.Type{tSymbol}, Received: val}
- }
-
- elem, err := dvd.symbolDecodeType(dctx, vr, tSymbol)
- if err != nil {
- return err
- }
-
- val.SetString(elem.String())
- return nil
-}
-
-func (DefaultValueDecoders) binaryDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tBinary {
- return emptyValue, ValueDecoderError{
- Name: "BinaryDecodeValue",
- Types: []reflect.Type{tBinary},
- Received: reflect.Zero(t),
- }
- }
-
- var data []byte
- var subtype byte
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Binary:
- data, subtype, err = vr.ReadBinary()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a Binary", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.Binary{Subtype: subtype, Data: data}), nil
-}
-
-// BinaryDecodeValue is the ValueDecoderFunc for Binary.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) BinaryDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tBinary {
- return ValueDecoderError{Name: "BinaryDecodeValue", Types: []reflect.Type{tBinary}, Received: val}
- }
-
- elem, err := dvd.binaryDecodeType(dc, vr, tBinary)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) undefinedDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tUndefined {
- return emptyValue, ValueDecoderError{
- Name: "UndefinedDecodeValue",
- Types: []reflect.Type{tUndefined},
- Received: reflect.Zero(t),
- }
- }
-
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- case bsontype.Null:
- err = vr.ReadNull()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into an Undefined", vr.Type())
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.Undefined{}), nil
-}
-
-// UndefinedDecodeValue is the ValueDecoderFunc for Undefined.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) UndefinedDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tUndefined {
- return ValueDecoderError{Name: "UndefinedDecodeValue", Types: []reflect.Type{tUndefined}, Received: val}
- }
-
- elem, err := dvd.undefinedDecodeType(dc, vr, tUndefined)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-// Accept both 12-byte string and pretty-printed 24-byte hex string formats.
-func (dvd DefaultValueDecoders) objectIDDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tOID {
- return emptyValue, ValueDecoderError{
- Name: "ObjectIDDecodeValue",
- Types: []reflect.Type{tOID},
- Received: reflect.Zero(t),
- }
- }
-
- var oid primitive.ObjectID
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.ObjectID:
- oid, err = vr.ReadObjectID()
- if err != nil {
- return emptyValue, err
- }
- case bsontype.String:
- str, err := vr.ReadString()
- if err != nil {
- return emptyValue, err
- }
- if oid, err = primitive.ObjectIDFromHex(str); err == nil {
- break
- }
- if len(str) != 12 {
- return emptyValue, fmt.Errorf("an ObjectID string must be exactly 12 bytes long (got %v)", len(str))
- }
- byteArr := []byte(str)
- copy(oid[:], byteArr)
- case bsontype.Null:
- if err = vr.ReadNull(); err != nil {
- return emptyValue, err
- }
- case bsontype.Undefined:
- if err = vr.ReadUndefined(); err != nil {
- return emptyValue, err
- }
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into an ObjectID", vrType)
- }
-
- return reflect.ValueOf(oid), nil
-}
-
-// ObjectIDDecodeValue is the ValueDecoderFunc for primitive.ObjectID.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) ObjectIDDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tOID {
- return ValueDecoderError{Name: "ObjectIDDecodeValue", Types: []reflect.Type{tOID}, Received: val}
- }
-
- elem, err := dvd.objectIDDecodeType(dc, vr, tOID)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) dateTimeDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tDateTime {
- return emptyValue, ValueDecoderError{
- Name: "DateTimeDecodeValue",
- Types: []reflect.Type{tDateTime},
- Received: reflect.Zero(t),
- }
- }
-
- var dt int64
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.DateTime:
- dt, err = vr.ReadDateTime()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a DateTime", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.DateTime(dt)), nil
-}
-
-// DateTimeDecodeValue is the ValueDecoderFunc for DateTime.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) DateTimeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tDateTime {
- return ValueDecoderError{Name: "DateTimeDecodeValue", Types: []reflect.Type{tDateTime}, Received: val}
- }
-
- elem, err := dvd.dateTimeDecodeType(dc, vr, tDateTime)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) nullDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tNull {
- return emptyValue, ValueDecoderError{
- Name: "NullDecodeValue",
- Types: []reflect.Type{tNull},
- Received: reflect.Zero(t),
- }
- }
-
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- case bsontype.Null:
- err = vr.ReadNull()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a Null", vr.Type())
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.Null{}), nil
-}
-
-// NullDecodeValue is the ValueDecoderFunc for Null.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) NullDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tNull {
- return ValueDecoderError{Name: "NullDecodeValue", Types: []reflect.Type{tNull}, Received: val}
- }
-
- elem, err := dvd.nullDecodeType(dc, vr, tNull)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) regexDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tRegex {
- return emptyValue, ValueDecoderError{
- Name: "RegexDecodeValue",
- Types: []reflect.Type{tRegex},
- Received: reflect.Zero(t),
- }
- }
-
- var pattern, options string
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Regex:
- pattern, options, err = vr.ReadRegex()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a Regex", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.Regex{Pattern: pattern, Options: options}), nil
-}
-
-// RegexDecodeValue is the ValueDecoderFunc for Regex.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) RegexDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tRegex {
- return ValueDecoderError{Name: "RegexDecodeValue", Types: []reflect.Type{tRegex}, Received: val}
- }
-
- elem, err := dvd.regexDecodeType(dc, vr, tRegex)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) dBPointerDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tDBPointer {
- return emptyValue, ValueDecoderError{
- Name: "DBPointerDecodeValue",
- Types: []reflect.Type{tDBPointer},
- Received: reflect.Zero(t),
- }
- }
-
- var ns string
- var pointer primitive.ObjectID
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.DBPointer:
- ns, pointer, err = vr.ReadDBPointer()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a DBPointer", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.DBPointer{DB: ns, Pointer: pointer}), nil
-}
-
-// DBPointerDecodeValue is the ValueDecoderFunc for DBPointer.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) DBPointerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tDBPointer {
- return ValueDecoderError{Name: "DBPointerDecodeValue", Types: []reflect.Type{tDBPointer}, Received: val}
- }
-
- elem, err := dvd.dBPointerDecodeType(dc, vr, tDBPointer)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) timestampDecodeType(_ DecodeContext, vr bsonrw.ValueReader, reflectType reflect.Type) (reflect.Value, error) {
- if reflectType != tTimestamp {
- return emptyValue, ValueDecoderError{
- Name: "TimestampDecodeValue",
- Types: []reflect.Type{tTimestamp},
- Received: reflect.Zero(reflectType),
- }
- }
-
- var t, incr uint32
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Timestamp:
- t, incr, err = vr.ReadTimestamp()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a Timestamp", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.Timestamp{T: t, I: incr}), nil
-}
-
-// TimestampDecodeValue is the ValueDecoderFunc for Timestamp.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) TimestampDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tTimestamp {
- return ValueDecoderError{Name: "TimestampDecodeValue", Types: []reflect.Type{tTimestamp}, Received: val}
- }
-
- elem, err := dvd.timestampDecodeType(dc, vr, tTimestamp)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) minKeyDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tMinKey {
- return emptyValue, ValueDecoderError{
- Name: "MinKeyDecodeValue",
- Types: []reflect.Type{tMinKey},
- Received: reflect.Zero(t),
- }
- }
-
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.MinKey:
- err = vr.ReadMinKey()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a MinKey", vr.Type())
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.MinKey{}), nil
-}
-
-// MinKeyDecodeValue is the ValueDecoderFunc for MinKey.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) MinKeyDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tMinKey {
- return ValueDecoderError{Name: "MinKeyDecodeValue", Types: []reflect.Type{tMinKey}, Received: val}
- }
-
- elem, err := dvd.minKeyDecodeType(dc, vr, tMinKey)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (DefaultValueDecoders) maxKeyDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tMaxKey {
- return emptyValue, ValueDecoderError{
- Name: "MaxKeyDecodeValue",
- Types: []reflect.Type{tMaxKey},
- Received: reflect.Zero(t),
- }
- }
-
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.MaxKey:
- err = vr.ReadMaxKey()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a MaxKey", vr.Type())
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(primitive.MaxKey{}), nil
-}
-
-// MaxKeyDecodeValue is the ValueDecoderFunc for MaxKey.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) MaxKeyDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tMaxKey {
- return ValueDecoderError{Name: "MaxKeyDecodeValue", Types: []reflect.Type{tMaxKey}, Received: val}
- }
-
- elem, err := dvd.maxKeyDecodeType(dc, vr, tMaxKey)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (dvd DefaultValueDecoders) decimal128DecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tDecimal {
- return emptyValue, ValueDecoderError{
- Name: "Decimal128DecodeValue",
- Types: []reflect.Type{tDecimal},
- Received: reflect.Zero(t),
- }
- }
-
- var d128 primitive.Decimal128
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Decimal128:
- d128, err = vr.ReadDecimal128()
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a primitive.Decimal128", vr.Type())
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(d128), nil
-}
-
-// Decimal128DecodeValue is the ValueDecoderFunc for primitive.Decimal128.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) Decimal128DecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tDecimal {
- return ValueDecoderError{Name: "Decimal128DecodeValue", Types: []reflect.Type{tDecimal}, Received: val}
- }
-
- elem, err := dvd.decimal128DecodeType(dctx, vr, tDecimal)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (dvd DefaultValueDecoders) jsonNumberDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tJSONNumber {
- return emptyValue, ValueDecoderError{
- Name: "JSONNumberDecodeValue",
- Types: []reflect.Type{tJSONNumber},
- Received: reflect.Zero(t),
- }
- }
-
- var jsonNum json.Number
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Double:
- f64, err := vr.ReadDouble()
- if err != nil {
- return emptyValue, err
- }
- jsonNum = json.Number(strconv.FormatFloat(f64, 'f', -1, 64))
- case bsontype.Int32:
- i32, err := vr.ReadInt32()
- if err != nil {
- return emptyValue, err
- }
- jsonNum = json.Number(strconv.FormatInt(int64(i32), 10))
- case bsontype.Int64:
- i64, err := vr.ReadInt64()
- if err != nil {
- return emptyValue, err
- }
- jsonNum = json.Number(strconv.FormatInt(i64, 10))
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a json.Number", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(jsonNum), nil
-}
-
-// JSONNumberDecodeValue is the ValueDecoderFunc for json.Number.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) JSONNumberDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tJSONNumber {
- return ValueDecoderError{Name: "JSONNumberDecodeValue", Types: []reflect.Type{tJSONNumber}, Received: val}
- }
-
- elem, err := dvd.jsonNumberDecodeType(dc, vr, tJSONNumber)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (dvd DefaultValueDecoders) urlDecodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tURL {
- return emptyValue, ValueDecoderError{
- Name: "URLDecodeValue",
- Types: []reflect.Type{tURL},
- Received: reflect.Zero(t),
- }
- }
-
- urlPtr := &url.URL{}
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.String:
- var str string // Declare str here to avoid shadowing err during the ReadString call.
- str, err = vr.ReadString()
- if err != nil {
- return emptyValue, err
- }
-
- urlPtr, err = url.Parse(str)
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a *url.URL", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(urlPtr).Elem(), nil
-}
-
-// URLDecodeValue is the ValueDecoderFunc for url.URL.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) URLDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tURL {
- return ValueDecoderError{Name: "URLDecodeValue", Types: []reflect.Type{tURL}, Received: val}
- }
-
- elem, err := dvd.urlDecodeType(dc, vr, tURL)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-// TimeDecodeValue is the ValueDecoderFunc for time.Time.
-//
-// Deprecated: TimeDecodeValue is not registered by default. Use TimeCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) TimeDecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if vr.Type() != bsontype.DateTime {
- return fmt.Errorf("cannot decode %v into a time.Time", vr.Type())
- }
-
- dt, err := vr.ReadDateTime()
- if err != nil {
- return err
- }
-
- if !val.CanSet() || val.Type() != tTime {
- return ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: val}
- }
-
- val.Set(reflect.ValueOf(time.Unix(dt/1000, dt%1000*1000000).UTC()))
- return nil
-}
-
-// ByteSliceDecodeValue is the ValueDecoderFunc for []byte.
-//
-// Deprecated: ByteSliceDecodeValue is not registered by default. Use ByteSliceCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) ByteSliceDecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if vr.Type() != bsontype.Binary && vr.Type() != bsontype.Null {
- return fmt.Errorf("cannot decode %v into a []byte", vr.Type())
- }
-
- if !val.CanSet() || val.Type() != tByteSlice {
- return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
- }
-
- if vr.Type() == bsontype.Null {
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- }
-
- data, subtype, err := vr.ReadBinary()
- if err != nil {
- return err
- }
- if subtype != 0x00 {
- return fmt.Errorf("ByteSliceDecodeValue can only be used to decode subtype 0x00 for %s, got %v", bsontype.Binary, subtype)
- }
-
- val.Set(reflect.ValueOf(data))
- return nil
-}
-
-// MapDecodeValue is the ValueDecoderFunc for map[string]* types.
-//
-// Deprecated: MapDecodeValue is not registered by default. Use MapCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) MapDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Kind() != reflect.Map || val.Type().Key().Kind() != reflect.String {
- return ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
- }
-
- switch vr.Type() {
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- default:
- return fmt.Errorf("cannot decode %v into a %s", vr.Type(), val.Type())
- }
-
- dr, err := vr.ReadDocument()
- if err != nil {
- return err
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeMap(val.Type()))
- }
-
- eType := val.Type().Elem()
- decoder, err := dc.LookupDecoder(eType)
- if err != nil {
- return err
- }
-
- if eType == tEmpty {
- dc.Ancestor = val.Type()
- }
-
- keyType := val.Type().Key()
- for {
- key, vr, err := dr.ReadElement()
- if errors.Is(err, bsonrw.ErrEOD) {
- break
- }
- if err != nil {
- return err
- }
-
- elem := reflect.New(eType).Elem()
-
- err = decoder.DecodeValue(dc, vr, elem)
- if err != nil {
- return err
- }
-
- val.SetMapIndex(reflect.ValueOf(key).Convert(keyType), elem)
- }
- return nil
-}
-
-// ArrayDecodeValue is the ValueDecoderFunc for array types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) ArrayDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Array {
- return ValueDecoderError{Name: "ArrayDecodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val}
- }
-
- switch vrType := vr.Type(); vrType {
- case bsontype.Array:
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- if val.Type().Elem() != tE {
- return fmt.Errorf("cannot decode document into %s", val.Type())
- }
- case bsontype.Binary:
- if val.Type().Elem() != tByte {
- return fmt.Errorf("ArrayDecodeValue can only be used to decode binary into a byte array, got %v", vrType)
- }
- data, subtype, err := vr.ReadBinary()
- if err != nil {
- return err
- }
- if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
- return fmt.Errorf("ArrayDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", bsontype.Binary, subtype)
- }
-
- if len(data) > val.Len() {
- return fmt.Errorf("more elements returned in array than can fit inside %s", val.Type())
- }
-
- for idx, elem := range data {
- val.Index(idx).Set(reflect.ValueOf(elem))
- }
- return nil
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- case bsontype.Undefined:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadUndefined()
- default:
- return fmt.Errorf("cannot decode %v into an array", vrType)
- }
-
- var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error)
- switch val.Type().Elem() {
- case tE:
- elemsFunc = dvd.decodeD
- default:
- elemsFunc = dvd.decodeDefault
- }
-
- elems, err := elemsFunc(dc, vr, val)
- if err != nil {
- return err
- }
-
- if len(elems) > val.Len() {
- return fmt.Errorf("more elements returned in array than can fit inside %s, got %v elements", val.Type(), len(elems))
- }
-
- for idx, elem := range elems {
- val.Index(idx).Set(elem)
- }
-
- return nil
-}
-
-// SliceDecodeValue is the ValueDecoderFunc for slice types.
-//
-// Deprecated: SliceDecodeValue is not registered by default. Use SliceCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) SliceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Kind() != reflect.Slice {
- return ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
- }
-
- switch vr.Type() {
- case bsontype.Array:
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- if val.Type().Elem() != tE {
- return fmt.Errorf("cannot decode document into %s", val.Type())
- }
- default:
- return fmt.Errorf("cannot decode %v into a slice", vr.Type())
- }
-
- var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error)
- switch val.Type().Elem() {
- case tE:
- dc.Ancestor = val.Type()
- elemsFunc = dvd.decodeD
- default:
- elemsFunc = dvd.decodeDefault
- }
-
- elems, err := elemsFunc(dc, vr, val)
- if err != nil {
- return err
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeSlice(val.Type(), 0, len(elems)))
- }
-
- val.SetLen(0)
- val.Set(reflect.Append(val, elems...))
-
- return nil
-}
-
-// ValueUnmarshalerDecodeValue is the ValueDecoderFunc for ValueUnmarshaler implementations.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) ValueUnmarshalerDecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.IsValid() || (!val.Type().Implements(tValueUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tValueUnmarshaler)) {
- return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
- }
-
- // If BSON value is null and the go value is a pointer, then don't call
- // UnmarshalBSONValue. Even if the Go pointer is already initialized (i.e.,
- // non-nil), encountering null in BSON will result in the pointer being
- // directly set to nil here. Since the pointer is being replaced with nil,
- // there is no opportunity (or reason) for the custom UnmarshalBSONValue logic
- // to be called.
- if vr.Type() == bsontype.Null && val.Kind() == reflect.Ptr {
- val.Set(reflect.Zero(val.Type()))
-
- return vr.ReadNull()
- }
-
- if val.Kind() == reflect.Ptr && val.IsNil() {
- if !val.CanSet() {
- return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
- }
- val.Set(reflect.New(val.Type().Elem()))
- }
-
- if !val.Type().Implements(tValueUnmarshaler) {
- if !val.CanAddr() {
- return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
- }
- val = val.Addr() // If the type doesn't implement the interface, a pointer to it must.
- }
-
- t, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
- if err != nil {
- return err
- }
-
- m, ok := val.Interface().(ValueUnmarshaler)
- if !ok {
- // NB: this error should be unreachable due to the above checks
- return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
- }
- return m.UnmarshalBSONValue(t, src)
-}
-
-// UnmarshalerDecodeValue is the ValueDecoderFunc for Unmarshaler implementations.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) UnmarshalerDecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.IsValid() || (!val.Type().Implements(tUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tUnmarshaler)) {
- return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
- }
-
- if val.Kind() == reflect.Ptr && val.IsNil() {
- if !val.CanSet() {
- return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
- }
- val.Set(reflect.New(val.Type().Elem()))
- }
-
- _, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
- if err != nil {
- return err
- }
-
- // If the target Go value is a pointer and the BSON field value is empty, set the value to the
- // zero value of the pointer (nil) and don't call UnmarshalBSON. UnmarshalBSON has no way to
- // change the pointer value from within the function (only the value at the pointer address),
- // so it can't set the pointer to "nil" itself. Since the most common Go value for an empty BSON
- // field value is "nil", we set "nil" here and don't call UnmarshalBSON. This behavior matches
- // the behavior of the Go "encoding/json" unmarshaler when the target Go value is a pointer and
- // the JSON field value is "null".
- if val.Kind() == reflect.Ptr && len(src) == 0 {
- val.Set(reflect.Zero(val.Type()))
- return nil
- }
-
- if !val.Type().Implements(tUnmarshaler) {
- if !val.CanAddr() {
- return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
- }
- val = val.Addr() // If the type doesn't implement the interface, a pointer to it must.
- }
-
- m, ok := val.Interface().(Unmarshaler)
- if !ok {
- // NB: this error should be unreachable due to the above checks
- return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
- }
- return m.UnmarshalBSON(src)
-}
-
-// EmptyInterfaceDecodeValue is the ValueDecoderFunc for interface{}.
-//
-// Deprecated: EmptyInterfaceDecodeValue is not registered by default. Use EmptyInterfaceCodec.DecodeValue instead.
-func (dvd DefaultValueDecoders) EmptyInterfaceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tEmpty {
- return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val}
- }
-
- rtype, err := dc.LookupTypeMapEntry(vr.Type())
- if err != nil {
- switch vr.Type() {
- case bsontype.EmbeddedDocument:
- if dc.Ancestor != nil {
- rtype = dc.Ancestor
- break
- }
- rtype = tD
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- default:
- return err
- }
- }
-
- decoder, err := dc.LookupDecoder(rtype)
- if err != nil {
- return err
- }
-
- elem := reflect.New(rtype).Elem()
- err = decoder.DecodeValue(dc, vr, elem)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-// CoreDocumentDecodeValue is the ValueDecoderFunc for bsoncore.Document.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (DefaultValueDecoders) CoreDocumentDecodeValue(_ DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tCoreDocument {
- return ValueDecoderError{Name: "CoreDocumentDecodeValue", Types: []reflect.Type{tCoreDocument}, Received: val}
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeSlice(val.Type(), 0, 0))
- }
-
- val.SetLen(0)
-
- cdoc, err := bsonrw.Copier{}.AppendDocumentBytes(val.Interface().(bsoncore.Document), vr)
- val.Set(reflect.ValueOf(cdoc))
- return err
-}
-
-func (dvd DefaultValueDecoders) decodeDefault(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) ([]reflect.Value, error) {
- elems := make([]reflect.Value, 0)
-
- ar, err := vr.ReadArray()
- if err != nil {
- return nil, err
- }
-
- eType := val.Type().Elem()
-
- decoder, err := dc.LookupDecoder(eType)
- if err != nil {
- return nil, err
- }
- eTypeDecoder, _ := decoder.(typeDecoder)
-
- idx := 0
- for {
- vr, err := ar.ReadValue()
- if errors.Is(err, bsonrw.ErrEOA) {
- break
- }
- if err != nil {
- return nil, err
- }
-
- elem, err := decodeTypeOrValueWithInfo(decoder, eTypeDecoder, dc, vr, eType, true)
- if err != nil {
- return nil, newDecodeError(strconv.Itoa(idx), err)
- }
- elems = append(elems, elem)
- idx++
- }
-
- return elems, nil
-}
-
-func (dvd DefaultValueDecoders) readCodeWithScope(dc DecodeContext, vr bsonrw.ValueReader) (primitive.CodeWithScope, error) {
- var cws primitive.CodeWithScope
-
- code, dr, err := vr.ReadCodeWithScope()
- if err != nil {
- return cws, err
- }
-
- scope := reflect.New(tD).Elem()
- elems, err := dvd.decodeElemsFromDocumentReader(dc, dr)
- if err != nil {
- return cws, err
- }
-
- scope.Set(reflect.MakeSlice(tD, 0, len(elems)))
- scope.Set(reflect.Append(scope, elems...))
-
- cws = primitive.CodeWithScope{
- Code: primitive.JavaScript(code),
- Scope: scope.Interface().(primitive.D),
- }
- return cws, nil
-}
-
-func (dvd DefaultValueDecoders) codeWithScopeDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tCodeWithScope {
- return emptyValue, ValueDecoderError{
- Name: "CodeWithScopeDecodeValue",
- Types: []reflect.Type{tCodeWithScope},
- Received: reflect.Zero(t),
- }
- }
-
- var cws primitive.CodeWithScope
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.CodeWithScope:
- cws, err = dvd.readCodeWithScope(dc, vr)
- case bsontype.Null:
- err = vr.ReadNull()
- case bsontype.Undefined:
- err = vr.ReadUndefined()
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a primitive.CodeWithScope", vrType)
- }
- if err != nil {
- return emptyValue, err
- }
-
- return reflect.ValueOf(cws), nil
-}
-
-// CodeWithScopeDecodeValue is the ValueDecoderFunc for CodeWithScope.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value decoders registered.
-func (dvd DefaultValueDecoders) CodeWithScopeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tCodeWithScope {
- return ValueDecoderError{Name: "CodeWithScopeDecodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val}
- }
-
- elem, err := dvd.codeWithScopeDecodeType(dc, vr, tCodeWithScope)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-func (dvd DefaultValueDecoders) decodeD(dc DecodeContext, vr bsonrw.ValueReader, _ reflect.Value) ([]reflect.Value, error) {
- switch vr.Type() {
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- default:
- return nil, fmt.Errorf("cannot decode %v into a D", vr.Type())
- }
-
- dr, err := vr.ReadDocument()
- if err != nil {
- return nil, err
- }
-
- return dvd.decodeElemsFromDocumentReader(dc, dr)
-}
-
-func (DefaultValueDecoders) decodeElemsFromDocumentReader(dc DecodeContext, dr bsonrw.DocumentReader) ([]reflect.Value, error) {
- decoder, err := dc.LookupDecoder(tEmpty)
- if err != nil {
- return nil, err
- }
-
- elems := make([]reflect.Value, 0)
- for {
- key, vr, err := dr.ReadElement()
- if errors.Is(err, bsonrw.ErrEOD) {
- break
- }
- if err != nil {
- return nil, err
- }
-
- val := reflect.New(tEmpty).Elem()
- err = decoder.DecodeValue(dc, vr, val)
- if err != nil {
- return nil, newDecodeError(key, err)
- }
-
- elems = append(elems, reflect.ValueOf(primitive.E{Key: key, Value: val.Interface()}))
- }
-
- return elems, nil
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go
deleted file mode 100644
index 4751ae995e..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go
+++ /dev/null
@@ -1,856 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "math"
- "net/url"
- "reflect"
- "sync"
- "time"
-
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
- "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
-)
-
-var defaultValueEncoders DefaultValueEncoders
-
-var bvwPool = bsonrw.NewBSONValueWriterPool()
-
-var errInvalidValue = errors.New("cannot encode invalid element")
-
-var sliceWriterPool = sync.Pool{
- New: func() interface{} {
- sw := make(bsonrw.SliceWriter, 0)
- return &sw
- },
-}
-
-func encodeElement(ec EncodeContext, dw bsonrw.DocumentWriter, e primitive.E) error {
- vw, err := dw.WriteDocumentElement(e.Key)
- if err != nil {
- return err
- }
-
- if e.Value == nil {
- return vw.WriteNull()
- }
- encoder, err := ec.LookupEncoder(reflect.TypeOf(e.Value))
- if err != nil {
- return err
- }
-
- err = encoder.EncodeValue(ec, vw, reflect.ValueOf(e.Value))
- if err != nil {
- return err
- }
- return nil
-}
-
-// DefaultValueEncoders is a namespace type for the default ValueEncoders used
-// when creating a registry.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-type DefaultValueEncoders struct{}
-
-// RegisterDefaultEncoders will register the encoder methods attached to DefaultValueEncoders with
-// the provided RegistryBuilder.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) RegisterDefaultEncoders(rb *RegistryBuilder) {
- if rb == nil {
- panic(errors.New("argument to RegisterDefaultEncoders must not be nil"))
- }
- rb.
- RegisterTypeEncoder(tByteSlice, defaultByteSliceCodec).
- RegisterTypeEncoder(tTime, defaultTimeCodec).
- RegisterTypeEncoder(tEmpty, defaultEmptyInterfaceCodec).
- RegisterTypeEncoder(tCoreArray, defaultArrayCodec).
- RegisterTypeEncoder(tOID, ValueEncoderFunc(dve.ObjectIDEncodeValue)).
- RegisterTypeEncoder(tDecimal, ValueEncoderFunc(dve.Decimal128EncodeValue)).
- RegisterTypeEncoder(tJSONNumber, ValueEncoderFunc(dve.JSONNumberEncodeValue)).
- RegisterTypeEncoder(tURL, ValueEncoderFunc(dve.URLEncodeValue)).
- RegisterTypeEncoder(tJavaScript, ValueEncoderFunc(dve.JavaScriptEncodeValue)).
- RegisterTypeEncoder(tSymbol, ValueEncoderFunc(dve.SymbolEncodeValue)).
- RegisterTypeEncoder(tBinary, ValueEncoderFunc(dve.BinaryEncodeValue)).
- RegisterTypeEncoder(tUndefined, ValueEncoderFunc(dve.UndefinedEncodeValue)).
- RegisterTypeEncoder(tDateTime, ValueEncoderFunc(dve.DateTimeEncodeValue)).
- RegisterTypeEncoder(tNull, ValueEncoderFunc(dve.NullEncodeValue)).
- RegisterTypeEncoder(tRegex, ValueEncoderFunc(dve.RegexEncodeValue)).
- RegisterTypeEncoder(tDBPointer, ValueEncoderFunc(dve.DBPointerEncodeValue)).
- RegisterTypeEncoder(tTimestamp, ValueEncoderFunc(dve.TimestampEncodeValue)).
- RegisterTypeEncoder(tMinKey, ValueEncoderFunc(dve.MinKeyEncodeValue)).
- RegisterTypeEncoder(tMaxKey, ValueEncoderFunc(dve.MaxKeyEncodeValue)).
- RegisterTypeEncoder(tCoreDocument, ValueEncoderFunc(dve.CoreDocumentEncodeValue)).
- RegisterTypeEncoder(tCodeWithScope, ValueEncoderFunc(dve.CodeWithScopeEncodeValue)).
- RegisterDefaultEncoder(reflect.Bool, ValueEncoderFunc(dve.BooleanEncodeValue)).
- RegisterDefaultEncoder(reflect.Int, ValueEncoderFunc(dve.IntEncodeValue)).
- RegisterDefaultEncoder(reflect.Int8, ValueEncoderFunc(dve.IntEncodeValue)).
- RegisterDefaultEncoder(reflect.Int16, ValueEncoderFunc(dve.IntEncodeValue)).
- RegisterDefaultEncoder(reflect.Int32, ValueEncoderFunc(dve.IntEncodeValue)).
- RegisterDefaultEncoder(reflect.Int64, ValueEncoderFunc(dve.IntEncodeValue)).
- RegisterDefaultEncoder(reflect.Uint, defaultUIntCodec).
- RegisterDefaultEncoder(reflect.Uint8, defaultUIntCodec).
- RegisterDefaultEncoder(reflect.Uint16, defaultUIntCodec).
- RegisterDefaultEncoder(reflect.Uint32, defaultUIntCodec).
- RegisterDefaultEncoder(reflect.Uint64, defaultUIntCodec).
- RegisterDefaultEncoder(reflect.Float32, ValueEncoderFunc(dve.FloatEncodeValue)).
- RegisterDefaultEncoder(reflect.Float64, ValueEncoderFunc(dve.FloatEncodeValue)).
- RegisterDefaultEncoder(reflect.Array, ValueEncoderFunc(dve.ArrayEncodeValue)).
- RegisterDefaultEncoder(reflect.Map, defaultMapCodec).
- RegisterDefaultEncoder(reflect.Slice, defaultSliceCodec).
- RegisterDefaultEncoder(reflect.String, defaultStringCodec).
- RegisterDefaultEncoder(reflect.Struct, newDefaultStructCodec()).
- RegisterDefaultEncoder(reflect.Ptr, NewPointerCodec()).
- RegisterHookEncoder(tValueMarshaler, ValueEncoderFunc(dve.ValueMarshalerEncodeValue)).
- RegisterHookEncoder(tMarshaler, ValueEncoderFunc(dve.MarshalerEncodeValue)).
- RegisterHookEncoder(tProxy, ValueEncoderFunc(dve.ProxyEncodeValue))
-}
-
-// BooleanEncodeValue is the ValueEncoderFunc for bool types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) BooleanEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Bool {
- return ValueEncoderError{Name: "BooleanEncodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val}
- }
- return vw.WriteBoolean(val.Bool())
-}
-
-func fitsIn32Bits(i int64) bool {
- return math.MinInt32 <= i && i <= math.MaxInt32
-}
-
-// IntEncodeValue is the ValueEncoderFunc for int types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) IntEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- switch val.Kind() {
- case reflect.Int8, reflect.Int16, reflect.Int32:
- return vw.WriteInt32(int32(val.Int()))
- case reflect.Int:
- i64 := val.Int()
- if fitsIn32Bits(i64) {
- return vw.WriteInt32(int32(i64))
- }
- return vw.WriteInt64(i64)
- case reflect.Int64:
- i64 := val.Int()
- if ec.MinSize && fitsIn32Bits(i64) {
- return vw.WriteInt32(int32(i64))
- }
- return vw.WriteInt64(i64)
- }
-
- return ValueEncoderError{
- Name: "IntEncodeValue",
- Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int},
- Received: val,
- }
-}
-
-// UintEncodeValue is the ValueEncoderFunc for uint types.
-//
-// Deprecated: UintEncodeValue is not registered by default. Use UintCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) UintEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- switch val.Kind() {
- case reflect.Uint8, reflect.Uint16:
- return vw.WriteInt32(int32(val.Uint()))
- case reflect.Uint, reflect.Uint32, reflect.Uint64:
- u64 := val.Uint()
- if ec.MinSize && u64 <= math.MaxInt32 {
- return vw.WriteInt32(int32(u64))
- }
- if u64 > math.MaxInt64 {
- return fmt.Errorf("%d overflows int64", u64)
- }
- return vw.WriteInt64(int64(u64))
- }
-
- return ValueEncoderError{
- Name: "UintEncodeValue",
- Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
- Received: val,
- }
-}
-
-// FloatEncodeValue is the ValueEncoderFunc for float types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) FloatEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- switch val.Kind() {
- case reflect.Float32, reflect.Float64:
- return vw.WriteDouble(val.Float())
- }
-
- return ValueEncoderError{Name: "FloatEncodeValue", Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, Received: val}
-}
-
-// StringEncodeValue is the ValueEncoderFunc for string types.
-//
-// Deprecated: StringEncodeValue is not registered by default. Use StringCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) StringEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if val.Kind() != reflect.String {
- return ValueEncoderError{
- Name: "StringEncodeValue",
- Kinds: []reflect.Kind{reflect.String},
- Received: val,
- }
- }
-
- return vw.WriteString(val.String())
-}
-
-// ObjectIDEncodeValue is the ValueEncoderFunc for primitive.ObjectID.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) ObjectIDEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tOID {
- return ValueEncoderError{Name: "ObjectIDEncodeValue", Types: []reflect.Type{tOID}, Received: val}
- }
- return vw.WriteObjectID(val.Interface().(primitive.ObjectID))
-}
-
-// Decimal128EncodeValue is the ValueEncoderFunc for primitive.Decimal128.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) Decimal128EncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tDecimal {
- return ValueEncoderError{Name: "Decimal128EncodeValue", Types: []reflect.Type{tDecimal}, Received: val}
- }
- return vw.WriteDecimal128(val.Interface().(primitive.Decimal128))
-}
-
-// JSONNumberEncodeValue is the ValueEncoderFunc for json.Number.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) JSONNumberEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tJSONNumber {
- return ValueEncoderError{Name: "JSONNumberEncodeValue", Types: []reflect.Type{tJSONNumber}, Received: val}
- }
- jsnum := val.Interface().(json.Number)
-
- // Attempt int first, then float64
- if i64, err := jsnum.Int64(); err == nil {
- return dve.IntEncodeValue(ec, vw, reflect.ValueOf(i64))
- }
-
- f64, err := jsnum.Float64()
- if err != nil {
- return err
- }
-
- return dve.FloatEncodeValue(ec, vw, reflect.ValueOf(f64))
-}
-
-// URLEncodeValue is the ValueEncoderFunc for url.URL.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) URLEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tURL {
- return ValueEncoderError{Name: "URLEncodeValue", Types: []reflect.Type{tURL}, Received: val}
- }
- u := val.Interface().(url.URL)
- return vw.WriteString(u.String())
-}
-
-// TimeEncodeValue is the ValueEncoderFunc for time.Time.
-//
-// Deprecated: TimeEncodeValue is not registered by default. Use TimeCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) TimeEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tTime {
- return ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: val}
- }
- tt := val.Interface().(time.Time)
- dt := primitive.NewDateTimeFromTime(tt)
- return vw.WriteDateTime(int64(dt))
-}
-
-// ByteSliceEncodeValue is the ValueEncoderFunc for []byte.
-//
-// Deprecated: ByteSliceEncodeValue is not registered by default. Use ByteSliceCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) ByteSliceEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tByteSlice {
- return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
- }
- if val.IsNil() {
- return vw.WriteNull()
- }
- return vw.WriteBinary(val.Interface().([]byte))
-}
-
-// MapEncodeValue is the ValueEncoderFunc for map[string]* types.
-//
-// Deprecated: MapEncodeValue is not registered by default. Use MapCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) MapEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Map || val.Type().Key().Kind() != reflect.String {
- return ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
- }
-
- if val.IsNil() {
-		// If we have a nil map but we can't WriteNull, that means we're probably trying to encode
- // to a TopLevel document. We can't currently tell if this is what actually happened, but if
- // there's a deeper underlying problem, the error will also be returned from WriteDocument,
- // so just continue. The operations on a map reflection value are valid, so we can call
- // MapKeys within mapEncodeValue without a problem.
- err := vw.WriteNull()
- if err == nil {
- return nil
- }
- }
-
- dw, err := vw.WriteDocument()
- if err != nil {
- return err
- }
-
- return dve.mapEncodeValue(ec, dw, val, nil)
-}
-
-// mapEncodeValue handles encoding of the values of a map. The collisionFn returns
-// true if the provided key exists; this is mainly used for inline maps in the
-// struct codec.
-func (dve DefaultValueEncoders) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, val reflect.Value, collisionFn func(string) bool) error {
-
- elemType := val.Type().Elem()
- encoder, err := ec.LookupEncoder(elemType)
- if err != nil && elemType.Kind() != reflect.Interface {
- return err
- }
-
- keys := val.MapKeys()
- for _, key := range keys {
- if collisionFn != nil && collisionFn(key.String()) {
- return fmt.Errorf("Key %s of inlined map conflicts with a struct field name", key)
- }
-
- currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.MapIndex(key))
- if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
- return lookupErr
- }
-
- vw, err := dw.WriteDocumentElement(key.String())
- if err != nil {
- return err
- }
-
- if errors.Is(lookupErr, errInvalidValue) {
- err = vw.WriteNull()
- if err != nil {
- return err
- }
- continue
- }
-
- err = currEncoder.EncodeValue(ec, vw, currVal)
- if err != nil {
- return err
- }
- }
-
- return dw.WriteDocumentEnd()
-}
-
-// ArrayEncodeValue is the ValueEncoderFunc for array types.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) ArrayEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Array {
- return ValueEncoderError{Name: "ArrayEncodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val}
- }
-
- // If we have a []primitive.E we want to treat it as a document instead of as an array.
- if val.Type().Elem() == tE {
- dw, err := vw.WriteDocument()
- if err != nil {
- return err
- }
-
- for idx := 0; idx < val.Len(); idx++ {
- e := val.Index(idx).Interface().(primitive.E)
- err = encodeElement(ec, dw, e)
- if err != nil {
- return err
- }
- }
-
- return dw.WriteDocumentEnd()
- }
-
- // If we have a []byte we want to treat it as a binary instead of as an array.
- if val.Type().Elem() == tByte {
- var byteSlice []byte
- for idx := 0; idx < val.Len(); idx++ {
- byteSlice = append(byteSlice, val.Index(idx).Interface().(byte))
- }
- return vw.WriteBinary(byteSlice)
- }
-
- aw, err := vw.WriteArray()
- if err != nil {
- return err
- }
-
- elemType := val.Type().Elem()
- encoder, err := ec.LookupEncoder(elemType)
- if err != nil && elemType.Kind() != reflect.Interface {
- return err
- }
-
- for idx := 0; idx < val.Len(); idx++ {
- currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx))
- if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
- return lookupErr
- }
-
- vw, err := aw.WriteArrayElement()
- if err != nil {
- return err
- }
-
- if errors.Is(lookupErr, errInvalidValue) {
- err = vw.WriteNull()
- if err != nil {
- return err
- }
- continue
- }
-
- err = currEncoder.EncodeValue(ec, vw, currVal)
- if err != nil {
- return err
- }
- }
- return aw.WriteArrayEnd()
-}
-
-// SliceEncodeValue is the ValueEncoderFunc for slice types.
-//
-// Deprecated: SliceEncodeValue is not registered by default. Use SliceCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) SliceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Slice {
- return ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
- }
-
- if val.IsNil() {
- return vw.WriteNull()
- }
-
- // If we have a []primitive.E we want to treat it as a document instead of as an array.
- if val.Type().ConvertibleTo(tD) {
- d := val.Convert(tD).Interface().(primitive.D)
-
- dw, err := vw.WriteDocument()
- if err != nil {
- return err
- }
-
- for _, e := range d {
- err = encodeElement(ec, dw, e)
- if err != nil {
- return err
- }
- }
-
- return dw.WriteDocumentEnd()
- }
-
- aw, err := vw.WriteArray()
- if err != nil {
- return err
- }
-
- elemType := val.Type().Elem()
- encoder, err := ec.LookupEncoder(elemType)
- if err != nil && elemType.Kind() != reflect.Interface {
- return err
- }
-
- for idx := 0; idx < val.Len(); idx++ {
- currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx))
- if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
- return lookupErr
- }
-
- vw, err := aw.WriteArrayElement()
- if err != nil {
- return err
- }
-
- if errors.Is(lookupErr, errInvalidValue) {
- err = vw.WriteNull()
- if err != nil {
- return err
- }
- continue
- }
-
- err = currEncoder.EncodeValue(ec, vw, currVal)
- if err != nil {
- return err
- }
- }
- return aw.WriteArrayEnd()
-}
-
-func (dve DefaultValueEncoders) lookupElementEncoder(ec EncodeContext, origEncoder ValueEncoder, currVal reflect.Value) (ValueEncoder, reflect.Value, error) {
- if origEncoder != nil || (currVal.Kind() != reflect.Interface) {
- return origEncoder, currVal, nil
- }
- currVal = currVal.Elem()
- if !currVal.IsValid() {
- return nil, currVal, errInvalidValue
- }
- currEncoder, err := ec.LookupEncoder(currVal.Type())
-
- return currEncoder, currVal, err
-}
-
-// EmptyInterfaceEncodeValue is the ValueEncoderFunc for interface{}.
-//
-// Deprecated: EmptyInterfaceEncodeValue is not registered by default. Use EmptyInterfaceCodec.EncodeValue instead.
-func (dve DefaultValueEncoders) EmptyInterfaceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tEmpty {
- return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val}
- }
-
- if val.IsNil() {
- return vw.WriteNull()
- }
- encoder, err := ec.LookupEncoder(val.Elem().Type())
- if err != nil {
- return err
- }
-
- return encoder.EncodeValue(ec, vw, val.Elem())
-}
-
-// ValueMarshalerEncodeValue is the ValueEncoderFunc for ValueMarshaler implementations.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) ValueMarshalerEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- // Either val or a pointer to val must implement ValueMarshaler
- switch {
- case !val.IsValid():
- return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val}
- case val.Type().Implements(tValueMarshaler):
- // If ValueMarshaler is implemented on a concrete type, make sure that val isn't a nil pointer
- if isImplementationNil(val, tValueMarshaler) {
- return vw.WriteNull()
- }
- case reflect.PtrTo(val.Type()).Implements(tValueMarshaler) && val.CanAddr():
- val = val.Addr()
- default:
- return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val}
- }
-
- m, ok := val.Interface().(ValueMarshaler)
- if !ok {
- return vw.WriteNull()
- }
- t, data, err := m.MarshalBSONValue()
- if err != nil {
- return err
- }
- return bsonrw.Copier{}.CopyValueFromBytes(vw, t, data)
-}
-
-// MarshalerEncodeValue is the ValueEncoderFunc for Marshaler implementations.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) MarshalerEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- // Either val or a pointer to val must implement Marshaler
- switch {
- case !val.IsValid():
- return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val}
- case val.Type().Implements(tMarshaler):
- // If Marshaler is implemented on a concrete type, make sure that val isn't a nil pointer
- if isImplementationNil(val, tMarshaler) {
- return vw.WriteNull()
- }
- case reflect.PtrTo(val.Type()).Implements(tMarshaler) && val.CanAddr():
- val = val.Addr()
- default:
- return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val}
- }
-
- m, ok := val.Interface().(Marshaler)
- if !ok {
- return vw.WriteNull()
- }
- data, err := m.MarshalBSON()
- if err != nil {
- return err
- }
- return bsonrw.Copier{}.CopyValueFromBytes(vw, bsontype.EmbeddedDocument, data)
-}
-
-// ProxyEncodeValue is the ValueEncoderFunc for Proxy implementations.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) ProxyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- // Either val or a pointer to val must implement Proxy
- switch {
- case !val.IsValid():
- return ValueEncoderError{Name: "ProxyEncodeValue", Types: []reflect.Type{tProxy}, Received: val}
- case val.Type().Implements(tProxy):
- // If Proxy is implemented on a concrete type, make sure that val isn't a nil pointer
- if isImplementationNil(val, tProxy) {
- return vw.WriteNull()
- }
- case reflect.PtrTo(val.Type()).Implements(tProxy) && val.CanAddr():
- val = val.Addr()
- default:
- return ValueEncoderError{Name: "ProxyEncodeValue", Types: []reflect.Type{tProxy}, Received: val}
- }
-
- m, ok := val.Interface().(Proxy)
- if !ok {
- return vw.WriteNull()
- }
- v, err := m.ProxyBSON()
- if err != nil {
- return err
- }
- if v == nil {
- encoder, err := ec.LookupEncoder(nil)
- if err != nil {
- return err
- }
- return encoder.EncodeValue(ec, vw, reflect.ValueOf(nil))
- }
- vv := reflect.ValueOf(v)
- switch vv.Kind() {
- case reflect.Ptr, reflect.Interface:
- vv = vv.Elem()
- }
- encoder, err := ec.LookupEncoder(vv.Type())
- if err != nil {
- return err
- }
- return encoder.EncodeValue(ec, vw, vv)
-}
-
-// JavaScriptEncodeValue is the ValueEncoderFunc for the primitive.JavaScript type.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) JavaScriptEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tJavaScript {
- return ValueEncoderError{Name: "JavaScriptEncodeValue", Types: []reflect.Type{tJavaScript}, Received: val}
- }
-
- return vw.WriteJavascript(val.String())
-}
-
-// SymbolEncodeValue is the ValueEncoderFunc for the primitive.Symbol type.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) SymbolEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tSymbol {
- return ValueEncoderError{Name: "SymbolEncodeValue", Types: []reflect.Type{tSymbol}, Received: val}
- }
-
- return vw.WriteSymbol(val.String())
-}
-
-// BinaryEncodeValue is the ValueEncoderFunc for Binary.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) BinaryEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tBinary {
- return ValueEncoderError{Name: "BinaryEncodeValue", Types: []reflect.Type{tBinary}, Received: val}
- }
- b := val.Interface().(primitive.Binary)
-
- return vw.WriteBinaryWithSubtype(b.Data, b.Subtype)
-}
-
-// UndefinedEncodeValue is the ValueEncoderFunc for Undefined.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) UndefinedEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tUndefined {
- return ValueEncoderError{Name: "UndefinedEncodeValue", Types: []reflect.Type{tUndefined}, Received: val}
- }
-
- return vw.WriteUndefined()
-}
-
-// DateTimeEncodeValue is the ValueEncoderFunc for DateTime.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) DateTimeEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tDateTime {
- return ValueEncoderError{Name: "DateTimeEncodeValue", Types: []reflect.Type{tDateTime}, Received: val}
- }
-
- return vw.WriteDateTime(val.Int())
-}
-
-// NullEncodeValue is the ValueEncoderFunc for Null.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) NullEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tNull {
- return ValueEncoderError{Name: "NullEncodeValue", Types: []reflect.Type{tNull}, Received: val}
- }
-
- return vw.WriteNull()
-}
-
-// RegexEncodeValue is the ValueEncoderFunc for Regex.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) RegexEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tRegex {
- return ValueEncoderError{Name: "RegexEncodeValue", Types: []reflect.Type{tRegex}, Received: val}
- }
-
- regex := val.Interface().(primitive.Regex)
-
- return vw.WriteRegex(regex.Pattern, regex.Options)
-}
-
-// DBPointerEncodeValue is the ValueEncoderFunc for DBPointer.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) DBPointerEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tDBPointer {
- return ValueEncoderError{Name: "DBPointerEncodeValue", Types: []reflect.Type{tDBPointer}, Received: val}
- }
-
- dbp := val.Interface().(primitive.DBPointer)
-
- return vw.WriteDBPointer(dbp.DB, dbp.Pointer)
-}
-
-// TimestampEncodeValue is the ValueEncoderFunc for Timestamp.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) TimestampEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tTimestamp {
- return ValueEncoderError{Name: "TimestampEncodeValue", Types: []reflect.Type{tTimestamp}, Received: val}
- }
-
- ts := val.Interface().(primitive.Timestamp)
-
- return vw.WriteTimestamp(ts.T, ts.I)
-}
-
-// MinKeyEncodeValue is the ValueEncoderFunc for MinKey.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) MinKeyEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tMinKey {
- return ValueEncoderError{Name: "MinKeyEncodeValue", Types: []reflect.Type{tMinKey}, Received: val}
- }
-
- return vw.WriteMinKey()
-}
-
-// MaxKeyEncodeValue is the ValueEncoderFunc for MaxKey.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) MaxKeyEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tMaxKey {
- return ValueEncoderError{Name: "MaxKeyEncodeValue", Types: []reflect.Type{tMaxKey}, Received: val}
- }
-
- return vw.WriteMaxKey()
-}
-
-// CoreDocumentEncodeValue is the ValueEncoderFunc for bsoncore.Document.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (DefaultValueEncoders) CoreDocumentEncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tCoreDocument {
- return ValueEncoderError{Name: "CoreDocumentEncodeValue", Types: []reflect.Type{tCoreDocument}, Received: val}
- }
-
- cdoc := val.Interface().(bsoncore.Document)
-
- return bsonrw.Copier{}.CopyDocumentFromBytes(vw, cdoc)
-}
-
-// CodeWithScopeEncodeValue is the ValueEncoderFunc for CodeWithScope.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.NewRegistry] to get a registry with all default
-// value encoders registered.
-func (dve DefaultValueEncoders) CodeWithScopeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tCodeWithScope {
- return ValueEncoderError{Name: "CodeWithScopeEncodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val}
- }
-
- cws := val.Interface().(primitive.CodeWithScope)
-
- dw, err := vw.WriteCodeWithScope(string(cws.Code))
- if err != nil {
- return err
- }
-
- sw := sliceWriterPool.Get().(*bsonrw.SliceWriter)
- defer sliceWriterPool.Put(sw)
- *sw = (*sw)[:0]
-
- scopeVW := bvwPool.Get(sw)
- defer bvwPool.Put(scopeVW)
-
- encoder, err := ec.LookupEncoder(reflect.TypeOf(cws.Scope))
- if err != nil {
- return err
- }
-
- err = encoder.EncodeValue(ec, scopeVW, reflect.ValueOf(cws.Scope))
- if err != nil {
- return err
- }
-
- err = bsonrw.Copier{}.CopyBytesToDocumentWriter(dw, *sw)
- if err != nil {
- return err
- }
- return dw.WriteDocumentEnd()
-}
-
-// isImplementationNil returns true if val is a nil pointer and inter is implemented on a concrete type
-func isImplementationNil(val reflect.Value, inter reflect.Type) bool {
- vt := val.Type()
- for vt.Kind() == reflect.Ptr {
- vt = vt.Elem()
- }
- return vt.Implements(inter) && val.Kind() == reflect.Ptr && val.IsNil()
-}
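The Marshaler and ValueMarshaler hooks handled above copy whatever bytes the user type produces straight into the output writer. A minimal sketch of the Marshaler path (assuming the v1 mongo-driver API; the Celsius type and its field names are made up for illustration): MarshalBSON must return a complete BSON document, which the hook encoder then copies into place as an embedded document.

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
)

// Celsius is an illustrative type; because it implements bson.Marshaler, the
// Marshaler hook encoder is selected ahead of the default float64 encoder.
type Celsius float64

// MarshalBSON returns a full BSON document, as the Marshaler contract requires.
func (c Celsius) MarshalBSON() ([]byte, error) {
	return bson.Marshal(bson.D{
		{Key: "unit", Value: "C"},
		{Key: "value", Value: float64(c)},
	})
}

func main() {
	doc, err := bson.Marshal(bson.D{{Key: "temp", Value: Celsius(21.5)}})
	if err != nil {
		panic(err)
	}

	var round bson.M
	if err := bson.Unmarshal(doc, &round); err != nil {
		panic(err)
	}
	fmt.Println(round) // e.g. map[temp:map[unit:C value:21.5]]
}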
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go
deleted file mode 100644
index 4613e5a1ec..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2022-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-// Package bsoncodec provides a system for encoding values to BSON representations and decoding
-// values from BSON representations. This package considers both binary BSON and ExtendedJSON as
-// BSON representations. The types in this package enable a flexible system for handling this
-// encoding and decoding.
-//
-// The codec system is composed of two parts:
-//
-// 1) ValueEncoders and ValueDecoders that handle encoding and decoding Go values to and from BSON
-// representations.
-//
-// 2) A Registry that holds these ValueEncoders and ValueDecoders and provides methods for
-// retrieving them.
-//
-// # ValueEncoders and ValueDecoders
-//
-// The ValueEncoder interface is implemented by types that can encode a provided Go type to BSON.
-// The value to encode is provided as a reflect.Value and a bsonrw.ValueWriter is used within the
-// EncodeValue method to actually create the BSON representation. For convenience, ValueEncoderFunc
-// is provided to allow use of a function with the correct signature as a ValueEncoder. An
-// EncodeContext instance is provided to allow implementations to lookup further ValueEncoders and
-// to provide configuration information.
-//
-// The ValueDecoder interface is the inverse of the ValueEncoder. Implementations should ensure that
-// the value they receive is settable. Similar to ValueEncoderFunc, ValueDecoderFunc is provided to
-// allow the use of a function with the correct signature as a ValueDecoder. A DecodeContext
-// instance is provided and serves similar functionality to the EncodeContext.
-//
-// # Registry
-//
-// A Registry is a store for ValueEncoders, ValueDecoders, and a type map. See the Registry type
-// documentation for examples of registering various custom encoders and decoders. A Registry can
-// have three main types of codecs:
-//
-// 1. Type encoders/decoders - These can be registered using the RegisterTypeEncoder and
-// RegisterTypeDecoder methods. The registered codec will be invoked when encoding/decoding a value
-// whose type matches the registered type exactly.
-// If the registered type is an interface, the codec will be invoked when encoding or decoding
-// values whose type is the interface, but not for values with concrete types that implement the
-// interface.
-//
-// 2. Hook encoders/decoders - These can be registered using the RegisterHookEncoder and
-// RegisterHookDecoder methods. These methods only accept interface types and the registered codecs
-// will be invoked when encoding or decoding values whose types implement the interface. An example
-// of a hook defined by the driver is bson.Marshaler. The driver will call the MarshalBSON method
-// for any value whose type implements bson.Marshaler, regardless of the value's concrete type.
-//
-// 3. Type map entries - These can be used to associate a BSON type with a Go type. These type
-// associations are used when decoding into a bson.D/bson.M or a struct field of type interface{}.
-// For example, by default, BSON int32 and int64 values decode as Go int32 and int64 instances,
-// respectively, when decoding into a bson.D. The following code would change the behavior so these
-// values decode as Go int instances instead:
-//
-// intType := reflect.TypeOf(int(0))
-// registry.RegisterTypeMapEntry(bsontype.Int32, intType).RegisterTypeMapEntry(bsontype.Int64, intType)
-//
-// 4. Kind encoder/decoders - These can be registered using the RegisterDefaultEncoder and
-// RegisterDefaultDecoder methods. The registered codec will be invoked when encoding or decoding
-// values whose reflect.Kind matches the registered reflect.Kind as long as the value's type doesn't
-// match a registered type or hook encoder/decoder first. These methods should be used to change the
-// behavior for all values for a specific kind.
-//
-// # Registry Lookup Procedure
-//
-// When looking up an encoder in a Registry, the precedence rules are as follows:
-//
-// 1. A type encoder registered for the exact type of the value.
-//
-// 2. A hook encoder registered for an interface that is implemented by the value or by a pointer to
-// the value. If the value matches multiple hooks (e.g. the type implements bsoncodec.Marshaler and
-// bsoncodec.ValueMarshaler), the first one registered will be selected. Note that registries
-// constructed using bson.NewRegistry have driver-defined hooks registered for the
-// bsoncodec.Marshaler, bsoncodec.ValueMarshaler, and bsoncodec.Proxy interfaces, so those will take
-// precedence over any new hooks.
-//
-// 3. A kind encoder registered for the value's kind.
-//
-// If all of these lookups fail to find an encoder, an error of type ErrNoEncoder is returned. The
-// same precedence rules apply for decoders, with the exception that an error of type ErrNoDecoder
-// will be returned if no decoder is found.
-//
-// # DefaultValueEncoders and DefaultValueDecoders
-//
-// The DefaultValueEncoders and DefaultValueDecoders types provide a full set of ValueEncoders and
-// ValueDecoders for handling a wide range of Go types, including all of the types within the
-// primitive package. To make registering these codecs easier, a helper method on each type is
-// provided. For the DefaultValueEncoders type the method is called RegisterDefaultEncoders and for
-// the DefaultValueDecoders type the method is called RegisterDefaultDecoders, this method also
-// handles registering type map entries for each BSON type.
-package bsoncodec
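The type map entry example in the package comment above can be exercised end to end. A hedged sketch, assuming the v1 driver API and the (deprecated) RegistryBuilder it refers to; once the entries are registered, BSON int32 and int64 values decode into interface{} destinations as Go int:

package main

import (
	"fmt"
	"reflect"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsontype"
)

func main() {
	// Start from the builder with the default codecs registered, then add the
	// type map entries described in the package documentation.
	intType := reflect.TypeOf(int(0))
	reg := bson.NewRegistryBuilder().
		RegisterTypeMapEntry(bsontype.Int32, intType).
		RegisterTypeMapEntry(bsontype.Int64, intType).
		Build()

	raw, err := bson.Marshal(bson.D{{Key: "count", Value: int32(7)}})
	if err != nil {
		panic(err)
	}

	var out bson.M
	if err := bson.UnmarshalWithRegistry(reg, raw, &out); err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", out["count"]) // int rather than the default int32
}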
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go
deleted file mode 100644
index 098368f071..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go
+++ /dev/null
@@ -1,173 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-// EmptyInterfaceCodec is the Codec used for interface{} values.
-//
-// Deprecated: EmptyInterfaceCodec will not be directly configurable in Go
-// Driver 2.0. To configure the empty interface encode and decode behavior, use
-// the configuration methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the empty interface
-// encode and decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to unmarshal BSON binary field
-// values as a Go byte slice, use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// BinaryAsSlice: true,
-// })
-//
-// See the deprecation notice for each field in EmptyInterfaceCodec for the
-// corresponding settings.
-type EmptyInterfaceCodec struct {
- // DecodeBinaryAsSlice causes DecodeValue to unmarshal BSON binary field values that are the
- // "Generic" or "Old" BSON binary subtype as a Go byte slice instead of a primitive.Binary.
- //
- // Deprecated: Use bson.Decoder.BinaryAsSlice or options.BSONOptions.BinaryAsSlice instead.
- DecodeBinaryAsSlice bool
-}
-
-var (
- defaultEmptyInterfaceCodec = NewEmptyInterfaceCodec()
-
- // Assert that defaultEmptyInterfaceCodec satisfies the typeDecoder interface, which allows it
- // to be used by collection type decoders (e.g. map, slice, etc) to set individual values in a
- // collection.
- _ typeDecoder = defaultEmptyInterfaceCodec
-)
-
-// NewEmptyInterfaceCodec returns an EmptyInterfaceCodec with options opts.
-//
-// Deprecated: NewEmptyInterfaceCodec will not be available in Go Driver 2.0. See
-// [EmptyInterfaceCodec] for more details.
-func NewEmptyInterfaceCodec(opts ...*bsonoptions.EmptyInterfaceCodecOptions) *EmptyInterfaceCodec {
- interfaceOpt := bsonoptions.MergeEmptyInterfaceCodecOptions(opts...)
-
- codec := EmptyInterfaceCodec{}
- if interfaceOpt.DecodeBinaryAsSlice != nil {
- codec.DecodeBinaryAsSlice = *interfaceOpt.DecodeBinaryAsSlice
- }
- return &codec
-}
-
-// EncodeValue is the ValueEncoderFunc for interface{}.
-func (eic EmptyInterfaceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tEmpty {
- return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val}
- }
-
- if val.IsNil() {
- return vw.WriteNull()
- }
- encoder, err := ec.LookupEncoder(val.Elem().Type())
- if err != nil {
- return err
- }
-
- return encoder.EncodeValue(ec, vw, val.Elem())
-}
-
-func (eic EmptyInterfaceCodec) getEmptyInterfaceDecodeType(dc DecodeContext, valueType bsontype.Type) (reflect.Type, error) {
- isDocument := valueType == bsontype.Type(0) || valueType == bsontype.EmbeddedDocument
- if isDocument {
- if dc.defaultDocumentType != nil {
- // If the bsontype is an embedded document and the DocumentType is set on the DecodeContext, then return
- // that type.
- return dc.defaultDocumentType, nil
- }
- if dc.Ancestor != nil {
- // Using ancestor information rather than looking up the type map entry forces consistent decoding.
- // If we're decoding into a bson.D, subdocuments should also be decoded as bson.D, even if a type map entry
- // has been registered.
- return dc.Ancestor, nil
- }
- }
-
- rtype, err := dc.LookupTypeMapEntry(valueType)
- if err == nil {
- return rtype, nil
- }
-
- if isDocument {
- // For documents, fallback to looking up a type map entry for bsontype.Type(0) or bsontype.EmbeddedDocument,
- // depending on the original valueType.
- var lookupType bsontype.Type
- switch valueType {
- case bsontype.Type(0):
- lookupType = bsontype.EmbeddedDocument
- case bsontype.EmbeddedDocument:
- lookupType = bsontype.Type(0)
- }
-
- rtype, err = dc.LookupTypeMapEntry(lookupType)
- if err == nil {
- return rtype, nil
- }
- }
-
- return nil, err
-}
-
-func (eic EmptyInterfaceCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tEmpty {
- return emptyValue, ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.Zero(t)}
- }
-
- rtype, err := eic.getEmptyInterfaceDecodeType(dc, vr.Type())
- if err != nil {
- switch vr.Type() {
- case bsontype.Null:
- return reflect.Zero(t), vr.ReadNull()
- default:
- return emptyValue, err
- }
- }
-
- decoder, err := dc.LookupDecoder(rtype)
- if err != nil {
- return emptyValue, err
- }
-
- elem, err := decodeTypeOrValue(decoder, dc, vr, rtype)
- if err != nil {
- return emptyValue, err
- }
-
- if (eic.DecodeBinaryAsSlice || dc.binaryAsSlice) && rtype == tBinary {
- binElem := elem.Interface().(primitive.Binary)
- if binElem.Subtype == bsontype.BinaryGeneric || binElem.Subtype == bsontype.BinaryBinaryOld {
- elem = reflect.ValueOf(binElem.Data)
- }
- }
-
- return elem, nil
-}
-
-// DecodeValue is the ValueDecoderFunc for interface{}.
-func (eic EmptyInterfaceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tEmpty {
- return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val}
- }
-
- elem, err := eic.decodeType(dc, vr, val.Type())
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
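The DecodeBinaryAsSlice field documented above is what the Decoder-level option replaces. A minimal sketch of that replacement, assuming a v1 driver version that provides bson.NewDecoder and Decoder.BinaryAsSlice as the deprecation notice describes:

package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
	"go.mongodb.org/mongo-driver/bson/primitive"
)

func main() {
	raw, err := bson.Marshal(bson.D{
		{Key: "payload", Value: primitive.Binary{Subtype: 0x00, Data: []byte{1, 2, 3}}},
	})
	if err != nil {
		panic(err)
	}

	dec, err := bson.NewDecoder(bsonrw.NewBSONDocumentReader(raw))
	if err != nil {
		panic(err)
	}
	// Generic-subtype binary values decode as []byte instead of primitive.Binary.
	dec.BinaryAsSlice()

	var out bson.M
	if err := dec.Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", out["payload"]) // []uint8 with BinaryAsSlice; primitive.Binary otherwise
}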
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go
deleted file mode 100644
index d7e00ffa8d..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go
+++ /dev/null
@@ -1,343 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "encoding"
- "errors"
- "fmt"
- "reflect"
- "strconv"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-var defaultMapCodec = NewMapCodec()
-
-// MapCodec is the Codec used for map values.
-//
-// Deprecated: MapCodec will not be directly configurable in Go Driver 2.0. To
-// configure the map encode and decode behavior, use the configuration methods
-// on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the map encode and
-// decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to marshal nil Go maps as empty BSON
-// documents, use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// NilMapAsEmpty: true,
-// })
-//
-// See the deprecation notice for each field in MapCodec for the corresponding
-// settings.
-type MapCodec struct {
- // DecodeZerosMap causes DecodeValue to delete any existing values from Go maps in the destination
- // value passed to Decode before unmarshaling BSON documents into them.
- //
- // Deprecated: Use bson.Decoder.ZeroMaps or options.BSONOptions.ZeroMaps instead.
- DecodeZerosMap bool
-
- // EncodeNilAsEmpty causes EncodeValue to marshal nil Go maps as empty BSON documents instead of
- // BSON null.
- //
- // Deprecated: Use bson.Encoder.NilMapAsEmpty or options.BSONOptions.NilMapAsEmpty instead.
- EncodeNilAsEmpty bool
-
- // EncodeKeysWithStringer causes the Encoder to convert Go map keys to BSON document field name
- // strings using fmt.Sprintf() instead of the default string conversion logic.
- //
- // Deprecated: Use bson.Encoder.StringifyMapKeysWithFmt or
- // options.BSONOptions.StringifyMapKeysWithFmt instead.
- EncodeKeysWithStringer bool
-}
-
-// KeyMarshaler is the interface implemented by an object that can marshal itself into a string key.
-// This applies to types used as map keys and is similar to encoding.TextMarshaler.
-type KeyMarshaler interface {
- MarshalKey() (key string, err error)
-}
-
-// KeyUnmarshaler is the interface implemented by an object that can unmarshal a string representation
-// of itself. This applies to types used as map keys and is similar to encoding.TextUnmarshaler.
-//
-// UnmarshalKey must be able to decode the form generated by MarshalKey.
-// UnmarshalKey must copy the text if it wishes to retain the text
-// after returning.
-type KeyUnmarshaler interface {
- UnmarshalKey(key string) error
-}
-
-// NewMapCodec returns a MapCodec with options opts.
-//
-// Deprecated: NewMapCodec will not be available in Go Driver 2.0. See
-// [MapCodec] for more details.
-func NewMapCodec(opts ...*bsonoptions.MapCodecOptions) *MapCodec {
- mapOpt := bsonoptions.MergeMapCodecOptions(opts...)
-
- codec := MapCodec{}
- if mapOpt.DecodeZerosMap != nil {
- codec.DecodeZerosMap = *mapOpt.DecodeZerosMap
- }
- if mapOpt.EncodeNilAsEmpty != nil {
- codec.EncodeNilAsEmpty = *mapOpt.EncodeNilAsEmpty
- }
- if mapOpt.EncodeKeysWithStringer != nil {
- codec.EncodeKeysWithStringer = *mapOpt.EncodeKeysWithStringer
- }
- return &codec
-}
-
-// EncodeValue is the ValueEncoder for map[*]* types.
-func (mc *MapCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Map {
- return ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
- }
-
- if val.IsNil() && !mc.EncodeNilAsEmpty && !ec.nilMapAsEmpty {
- // If we have a nil map but we can't WriteNull, that means we're probably trying to encode
- // to a TopLevel document. We can't currently tell if this is what actually happened, but if
- // there's a deeper underlying problem, the error will also be returned from WriteDocument,
- // so just continue. The operations on a map reflection value are valid, so we can call
- // MapKeys within mapEncodeValue without a problem.
- err := vw.WriteNull()
- if err == nil {
- return nil
- }
- }
-
- dw, err := vw.WriteDocument()
- if err != nil {
- return err
- }
-
- return mc.mapEncodeValue(ec, dw, val, nil)
-}
-
-// mapEncodeValue handles encoding of the values of a map. The collisionFn returns
-// true if the provided key exists; this is mainly used for inline maps in the
-// struct codec.
-func (mc *MapCodec) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, val reflect.Value, collisionFn func(string) bool) error {
-
- elemType := val.Type().Elem()
- encoder, err := ec.LookupEncoder(elemType)
- if err != nil && elemType.Kind() != reflect.Interface {
- return err
- }
-
- keys := val.MapKeys()
- for _, key := range keys {
- keyStr, err := mc.encodeKey(key, ec.stringifyMapKeysWithFmt)
- if err != nil {
- return err
- }
-
- if collisionFn != nil && collisionFn(keyStr) {
- return fmt.Errorf("Key %s of inlined map conflicts with a struct field name", key)
- }
-
- currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.MapIndex(key))
- if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
- return lookupErr
- }
-
- vw, err := dw.WriteDocumentElement(keyStr)
- if err != nil {
- return err
- }
-
- if errors.Is(lookupErr, errInvalidValue) {
- err = vw.WriteNull()
- if err != nil {
- return err
- }
- continue
- }
-
- err = currEncoder.EncodeValue(ec, vw, currVal)
- if err != nil {
- return err
- }
- }
-
- return dw.WriteDocumentEnd()
-}
-
-// DecodeValue is the ValueDecoder for map[string/decimal]* types.
-func (mc *MapCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if val.Kind() != reflect.Map || (!val.CanSet() && val.IsNil()) {
- return ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
- }
-
- switch vrType := vr.Type(); vrType {
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- case bsontype.Undefined:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadUndefined()
- default:
- return fmt.Errorf("cannot decode %v into a %s", vrType, val.Type())
- }
-
- dr, err := vr.ReadDocument()
- if err != nil {
- return err
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeMap(val.Type()))
- }
-
- if val.Len() > 0 && (mc.DecodeZerosMap || dc.zeroMaps) {
- clearMap(val)
- }
-
- eType := val.Type().Elem()
- decoder, err := dc.LookupDecoder(eType)
- if err != nil {
- return err
- }
- eTypeDecoder, _ := decoder.(typeDecoder)
-
- if eType == tEmpty {
- dc.Ancestor = val.Type()
- }
-
- keyType := val.Type().Key()
-
- for {
- key, vr, err := dr.ReadElement()
- if errors.Is(err, bsonrw.ErrEOD) {
- break
- }
- if err != nil {
- return err
- }
-
- k, err := mc.decodeKey(key, keyType)
- if err != nil {
- return err
- }
-
- elem, err := decodeTypeOrValueWithInfo(decoder, eTypeDecoder, dc, vr, eType, true)
- if err != nil {
- return newDecodeError(key, err)
- }
-
- val.SetMapIndex(k, elem)
- }
- return nil
-}
-
-func clearMap(m reflect.Value) {
- var none reflect.Value
- for _, k := range m.MapKeys() {
- m.SetMapIndex(k, none)
- }
-}
-
-func (mc *MapCodec) encodeKey(val reflect.Value, encodeKeysWithStringer bool) (string, error) {
- if mc.EncodeKeysWithStringer || encodeKeysWithStringer {
- return fmt.Sprint(val), nil
- }
-
- // keys of any string type are used directly
- if val.Kind() == reflect.String {
- return val.String(), nil
- }
- // KeyMarshalers are marshaled
- if km, ok := val.Interface().(KeyMarshaler); ok {
- if val.Kind() == reflect.Ptr && val.IsNil() {
- return "", nil
- }
- buf, err := km.MarshalKey()
- if err == nil {
- return buf, nil
- }
- return "", err
- }
-	// keys that implement encoding.TextMarshaler are marshaled.
- if km, ok := val.Interface().(encoding.TextMarshaler); ok {
- if val.Kind() == reflect.Ptr && val.IsNil() {
- return "", nil
- }
-
- buf, err := km.MarshalText()
- if err != nil {
- return "", err
- }
-
- return string(buf), nil
- }
-
- switch val.Kind() {
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- return strconv.FormatInt(val.Int(), 10), nil
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- return strconv.FormatUint(val.Uint(), 10), nil
- }
- return "", fmt.Errorf("unsupported key type: %v", val.Type())
-}
-
-var keyUnmarshalerType = reflect.TypeOf((*KeyUnmarshaler)(nil)).Elem()
-var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
-
-func (mc *MapCodec) decodeKey(key string, keyType reflect.Type) (reflect.Value, error) {
- keyVal := reflect.ValueOf(key)
- var err error
- switch {
-	// First, if EncodeKeysWithStringer is not enabled, try to decode with KeyUnmarshaler
- case !mc.EncodeKeysWithStringer && reflect.PtrTo(keyType).Implements(keyUnmarshalerType):
- keyVal = reflect.New(keyType)
- v := keyVal.Interface().(KeyUnmarshaler)
- err = v.UnmarshalKey(key)
- keyVal = keyVal.Elem()
- // Try to decode encoding.TextUnmarshalers.
- case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
- keyVal = reflect.New(keyType)
- v := keyVal.Interface().(encoding.TextUnmarshaler)
- err = v.UnmarshalText([]byte(key))
- keyVal = keyVal.Elem()
- // Otherwise, go to type specific behavior
- default:
- switch keyType.Kind() {
- case reflect.String:
- keyVal = reflect.ValueOf(key).Convert(keyType)
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
- n, parseErr := strconv.ParseInt(key, 10, 64)
- if parseErr != nil || reflect.Zero(keyType).OverflowInt(n) {
- err = fmt.Errorf("failed to unmarshal number key %v", key)
- }
- keyVal = reflect.ValueOf(n).Convert(keyType)
- case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- n, parseErr := strconv.ParseUint(key, 10, 64)
- if parseErr != nil || reflect.Zero(keyType).OverflowUint(n) {
- err = fmt.Errorf("failed to unmarshal number key %v", key)
- break
- }
- keyVal = reflect.ValueOf(n).Convert(keyType)
- case reflect.Float32, reflect.Float64:
- if mc.EncodeKeysWithStringer {
- parsed, err := strconv.ParseFloat(key, 64)
- if err != nil {
- return keyVal, fmt.Errorf("Map key is defined to be a decimal type (%v) but got error %w", keyType.Kind(), err)
- }
- keyVal = reflect.ValueOf(parsed)
- break
- }
- fallthrough
- default:
- return keyVal, fmt.Errorf("unsupported key type: %v", keyType)
- }
- }
- return keyVal, err
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go
deleted file mode 100644
index fbd9f0a9e9..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go
+++ /dev/null
@@ -1,65 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import "fmt"
-
-type mode int
-
-const (
- _ mode = iota
- mTopLevel
- mDocument
- mArray
- mValue
- mElement
- mCodeWithScope
- mSpacer
-)
-
-func (m mode) String() string {
- var str string
-
- switch m {
- case mTopLevel:
- str = "TopLevel"
- case mDocument:
- str = "DocumentMode"
- case mArray:
- str = "ArrayMode"
- case mValue:
- str = "ValueMode"
- case mElement:
- str = "ElementMode"
- case mCodeWithScope:
- str = "CodeWithScopeMode"
- case mSpacer:
- str = "CodeWithScopeSpacerFrame"
- default:
- str = "UnknownMode"
- }
-
- return str
-}
-
-// TransitionError is an error returned when an invalid transition in a
-// ValueReader or ValueWriter state machine occurs.
-type TransitionError struct {
- parent mode
- current mode
- destination mode
-}
-
-func (te TransitionError) Error() string {
- if te.destination == mode(0) {
- return fmt.Sprintf("invalid state transition: cannot read/write value while in %s", te.current)
- }
- if te.parent == mode(0) {
- return fmt.Sprintf("invalid state transition: %s -> %s", te.current, te.destination)
- }
- return fmt.Sprintf("invalid state transition: %s -> %s; parent %s", te.current, te.destination, te.parent)
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go
deleted file mode 100644
index ddfa4a33e1..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-var _ ValueEncoder = &PointerCodec{}
-var _ ValueDecoder = &PointerCodec{}
-
-// PointerCodec is the Codec used for pointers.
-//
-// Deprecated: PointerCodec will not be directly accessible in Go Driver 2.0. To
-// override the default pointer encode and decode behavior, create a new registry
-// with [go.mongodb.org/mongo-driver/bson.NewRegistry] and register a new
-// encoder and decoder for pointers.
-//
-// For example,
-//
-// reg := bson.NewRegistry()
-// reg.RegisterKindEncoder(reflect.Ptr, myPointerEncoder)
-// reg.RegisterKindDecoder(reflect.Ptr, myPointerDecoder)
-type PointerCodec struct {
- ecache typeEncoderCache
- dcache typeDecoderCache
-}
-
-// NewPointerCodec returns a PointerCodec that has been initialized.
-//
-// Deprecated: NewPointerCodec will not be available in Go Driver 2.0. See
-// [PointerCodec] for more details.
-func NewPointerCodec() *PointerCodec {
- return &PointerCodec{}
-}
-
-// EncodeValue handles encoding a pointer by either encoding it to BSON Null if the pointer is nil
-// or looking up an encoder for the type of value the pointer points to.
-func (pc *PointerCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if val.Kind() != reflect.Ptr {
- if !val.IsValid() {
- return vw.WriteNull()
- }
- return ValueEncoderError{Name: "PointerCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: val}
- }
-
- if val.IsNil() {
- return vw.WriteNull()
- }
-
- typ := val.Type()
- if v, ok := pc.ecache.Load(typ); ok {
- if v == nil {
- return ErrNoEncoder{Type: typ}
- }
- return v.EncodeValue(ec, vw, val.Elem())
- }
- // TODO(charlie): handle concurrent requests for the same type
- enc, err := ec.LookupEncoder(typ.Elem())
- enc = pc.ecache.LoadOrStore(typ, enc)
- if err != nil {
- return err
- }
- return enc.EncodeValue(ec, vw, val.Elem())
-}
-
-// DecodeValue handles decoding a pointer by looking up a decoder for the type it points to and
-// using that to decode. If the BSON value is Null, this method will set the pointer to nil.
-func (pc *PointerCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Kind() != reflect.Ptr {
- return ValueDecoderError{Name: "PointerCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: val}
- }
-
- typ := val.Type()
- if vr.Type() == bsontype.Null {
- val.Set(reflect.Zero(typ))
- return vr.ReadNull()
- }
- if vr.Type() == bsontype.Undefined {
- val.Set(reflect.Zero(typ))
- return vr.ReadUndefined()
- }
-
- if val.IsNil() {
- val.Set(reflect.New(typ.Elem()))
- }
-
- if v, ok := pc.dcache.Load(typ); ok {
- if v == nil {
- return ErrNoDecoder{Type: typ}
- }
- return v.DecodeValue(dc, vr, val.Elem())
- }
- // TODO(charlie): handle concurrent requests for the same type
- dec, err := dc.LookupDecoder(typ.Elem())
- dec = pc.dcache.LoadOrStore(typ, dec)
- if err != nil {
- return err
- }
- return dec.DecodeValue(dc, vr, val.Elem())
-}
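
The removed PointerCodec writes nil Go pointers as BSON null and turns BSON null or undefined back into nil pointers when decoding. A small round-trip sketch of that behavior (the wrapper struct is illustrative, not part of the driver):

	package main

	import (
		"fmt"

		"go.mongodb.org/mongo-driver/bson"
	)

	type wrapper struct {
		N *int `bson:"n"`
	}

	func main() {
		raw, _ := bson.Marshal(wrapper{}) // a nil *int should be written as {"n": null}

		var out wrapper
		_ = bson.Unmarshal(raw, &out)
		fmt.Println(out.N == nil) // expected: true, BSON null decodes back to a nil pointer
	}
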
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go
deleted file mode 100644
index 4cf2b01ab4..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-// Proxy is an interface implemented by types that cannot themselves be directly encoded. Types
-// that implement this interface will have ProxyBSON called during the encoding process and that
-// value will be encoded in place for the implementer.
-type Proxy interface {
- ProxyBSON() (interface{}, error)
-}
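
The Proxy hook removed here lets a type that cannot be encoded directly substitute another value at encode time; the driver's default encoders look up this interface and encode whatever ProxyBSON returns in place of the original value. A hedged sketch with a hypothetical celsius type and an arbitrary string format:

	package temperature

	import "fmt"

	// celsius is a hypothetical type that proxies to a formatted string at encode time.
	type celsius float64

	// ProxyBSON returns the value to encode in place of the celsius value itself.
	func (c celsius) ProxyBSON() (interface{}, error) {
		return fmt.Sprintf("%.1fC", float64(c)), nil
	}
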
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go
deleted file mode 100644
index 196c491bbb..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go
+++ /dev/null
@@ -1,524 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "errors"
- "fmt"
- "reflect"
- "sync"
-
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-// ErrNilType is returned when nil is passed to either LookupEncoder or LookupDecoder.
-//
-// Deprecated: ErrNilType will not be supported in Go Driver 2.0.
-var ErrNilType = errors.New("cannot perform a decoder lookup on ")
-
-// ErrNotPointer is returned when a non-pointer type is provided to LookupDecoder.
-//
-// Deprecated: ErrNotPointer will not be supported in Go Driver 2.0.
-var ErrNotPointer = errors.New("non-pointer provided to LookupDecoder")
-
-// ErrNoEncoder is returned when there wasn't an encoder available for a type.
-//
-// Deprecated: ErrNoEncoder will not be supported in Go Driver 2.0.
-type ErrNoEncoder struct {
- Type reflect.Type
-}
-
-func (ene ErrNoEncoder) Error() string {
- if ene.Type == nil {
- return "no encoder found for "
- }
- return "no encoder found for " + ene.Type.String()
-}
-
-// ErrNoDecoder is returned when there wasn't a decoder available for a type.
-//
-// Deprecated: ErrNoDecoder will not be supported in Go Driver 2.0.
-type ErrNoDecoder struct {
- Type reflect.Type
-}
-
-func (end ErrNoDecoder) Error() string {
- return "no decoder found for " + end.Type.String()
-}
-
-// ErrNoTypeMapEntry is returned when there wasn't a type available for the provided BSON type.
-//
-// Deprecated: ErrNoTypeMapEntry will not be supported in Go Driver 2.0.
-type ErrNoTypeMapEntry struct {
- Type bsontype.Type
-}
-
-func (entme ErrNoTypeMapEntry) Error() string {
- return "no type map entry found for " + entme.Type.String()
-}
-
-// ErrNotInterface is returned when the provided type is not an interface.
-//
-// Deprecated: ErrNotInterface will not be supported in Go Driver 2.0.
-var ErrNotInterface = errors.New("The provided type is not an interface")
-
-// A RegistryBuilder is used to build a Registry. This type is not goroutine
-// safe.
-//
-// Deprecated: Use Registry instead.
-type RegistryBuilder struct {
- registry *Registry
-}
-
-// NewRegistryBuilder creates a new empty RegistryBuilder.
-//
-// Deprecated: Use NewRegistry instead.
-func NewRegistryBuilder() *RegistryBuilder {
- return &RegistryBuilder{
- registry: NewRegistry(),
- }
-}
-
-// RegisterCodec will register the provided ValueCodec for the provided type.
-//
-// Deprecated: Use Registry.RegisterTypeEncoder and Registry.RegisterTypeDecoder instead.
-func (rb *RegistryBuilder) RegisterCodec(t reflect.Type, codec ValueCodec) *RegistryBuilder {
- rb.RegisterTypeEncoder(t, codec)
- rb.RegisterTypeDecoder(t, codec)
- return rb
-}
-
-// RegisterTypeEncoder will register the provided ValueEncoder for the provided type.
-//
-// The type will be used directly, so an encoder can be registered for a type and a different encoder can be registered
-// for a pointer to that type.
-//
-// If the given type is an interface, the encoder will be called when marshaling a type that is that interface. It
-// will not be called when marshaling a non-interface type that implements the interface.
-//
-// Deprecated: Use Registry.RegisterTypeEncoder instead.
-func (rb *RegistryBuilder) RegisterTypeEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder {
- rb.registry.RegisterTypeEncoder(t, enc)
- return rb
-}
-
-// RegisterHookEncoder will register an encoder for the provided interface type t. This encoder will be called when
-// marshaling a type if the type implements t or a pointer to the type implements t. If the provided type is not
-// an interface (i.e. t.Kind() != reflect.Interface), this method will panic.
-//
-// Deprecated: Use Registry.RegisterInterfaceEncoder instead.
-func (rb *RegistryBuilder) RegisterHookEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder {
- rb.registry.RegisterInterfaceEncoder(t, enc)
- return rb
-}
-
-// RegisterTypeDecoder will register the provided ValueDecoder for the provided type.
-//
-// The type will be used directly, so a decoder can be registered for a type and a different decoder can be registered
-// for a pointer to that type.
-//
-// If the given type is an interface, the decoder will be called when unmarshaling into a type that is that interface.
-// It will not be called when unmarshaling into a non-interface type that implements the interface.
-//
-// Deprecated: Use Registry.RegisterTypeDecoder instead.
-func (rb *RegistryBuilder) RegisterTypeDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder {
- rb.registry.RegisterTypeDecoder(t, dec)
- return rb
-}
-
-// RegisterHookDecoder will register a decoder for the provided interface type t. This decoder will be called when
-// unmarshaling into a type if the type implements t or a pointer to the type implements t. If the provided type is not
-// an interface (i.e. t.Kind() != reflect.Interface), this method will panic.
-//
-// Deprecated: Use Registry.RegisterInterfaceDecoder instead.
-func (rb *RegistryBuilder) RegisterHookDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder {
- rb.registry.RegisterInterfaceDecoder(t, dec)
- return rb
-}
-
-// RegisterEncoder registers the provided type and encoder pair.
-//
-// Deprecated: Use Registry.RegisterTypeEncoder or Registry.RegisterInterfaceEncoder instead.
-func (rb *RegistryBuilder) RegisterEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder {
- if t == tEmpty {
- rb.registry.RegisterTypeEncoder(t, enc)
- return rb
- }
- switch t.Kind() {
- case reflect.Interface:
- rb.registry.RegisterInterfaceEncoder(t, enc)
- default:
- rb.registry.RegisterTypeEncoder(t, enc)
- }
- return rb
-}
-
-// RegisterDecoder registers the provided type and decoder pair.
-//
-// Deprecated: Use Registry.RegisterTypeDecoder or Registry.RegisterInterfaceDecoder instead.
-func (rb *RegistryBuilder) RegisterDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder {
- if t == nil {
- rb.registry.RegisterTypeDecoder(t, dec)
- return rb
- }
- if t == tEmpty {
- rb.registry.RegisterTypeDecoder(t, dec)
- return rb
- }
- switch t.Kind() {
- case reflect.Interface:
- rb.registry.RegisterInterfaceDecoder(t, dec)
- default:
- rb.registry.RegisterTypeDecoder(t, dec)
- }
- return rb
-}
-
-// RegisterDefaultEncoder will register the provided ValueEncoder to the provided
-// kind.
-//
-// Deprecated: Use Registry.RegisterKindEncoder instead.
-func (rb *RegistryBuilder) RegisterDefaultEncoder(kind reflect.Kind, enc ValueEncoder) *RegistryBuilder {
- rb.registry.RegisterKindEncoder(kind, enc)
- return rb
-}
-
-// RegisterDefaultDecoder will register the provided ValueDecoder to the
-// provided kind.
-//
-// Deprecated: Use Registry.RegisterKindDecoder instead.
-func (rb *RegistryBuilder) RegisterDefaultDecoder(kind reflect.Kind, dec ValueDecoder) *RegistryBuilder {
- rb.registry.RegisterKindDecoder(kind, dec)
- return rb
-}
-
-// RegisterTypeMapEntry will register the provided type to the BSON type. The primary usage for this
-// mapping is decoding situations where an empty interface is used and a default type needs to be
-// created and decoded into.
-//
-// By default, BSON documents will decode into interface{} values as bson.D. To change the default type for BSON
-// documents, a type map entry for bsontype.EmbeddedDocument should be registered. For example, to force BSON documents
-// to decode to bson.Raw, use the following code:
-//
-// rb.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(bson.Raw{}))
-//
-// Deprecated: Use Registry.RegisterTypeMapEntry instead.
-func (rb *RegistryBuilder) RegisterTypeMapEntry(bt bsontype.Type, rt reflect.Type) *RegistryBuilder {
- rb.registry.RegisterTypeMapEntry(bt, rt)
- return rb
-}
-
-// Build creates a Registry from the current state of this RegistryBuilder.
-//
-// Deprecated: Use NewRegistry instead.
-func (rb *RegistryBuilder) Build() *Registry {
- r := &Registry{
- interfaceEncoders: append([]interfaceValueEncoder(nil), rb.registry.interfaceEncoders...),
- interfaceDecoders: append([]interfaceValueDecoder(nil), rb.registry.interfaceDecoders...),
- typeEncoders: rb.registry.typeEncoders.Clone(),
- typeDecoders: rb.registry.typeDecoders.Clone(),
- kindEncoders: rb.registry.kindEncoders.Clone(),
- kindDecoders: rb.registry.kindDecoders.Clone(),
- }
- rb.registry.typeMap.Range(func(k, v interface{}) bool {
- if k != nil && v != nil {
- r.typeMap.Store(k, v)
- }
- return true
- })
- return r
-}
-
-// A Registry is used to store and retrieve codecs for types and interfaces. This type is the main
-// type passed around and Encoders and Decoders are constructed from it.
-type Registry struct {
- interfaceEncoders []interfaceValueEncoder
- interfaceDecoders []interfaceValueDecoder
- typeEncoders *typeEncoderCache
- typeDecoders *typeDecoderCache
- kindEncoders *kindEncoderCache
- kindDecoders *kindDecoderCache
- typeMap sync.Map // map[bsontype.Type]reflect.Type
-}
-
-// NewRegistry creates a new empty Registry.
-func NewRegistry() *Registry {
- return &Registry{
- typeEncoders: new(typeEncoderCache),
- typeDecoders: new(typeDecoderCache),
- kindEncoders: new(kindEncoderCache),
- kindDecoders: new(kindDecoderCache),
- }
-}
-
-// RegisterTypeEncoder registers the provided ValueEncoder for the provided type.
-//
-// The type will be used as provided, so an encoder can be registered for a type and a different
-// encoder can be registered for a pointer to that type.
-//
-// If the given type is an interface, the encoder will be called when marshaling a type that is
-// that interface. It will not be called when marshaling a non-interface type that implements the
-// interface. To get the latter behavior, call RegisterHookEncoder instead.
-//
-// RegisterTypeEncoder should not be called concurrently with any other Registry method.
-func (r *Registry) RegisterTypeEncoder(valueType reflect.Type, enc ValueEncoder) {
- r.typeEncoders.Store(valueType, enc)
-}
-
-// RegisterTypeDecoder registers the provided ValueDecoder for the provided type.
-//
-// The type will be used as provided, so a decoder can be registered for a type and a different
-// decoder can be registered for a pointer to that type.
-//
-// If the given type is an interface, the decoder will be called when unmarshaling into a type that
-// is that interface. It will not be called when unmarshaling into a non-interface type that
-// implements the interface. To get the latter behavior, call RegisterHookDecoder instead.
-//
-// RegisterTypeDecoder should not be called concurrently with any other Registry method.
-func (r *Registry) RegisterTypeDecoder(valueType reflect.Type, dec ValueDecoder) {
- r.typeDecoders.Store(valueType, dec)
-}
-
-// RegisterKindEncoder registers the provided ValueEncoder for the provided kind.
-//
-// Use RegisterKindEncoder to register an encoder for any type with the same underlying kind. For
-// example, consider the type MyInt defined as
-//
-// type MyInt int32
-//
-// To define an encoder for MyInt and int32, use RegisterKindEncoder like
-//
-// reg.RegisterKindEncoder(reflect.Int32, myEncoder)
-//
-// RegisterKindEncoder should not be called concurrently with any other Registry method.
-func (r *Registry) RegisterKindEncoder(kind reflect.Kind, enc ValueEncoder) {
- r.kindEncoders.Store(kind, enc)
-}
-
-// RegisterKindDecoder registers the provided ValueDecoder for the provided kind.
-//
-// Use RegisterKindDecoder to register a decoder for any type with the same underlying kind. For
-// example, consider the type MyInt defined as
-//
-// type MyInt int32
-//
-// To define a decoder for MyInt and int32, use RegisterKindDecoder like
-//
-// reg.RegisterKindDecoder(reflect.Int32, myDecoder)
-//
-// RegisterKindDecoder should not be called concurrently with any other Registry method.
-func (r *Registry) RegisterKindDecoder(kind reflect.Kind, dec ValueDecoder) {
- r.kindDecoders.Store(kind, dec)
-}
-
-// RegisterInterfaceEncoder registers an encoder for the provided interface type iface. This encoder will
-// be called when marshaling a type if the type implements iface or a pointer to the type
-// implements iface. If the provided type is not an interface
-// (i.e. iface.Kind() != reflect.Interface), this method will panic.
-//
-// RegisterInterfaceEncoder should not be called concurrently with any other Registry method.
-func (r *Registry) RegisterInterfaceEncoder(iface reflect.Type, enc ValueEncoder) {
- if iface.Kind() != reflect.Interface {
- panicStr := fmt.Errorf("RegisterInterfaceEncoder expects a type with kind reflect.Interface, "+
- "got type %s with kind %s", iface, iface.Kind())
- panic(panicStr)
- }
-
- for idx, encoder := range r.interfaceEncoders {
- if encoder.i == iface {
- r.interfaceEncoders[idx].ve = enc
- return
- }
- }
-
- r.interfaceEncoders = append(r.interfaceEncoders, interfaceValueEncoder{i: iface, ve: enc})
-}
-
-// RegisterInterfaceDecoder registers a decoder for the provided interface type iface. This decoder will
-// be called when unmarshaling into a type if the type implements iface or a pointer to the type
-// implements iface. If the provided type is not an interface (i.e. iface.Kind() != reflect.Interface),
-// this method will panic.
-//
-// RegisterInterfaceDecoder should not be called concurrently with any other Registry method.
-func (r *Registry) RegisterInterfaceDecoder(iface reflect.Type, dec ValueDecoder) {
- if iface.Kind() != reflect.Interface {
- panicStr := fmt.Errorf("RegisterInterfaceDecoder expects a type with kind reflect.Interface, "+
- "got type %s with kind %s", iface, iface.Kind())
- panic(panicStr)
- }
-
- for idx, decoder := range r.interfaceDecoders {
- if decoder.i == iface {
- r.interfaceDecoders[idx].vd = dec
- return
- }
- }
-
- r.interfaceDecoders = append(r.interfaceDecoders, interfaceValueDecoder{i: iface, vd: dec})
-}
-
-// RegisterTypeMapEntry will register the provided type to the BSON type. The primary usage for this
-// mapping is decoding situations where an empty interface is used and a default type needs to be
-// created and decoded into.
-//
-// By default, BSON documents will decode into interface{} values as bson.D. To change the default type for BSON
-// documents, a type map entry for bsontype.EmbeddedDocument should be registered. For example, to force BSON documents
-// to decode to bson.Raw, use the following code:
-//
-// reg.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(bson.Raw{}))
-func (r *Registry) RegisterTypeMapEntry(bt bsontype.Type, rt reflect.Type) {
- r.typeMap.Store(bt, rt)
-}
-
-// LookupEncoder returns the first matching encoder in the Registry. It uses the following lookup
-// order:
-//
-// 1. An encoder registered for the exact type. If the given type is an interface, an encoder
-// registered using RegisterTypeEncoder for that interface will be selected.
-//
-// 2. An encoder registered using RegisterInterfaceEncoder for an interface implemented by the type
-// or by a pointer to the type.
-//
-// 3. An encoder registered using RegisterKindEncoder for the kind of value.
-//
-// If no encoder is found, an error of type ErrNoEncoder is returned. LookupEncoder is safe for
-// concurrent use by multiple goroutines after all codecs and encoders are registered.
-func (r *Registry) LookupEncoder(valueType reflect.Type) (ValueEncoder, error) {
- if valueType == nil {
- return nil, ErrNoEncoder{Type: valueType}
- }
- enc, found := r.lookupTypeEncoder(valueType)
- if found {
- if enc == nil {
- return nil, ErrNoEncoder{Type: valueType}
- }
- return enc, nil
- }
-
- enc, found = r.lookupInterfaceEncoder(valueType, true)
- if found {
- return r.typeEncoders.LoadOrStore(valueType, enc), nil
- }
-
- if v, ok := r.kindEncoders.Load(valueType.Kind()); ok {
- return r.storeTypeEncoder(valueType, v), nil
- }
- return nil, ErrNoEncoder{Type: valueType}
-}
-
-func (r *Registry) storeTypeEncoder(rt reflect.Type, enc ValueEncoder) ValueEncoder {
- return r.typeEncoders.LoadOrStore(rt, enc)
-}
-
-func (r *Registry) lookupTypeEncoder(rt reflect.Type) (ValueEncoder, bool) {
- return r.typeEncoders.Load(rt)
-}
-
-func (r *Registry) lookupInterfaceEncoder(valueType reflect.Type, allowAddr bool) (ValueEncoder, bool) {
- if valueType == nil {
- return nil, false
- }
- for _, ienc := range r.interfaceEncoders {
- if valueType.Implements(ienc.i) {
- return ienc.ve, true
- }
- if allowAddr && valueType.Kind() != reflect.Ptr && reflect.PtrTo(valueType).Implements(ienc.i) {
- // if *t implements an interface, this will catch if t implements an interface further
- // ahead in interfaceEncoders
- defaultEnc, found := r.lookupInterfaceEncoder(valueType, false)
- if !found {
- defaultEnc, _ = r.kindEncoders.Load(valueType.Kind())
- }
- return newCondAddrEncoder(ienc.ve, defaultEnc), true
- }
- }
- return nil, false
-}
-
-// LookupDecoder returns the first matching decoder in the Registry. It uses the following lookup
-// order:
-//
-// 1. A decoder registered for the exact type. If the given type is an interface, a decoder
-// registered using RegisterTypeDecoder for that interface will be selected.
-//
-// 2. A decoder registered using RegisterInterfaceDecoder for an interface implemented by the type or by
-// a pointer to the type.
-//
-// 3. A decoder registered using RegisterKindDecoder for the kind of value.
-//
-// If no decoder is found, an error of type ErrNoDecoder is returned. LookupDecoder is safe for
-// concurrent use by multiple goroutines after all codecs and decoders are registered.
-func (r *Registry) LookupDecoder(valueType reflect.Type) (ValueDecoder, error) {
- if valueType == nil {
- return nil, ErrNilType
- }
- dec, found := r.lookupTypeDecoder(valueType)
- if found {
- if dec == nil {
- return nil, ErrNoDecoder{Type: valueType}
- }
- return dec, nil
- }
-
- dec, found = r.lookupInterfaceDecoder(valueType, true)
- if found {
- return r.storeTypeDecoder(valueType, dec), nil
- }
-
- if v, ok := r.kindDecoders.Load(valueType.Kind()); ok {
- return r.storeTypeDecoder(valueType, v), nil
- }
- return nil, ErrNoDecoder{Type: valueType}
-}
-
-func (r *Registry) lookupTypeDecoder(valueType reflect.Type) (ValueDecoder, bool) {
- return r.typeDecoders.Load(valueType)
-}
-
-func (r *Registry) storeTypeDecoder(typ reflect.Type, dec ValueDecoder) ValueDecoder {
- return r.typeDecoders.LoadOrStore(typ, dec)
-}
-
-func (r *Registry) lookupInterfaceDecoder(valueType reflect.Type, allowAddr bool) (ValueDecoder, bool) {
- for _, idec := range r.interfaceDecoders {
- if valueType.Implements(idec.i) {
- return idec.vd, true
- }
- if allowAddr && valueType.Kind() != reflect.Ptr && reflect.PtrTo(valueType).Implements(idec.i) {
- // if *t implements an interface, this will catch if t implements an interface further
- // ahead in interfaceDecoders
- defaultDec, found := r.lookupInterfaceDecoder(valueType, false)
- if !found {
- defaultDec, _ = r.kindDecoders.Load(valueType.Kind())
- }
- return newCondAddrDecoder(idec.vd, defaultDec), true
- }
- }
- return nil, false
-}
-
-// LookupTypeMapEntry inspects the registry's type map for a Go type for the corresponding BSON
-// type. If no type is found, ErrNoTypeMapEntry is returned.
-//
-// LookupTypeMapEntry should not be called concurrently with any other Registry method.
-func (r *Registry) LookupTypeMapEntry(bt bsontype.Type) (reflect.Type, error) {
- v, ok := r.typeMap.Load(bt)
- if v == nil || !ok {
- return nil, ErrNoTypeMapEntry{Type: bt}
- }
- return v.(reflect.Type), nil
-}
-
-type interfaceValueEncoder struct {
- i reflect.Type
- ve ValueEncoder
-}
-
-type interfaceValueDecoder struct {
- i reflect.Type
- vd ValueDecoder
-}
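
The removed registry.go also holds the type map consulted when decoding into interface{}. Mirroring the bson.Raw example in the comment above, a sketch that maps BSON documents to bson.M instead of the default bson.D; the sample document is made up, and UnmarshalWithRegistry is the v1 entry point that accepts an explicit registry:

	package main

	import (
		"reflect"

		"go.mongodb.org/mongo-driver/bson"
		"go.mongodb.org/mongo-driver/bson/bsontype"
	)

	func main() {
		reg := bson.NewRegistry()
		reg.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(bson.M{}))

		data, _ := bson.Marshal(bson.M{"nested": bson.M{"k": int32(1)}})

		var v interface{}
		// Documents read into v should now come back as bson.M rather than bson.D.
		_ = bson.UnmarshalWithRegistry(reg, data, &v)
	}
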
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go
deleted file mode 100644
index 14c9fd2564..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go
+++ /dev/null
@@ -1,214 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "errors"
- "fmt"
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-var defaultSliceCodec = NewSliceCodec()
-
-// SliceCodec is the Codec used for slice values.
-//
-// Deprecated: SliceCodec will not be directly configurable in Go Driver 2.0. To
-// configure the slice encode and decode behavior, use the configuration methods
-// on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the slice encode and
-// decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to marshal nil Go slices as empty
-// BSON arrays, use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// NilSliceAsEmpty: true,
-// })
-//
-// See the deprecation notice for each field in SliceCodec for the corresponding
-// settings.
-type SliceCodec struct {
- // EncodeNilAsEmpty causes EncodeValue to marshal nil Go slices as empty BSON arrays instead of
- // BSON null.
- //
- // Deprecated: Use bson.Encoder.NilSliceAsEmpty instead.
- EncodeNilAsEmpty bool
-}
-
-// NewSliceCodec returns a MapCodec with options opts.
-//
-// Deprecated: NewSliceCodec will not be available in Go Driver 2.0. See
-// [SliceCodec] for more details.
-func NewSliceCodec(opts ...*bsonoptions.SliceCodecOptions) *SliceCodec {
- sliceOpt := bsonoptions.MergeSliceCodecOptions(opts...)
-
- codec := SliceCodec{}
- if sliceOpt.EncodeNilAsEmpty != nil {
- codec.EncodeNilAsEmpty = *sliceOpt.EncodeNilAsEmpty
- }
- return &codec
-}
-
-// EncodeValue is the ValueEncoder for slice types.
-func (sc SliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Slice {
- return ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
- }
-
- if val.IsNil() && !sc.EncodeNilAsEmpty && !ec.nilSliceAsEmpty {
- return vw.WriteNull()
- }
-
- // If we have a []byte we want to treat it as a binary instead of as an array.
- if val.Type().Elem() == tByte {
- byteSlice := make([]byte, val.Len())
- reflect.Copy(reflect.ValueOf(byteSlice), val)
- return vw.WriteBinary(byteSlice)
- }
-
- // If we have a []primitive.E we want to treat it as a document instead of as an array.
- if val.Type() == tD || val.Type().ConvertibleTo(tD) {
- d := val.Convert(tD).Interface().(primitive.D)
-
- dw, err := vw.WriteDocument()
- if err != nil {
- return err
- }
-
- for _, e := range d {
- err = encodeElement(ec, dw, e)
- if err != nil {
- return err
- }
- }
-
- return dw.WriteDocumentEnd()
- }
-
- aw, err := vw.WriteArray()
- if err != nil {
- return err
- }
-
- elemType := val.Type().Elem()
- encoder, err := ec.LookupEncoder(elemType)
- if err != nil && elemType.Kind() != reflect.Interface {
- return err
- }
-
- for idx := 0; idx < val.Len(); idx++ {
- currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.Index(idx))
- if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) {
- return lookupErr
- }
-
- vw, err := aw.WriteArrayElement()
- if err != nil {
- return err
- }
-
- if errors.Is(lookupErr, errInvalidValue) {
- err = vw.WriteNull()
- if err != nil {
- return err
- }
- continue
- }
-
- err = currEncoder.EncodeValue(ec, vw, currVal)
- if err != nil {
- return err
- }
- }
- return aw.WriteArrayEnd()
-}
-
-// DecodeValue is the ValueDecoder for slice types.
-func (sc *SliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Kind() != reflect.Slice {
- return ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
- }
-
- switch vrType := vr.Type(); vrType {
- case bsontype.Array:
- case bsontype.Null:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadNull()
- case bsontype.Undefined:
- val.Set(reflect.Zero(val.Type()))
- return vr.ReadUndefined()
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- if val.Type().Elem() != tE {
- return fmt.Errorf("cannot decode document into %s", val.Type())
- }
- case bsontype.Binary:
- if val.Type().Elem() != tByte {
- return fmt.Errorf("SliceDecodeValue can only decode a binary into a byte array, got %v", vrType)
- }
- data, subtype, err := vr.ReadBinary()
- if err != nil {
- return err
- }
- if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
- return fmt.Errorf("SliceDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", bsontype.Binary, subtype)
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeSlice(val.Type(), 0, len(data)))
- }
- val.SetLen(0)
- val.Set(reflect.AppendSlice(val, reflect.ValueOf(data)))
- return nil
- case bsontype.String:
- if sliceType := val.Type().Elem(); sliceType != tByte {
- return fmt.Errorf("SliceDecodeValue can only decode a string into a byte array, got %v", sliceType)
- }
- str, err := vr.ReadString()
- if err != nil {
- return err
- }
- byteStr := []byte(str)
-
- if val.IsNil() {
- val.Set(reflect.MakeSlice(val.Type(), 0, len(byteStr)))
- }
- val.SetLen(0)
- val.Set(reflect.AppendSlice(val, reflect.ValueOf(byteStr)))
- return nil
- default:
- return fmt.Errorf("cannot decode %v into a slice", vrType)
- }
-
- var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error)
- switch val.Type().Elem() {
- case tE:
- dc.Ancestor = val.Type()
- elemsFunc = defaultValueDecoders.decodeD
- default:
- elemsFunc = defaultValueDecoders.decodeDefault
- }
-
- elems, err := elemsFunc(dc, vr, val)
- if err != nil {
- return err
- }
-
- if val.IsNil() {
- val.Set(reflect.MakeSlice(val.Type(), 0, len(elems)))
- }
-
- val.SetLen(0)
- val.Set(reflect.Append(val, elems...))
-
- return nil
-}
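
The removed SliceCodec special-cases []byte (written as BSON binary) and primitive.D (written as an embedded document) before falling back to a plain BSON array. An illustrative struct showing those three shapes; the field names and tags are arbitrary:

	package payloads

	import "go.mongodb.org/mongo-driver/bson/primitive"

	// payload shows how the slice codec picks a BSON shape from the Go element type.
	type payload struct {
		Raw  []byte      `bson:"raw"`  // []byte is encoded as BSON binary, not an array
		Tags []string    `bson:"tags"` // other slices are encoded as BSON arrays
		Meta primitive.D `bson:"meta"` // bson.D ([]primitive.E) is encoded as an embedded document
	}
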
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go
deleted file mode 100644
index a8f885a854..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "fmt"
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-// StringCodec is the Codec used for string values.
-//
-// Deprecated: StringCodec will not be directly accessible in Go Driver 2.0. To
-// override the default string encode and decode behavior, create a new registry
-// with [go.mongodb.org/mongo-driver/bson.NewRegistry] and register a new
-// encoder and decoder for strings.
-//
-// For example,
-//
-// reg := bson.NewRegistry()
-// reg.RegisterKindEncoder(reflect.String, myStringEncoder)
-// reg.RegisterKindDecoder(reflect.String, myStringDecoder)
-type StringCodec struct {
- // DecodeObjectIDAsHex specifies if object IDs should be decoded as their hex representation.
- // If false, a string made from the raw object ID bytes will be used. Defaults to true.
- //
- // Deprecated: Decoding object IDs as raw bytes will not be supported in Go Driver 2.0.
- DecodeObjectIDAsHex bool
-}
-
-var (
- defaultStringCodec = NewStringCodec()
-
- // Assert that defaultStringCodec satisfies the typeDecoder interface, which allows it to be
- // used by collection type decoders (e.g. map, slice, etc) to set individual values in a
- // collection.
- _ typeDecoder = defaultStringCodec
-)
-
-// NewStringCodec returns a StringCodec with options opts.
-//
-// Deprecated: NewStringCodec will not be available in Go Driver 2.0. See
-// [StringCodec] for more details.
-func NewStringCodec(opts ...*bsonoptions.StringCodecOptions) *StringCodec {
- stringOpt := bsonoptions.MergeStringCodecOptions(opts...)
- return &StringCodec{*stringOpt.DecodeObjectIDAsHex}
-}
-
-// EncodeValue is the ValueEncoder for string types.
-func (sc *StringCodec) EncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if val.Kind() != reflect.String {
- return ValueEncoderError{
- Name: "StringEncodeValue",
- Kinds: []reflect.Kind{reflect.String},
- Received: val,
- }
- }
-
- return vw.WriteString(val.String())
-}
-
-func (sc *StringCodec) decodeType(_ DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t.Kind() != reflect.String {
- return emptyValue, ValueDecoderError{
- Name: "StringDecodeValue",
- Kinds: []reflect.Kind{reflect.String},
- Received: reflect.Zero(t),
- }
- }
-
- var str string
- var err error
- switch vr.Type() {
- case bsontype.String:
- str, err = vr.ReadString()
- if err != nil {
- return emptyValue, err
- }
- case bsontype.ObjectID:
- oid, err := vr.ReadObjectID()
- if err != nil {
- return emptyValue, err
- }
- if sc.DecodeObjectIDAsHex {
- str = oid.Hex()
- } else {
- // TODO(GODRIVER-2796): Return an error here instead of decoding to a garbled string.
- byteArray := [12]byte(oid)
- str = string(byteArray[:])
- }
- case bsontype.Symbol:
- str, err = vr.ReadSymbol()
- if err != nil {
- return emptyValue, err
- }
- case bsontype.Binary:
- data, subtype, err := vr.ReadBinary()
- if err != nil {
- return emptyValue, err
- }
- if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
- return emptyValue, decodeBinaryError{subtype: subtype, typeName: "string"}
- }
- str = string(data)
- case bsontype.Null:
- if err = vr.ReadNull(); err != nil {
- return emptyValue, err
- }
- case bsontype.Undefined:
- if err = vr.ReadUndefined(); err != nil {
- return emptyValue, err
- }
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a string type", vr.Type())
- }
-
- return reflect.ValueOf(str), nil
-}
-
-// DecodeValue is the ValueDecoder for string types.
-func (sc *StringCodec) DecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Kind() != reflect.String {
- return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val}
- }
-
- elem, err := sc.decodeType(dctx, vr, val.Type())
- if err != nil {
- return err
- }
-
- val.SetString(elem.String())
- return nil
-}
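
The removed StringCodec decodes several BSON types into Go strings; with the default DecodeObjectIDAsHex setting, an ObjectID lands as its 24-character hex form. A small sketch of that behavior (variable names are illustrative):

	package main

	import (
		"fmt"

		"go.mongodb.org/mongo-driver/bson"
		"go.mongodb.org/mongo-driver/bson/primitive"
	)

	func main() {
		oid := primitive.NewObjectID()
		raw, _ := bson.Marshal(bson.M{"id": oid})

		var out struct {
			ID string `bson:"id"`
		}
		_ = bson.Unmarshal(raw, &out)
		fmt.Println(out.ID == oid.Hex()) // expected: true with the default string codec
	}
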
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go
deleted file mode 100644
index f8d9690c13..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go
+++ /dev/null
@@ -1,736 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "errors"
- "fmt"
- "reflect"
- "sort"
- "strings"
- "sync"
- "time"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-// DecodeError represents an error that occurs when unmarshalling BSON bytes into a native Go type.
-type DecodeError struct {
- keys []string
- wrapped error
-}
-
-// Unwrap returns the underlying error
-func (de *DecodeError) Unwrap() error {
- return de.wrapped
-}
-
-// Error implements the error interface.
-func (de *DecodeError) Error() string {
-	// The keys are stored in reverse order because the de.keys slice is built up while propagating the error up the
- // stack of BSON keys, so we call de.Keys(), which reverses them.
- keyPath := strings.Join(de.Keys(), ".")
- return fmt.Sprintf("error decoding key %s: %v", keyPath, de.wrapped)
-}
-
-// Keys returns the BSON key path that caused an error as a slice of strings. The keys in the slice are in top-down
-// order. For example, if the document being unmarshalled was {a: {b: {c: 1}}} and the value for c was supposed to be
-// a string, the keys slice will be ["a", "b", "c"].
-func (de *DecodeError) Keys() []string {
- reversedKeys := make([]string, 0, len(de.keys))
- for idx := len(de.keys) - 1; idx >= 0; idx-- {
- reversedKeys = append(reversedKeys, de.keys[idx])
- }
-
- return reversedKeys
-}
-
-// Zeroer allows custom struct types to implement a report of zero
-// state. All struct types that don't implement Zeroer or where IsZero
-// returns false are considered to be not zero.
-type Zeroer interface {
- IsZero() bool
-}
-
-// StructCodec is the Codec used for struct values.
-//
-// Deprecated: StructCodec will not be directly configurable in Go Driver 2.0.
-// To configure the struct encode and decode behavior, use the configuration
-// methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the struct encode
-// and decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to omit zero-value structs when
-// using the "omitempty" struct tag, use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// OmitZeroStruct: true,
-// })
-//
-// See the deprecation notice for each field in StructCodec for the corresponding
-// settings.
-type StructCodec struct {
- cache sync.Map // map[reflect.Type]*structDescription
- parser StructTagParser
-
- // DecodeZeroStruct causes DecodeValue to delete any existing values from Go structs in the
- // destination value passed to Decode before unmarshaling BSON documents into them.
- //
- // Deprecated: Use bson.Decoder.ZeroStructs or options.BSONOptions.ZeroStructs instead.
- DecodeZeroStruct bool
-
- // DecodeDeepZeroInline causes DecodeValue to delete any existing values from Go structs in the
- // destination value passed to Decode before unmarshaling BSON documents into them.
- //
- // Deprecated: DecodeDeepZeroInline will not be supported in Go Driver 2.0.
- DecodeDeepZeroInline bool
-
- // EncodeOmitDefaultStruct causes the Encoder to consider the zero value for a struct (e.g.
- // MyStruct{}) as empty and omit it from the marshaled BSON when the "omitempty" struct tag
- // option is set.
- //
- // Deprecated: Use bson.Encoder.OmitZeroStruct or options.BSONOptions.OmitZeroStruct instead.
- EncodeOmitDefaultStruct bool
-
- // AllowUnexportedFields allows encoding and decoding values from un-exported struct fields.
- //
- // Deprecated: AllowUnexportedFields does not work on recent versions of Go and will not be
- // supported in Go Driver 2.0.
- AllowUnexportedFields bool
-
- // OverwriteDuplicatedInlinedFields, if false, causes EncodeValue to return an error if there is
- // a duplicate field in the marshaled BSON when the "inline" struct tag option is set. The
- // default value is true.
- //
- // Deprecated: Use bson.Encoder.ErrorOnInlineDuplicates or
- // options.BSONOptions.ErrorOnInlineDuplicates instead.
- OverwriteDuplicatedInlinedFields bool
-}
-
-var _ ValueEncoder = &StructCodec{}
-var _ ValueDecoder = &StructCodec{}
-
-// NewStructCodec returns a StructCodec that uses p for struct tag parsing.
-//
-// Deprecated: NewStructCodec will not be available in Go Driver 2.0. See
-// [StructCodec] for more details.
-func NewStructCodec(p StructTagParser, opts ...*bsonoptions.StructCodecOptions) (*StructCodec, error) {
- if p == nil {
- return nil, errors.New("a StructTagParser must be provided to NewStructCodec")
- }
-
- structOpt := bsonoptions.MergeStructCodecOptions(opts...)
-
- codec := &StructCodec{
- parser: p,
- }
-
- if structOpt.DecodeZeroStruct != nil {
- codec.DecodeZeroStruct = *structOpt.DecodeZeroStruct
- }
- if structOpt.DecodeDeepZeroInline != nil {
- codec.DecodeDeepZeroInline = *structOpt.DecodeDeepZeroInline
- }
- if structOpt.EncodeOmitDefaultStruct != nil {
- codec.EncodeOmitDefaultStruct = *structOpt.EncodeOmitDefaultStruct
- }
- if structOpt.OverwriteDuplicatedInlinedFields != nil {
- codec.OverwriteDuplicatedInlinedFields = *structOpt.OverwriteDuplicatedInlinedFields
- }
- if structOpt.AllowUnexportedFields != nil {
- codec.AllowUnexportedFields = *structOpt.AllowUnexportedFields
- }
-
- return codec, nil
-}
-
-// EncodeValue handles encoding generic struct types.
-func (sc *StructCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Kind() != reflect.Struct {
- return ValueEncoderError{Name: "StructCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Struct}, Received: val}
- }
-
- sd, err := sc.describeStruct(ec.Registry, val.Type(), ec.useJSONStructTags, ec.errorOnInlineDuplicates)
- if err != nil {
- return err
- }
-
- dw, err := vw.WriteDocument()
- if err != nil {
- return err
- }
- var rv reflect.Value
- for _, desc := range sd.fl {
- if desc.inline == nil {
- rv = val.Field(desc.idx)
- } else {
- rv, err = fieldByIndexErr(val, desc.inline)
- if err != nil {
- continue
- }
- }
-
- desc.encoder, rv, err = defaultValueEncoders.lookupElementEncoder(ec, desc.encoder, rv)
-
- if err != nil && !errors.Is(err, errInvalidValue) {
- return err
- }
-
- if errors.Is(err, errInvalidValue) {
- if desc.omitEmpty {
- continue
- }
- vw2, err := dw.WriteDocumentElement(desc.name)
- if err != nil {
- return err
- }
- err = vw2.WriteNull()
- if err != nil {
- return err
- }
- continue
- }
-
- if desc.encoder == nil {
- return ErrNoEncoder{Type: rv.Type()}
- }
-
- encoder := desc.encoder
-
- var empty bool
- if cz, ok := encoder.(CodecZeroer); ok {
- empty = cz.IsTypeZero(rv.Interface())
- } else if rv.Kind() == reflect.Interface {
- // isEmpty will not treat an interface rv as an interface, so we need to check for the
- // nil interface separately.
- empty = rv.IsNil()
- } else {
- empty = isEmpty(rv, sc.EncodeOmitDefaultStruct || ec.omitZeroStruct)
- }
- if desc.omitEmpty && empty {
- continue
- }
-
- vw2, err := dw.WriteDocumentElement(desc.name)
- if err != nil {
- return err
- }
-
- ectx := EncodeContext{
- Registry: ec.Registry,
- MinSize: desc.minSize || ec.MinSize,
- errorOnInlineDuplicates: ec.errorOnInlineDuplicates,
- stringifyMapKeysWithFmt: ec.stringifyMapKeysWithFmt,
- nilMapAsEmpty: ec.nilMapAsEmpty,
- nilSliceAsEmpty: ec.nilSliceAsEmpty,
- nilByteSliceAsEmpty: ec.nilByteSliceAsEmpty,
- omitZeroStruct: ec.omitZeroStruct,
- useJSONStructTags: ec.useJSONStructTags,
- }
- err = encoder.EncodeValue(ectx, vw2, rv)
- if err != nil {
- return err
- }
- }
-
- if sd.inlineMap >= 0 {
- rv := val.Field(sd.inlineMap)
- collisionFn := func(key string) bool {
- _, exists := sd.fm[key]
- return exists
- }
-
- return defaultMapCodec.mapEncodeValue(ec, dw, rv, collisionFn)
- }
-
- return dw.WriteDocumentEnd()
-}
-
-func newDecodeError(key string, original error) error {
- var de *DecodeError
- if !errors.As(original, &de) {
- return &DecodeError{
- keys: []string{key},
- wrapped: original,
- }
- }
-
- de.keys = append(de.keys, key)
- return de
-}
-
-// DecodeValue implements the Codec interface.
-// By default, map types in val will not be cleared. If a map has existing key/value pairs, it will be extended with the new ones from vr.
-// For slices, the decoder will set the length of the slice to zero and append all elements. The underlying array will not be cleared.
-func (sc *StructCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Kind() != reflect.Struct {
- return ValueDecoderError{Name: "StructCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Struct}, Received: val}
- }
-
- switch vrType := vr.Type(); vrType {
- case bsontype.Type(0), bsontype.EmbeddedDocument:
- case bsontype.Null:
- if err := vr.ReadNull(); err != nil {
- return err
- }
-
- val.Set(reflect.Zero(val.Type()))
- return nil
- case bsontype.Undefined:
- if err := vr.ReadUndefined(); err != nil {
- return err
- }
-
- val.Set(reflect.Zero(val.Type()))
- return nil
- default:
- return fmt.Errorf("cannot decode %v into a %s", vrType, val.Type())
- }
-
- sd, err := sc.describeStruct(dc.Registry, val.Type(), dc.useJSONStructTags, false)
- if err != nil {
- return err
- }
-
- if sc.DecodeZeroStruct || dc.zeroStructs {
- val.Set(reflect.Zero(val.Type()))
- }
- if sc.DecodeDeepZeroInline && sd.inline {
- val.Set(deepZero(val.Type()))
- }
-
- var decoder ValueDecoder
- var inlineMap reflect.Value
- if sd.inlineMap >= 0 {
- inlineMap = val.Field(sd.inlineMap)
- decoder, err = dc.LookupDecoder(inlineMap.Type().Elem())
- if err != nil {
- return err
- }
- }
-
- dr, err := vr.ReadDocument()
- if err != nil {
- return err
- }
-
- for {
- name, vr, err := dr.ReadElement()
- if errors.Is(err, bsonrw.ErrEOD) {
- break
- }
- if err != nil {
- return err
- }
-
- fd, exists := sd.fm[name]
- if !exists {
- // if the original name isn't found in the struct description, try again with the name in lowercase
- // this could match if a BSON tag isn't specified because by default, describeStruct lowercases all field
- // names
- fd, exists = sd.fm[strings.ToLower(name)]
- }
-
- if !exists {
- if sd.inlineMap < 0 {
- // The encoding/json package requires a flag to return on error for non-existent fields.
- // This functionality seems appropriate for the struct codec.
- err = vr.Skip()
- if err != nil {
- return err
- }
- continue
- }
-
- if inlineMap.IsNil() {
- inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
- }
-
- elem := reflect.New(inlineMap.Type().Elem()).Elem()
- dc.Ancestor = inlineMap.Type()
- err = decoder.DecodeValue(dc, vr, elem)
- if err != nil {
- return err
- }
- inlineMap.SetMapIndex(reflect.ValueOf(name), elem)
- continue
- }
-
- var field reflect.Value
- if fd.inline == nil {
- field = val.Field(fd.idx)
- } else {
- field, err = getInlineField(val, fd.inline)
- if err != nil {
- return err
- }
- }
-
- if !field.CanSet() { // Being settable is a super set of being addressable.
- innerErr := fmt.Errorf("field %v is not settable", field)
- return newDecodeError(fd.name, innerErr)
- }
- if field.Kind() == reflect.Ptr && field.IsNil() {
- field.Set(reflect.New(field.Type().Elem()))
- }
- field = field.Addr()
-
- dctx := DecodeContext{
- Registry: dc.Registry,
- Truncate: fd.truncate || dc.Truncate,
- defaultDocumentType: dc.defaultDocumentType,
- binaryAsSlice: dc.binaryAsSlice,
- useJSONStructTags: dc.useJSONStructTags,
- useLocalTimeZone: dc.useLocalTimeZone,
- zeroMaps: dc.zeroMaps,
- zeroStructs: dc.zeroStructs,
- }
-
- if fd.decoder == nil {
- return newDecodeError(fd.name, ErrNoDecoder{Type: field.Elem().Type()})
- }
-
- err = fd.decoder.DecodeValue(dctx, vr, field.Elem())
- if err != nil {
- return newDecodeError(fd.name, err)
- }
- }
-
- return nil
-}
-
-func isEmpty(v reflect.Value, omitZeroStruct bool) bool {
- kind := v.Kind()
- if (kind != reflect.Ptr || !v.IsNil()) && v.Type().Implements(tZeroer) {
- return v.Interface().(Zeroer).IsZero()
- }
- switch kind {
- case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
- return v.Len() == 0
- case reflect.Struct:
- if !omitZeroStruct {
- return false
- }
- vt := v.Type()
- if vt == tTime {
- return v.Interface().(time.Time).IsZero()
- }
- numField := vt.NumField()
- for i := 0; i < numField; i++ {
- ff := vt.Field(i)
- if ff.PkgPath != "" && !ff.Anonymous {
- continue // Private field
- }
- if !isEmpty(v.Field(i), omitZeroStruct) {
- return false
- }
- }
- return true
- }
- return !v.IsValid() || v.IsZero()
-}
-
-type structDescription struct {
- fm map[string]fieldDescription
- fl []fieldDescription
- inlineMap int
- inline bool
-}
-
-type fieldDescription struct {
- name string // BSON key name
- fieldName string // struct field name
- idx int
- omitEmpty bool
- minSize bool
- truncate bool
- inline []int
- encoder ValueEncoder
- decoder ValueDecoder
-}
-
-type byIndex []fieldDescription
-
-func (bi byIndex) Len() int { return len(bi) }
-
-func (bi byIndex) Swap(i, j int) { bi[i], bi[j] = bi[j], bi[i] }
-
-func (bi byIndex) Less(i, j int) bool {
- // If a field is inlined, its index in the top level struct is stored at inline[0]
- iIdx, jIdx := bi[i].idx, bi[j].idx
- if len(bi[i].inline) > 0 {
- iIdx = bi[i].inline[0]
- }
- if len(bi[j].inline) > 0 {
- jIdx = bi[j].inline[0]
- }
- if iIdx != jIdx {
- return iIdx < jIdx
- }
- for k, biik := range bi[i].inline {
- if k >= len(bi[j].inline) {
- return false
- }
- if biik != bi[j].inline[k] {
- return biik < bi[j].inline[k]
- }
- }
- return len(bi[i].inline) < len(bi[j].inline)
-}
-
-func (sc *StructCodec) describeStruct(
- r *Registry,
- t reflect.Type,
- useJSONStructTags bool,
- errorOnDuplicates bool,
-) (*structDescription, error) {
- // We need to analyze the struct, including getting the tags, collecting
- // information about inlining, and create a map of the field name to the field.
- if v, ok := sc.cache.Load(t); ok {
- return v.(*structDescription), nil
- }
- // TODO(charlie): Only describe the struct once when called
- // concurrently with the same type.
- ds, err := sc.describeStructSlow(r, t, useJSONStructTags, errorOnDuplicates)
- if err != nil {
- return nil, err
- }
- if v, loaded := sc.cache.LoadOrStore(t, ds); loaded {
- ds = v.(*structDescription)
- }
- return ds, nil
-}
-
-func (sc *StructCodec) describeStructSlow(
- r *Registry,
- t reflect.Type,
- useJSONStructTags bool,
- errorOnDuplicates bool,
-) (*structDescription, error) {
- numFields := t.NumField()
- sd := &structDescription{
- fm: make(map[string]fieldDescription, numFields),
- fl: make([]fieldDescription, 0, numFields),
- inlineMap: -1,
- }
-
- var fields []fieldDescription
- for i := 0; i < numFields; i++ {
- sf := t.Field(i)
- if sf.PkgPath != "" && (!sc.AllowUnexportedFields || !sf.Anonymous) {
- // field is private or unexported fields aren't allowed, ignore
- continue
- }
-
- sfType := sf.Type
- encoder, err := r.LookupEncoder(sfType)
- if err != nil {
- encoder = nil
- }
- decoder, err := r.LookupDecoder(sfType)
- if err != nil {
- decoder = nil
- }
-
- description := fieldDescription{
- fieldName: sf.Name,
- idx: i,
- encoder: encoder,
- decoder: decoder,
- }
-
- var stags StructTags
- // If the caller requested that we use JSON struct tags, use the JSONFallbackStructTagParser
- // instead of the parser defined on the codec.
- if useJSONStructTags {
- stags, err = JSONFallbackStructTagParser.ParseStructTags(sf)
- } else {
- stags, err = sc.parser.ParseStructTags(sf)
- }
- if err != nil {
- return nil, err
- }
- if stags.Skip {
- continue
- }
- description.name = stags.Name
- description.omitEmpty = stags.OmitEmpty
- description.minSize = stags.MinSize
- description.truncate = stags.Truncate
-
- if stags.Inline {
- sd.inline = true
- switch sfType.Kind() {
- case reflect.Map:
- if sd.inlineMap >= 0 {
- return nil, errors.New("(struct " + t.String() + ") multiple inline maps")
- }
- if sfType.Key() != tString {
- return nil, errors.New("(struct " + t.String() + ") inline map must have a string keys")
- }
- sd.inlineMap = description.idx
- case reflect.Ptr:
- sfType = sfType.Elem()
- if sfType.Kind() != reflect.Struct {
- return nil, fmt.Errorf("(struct %s) inline fields must be a struct, a struct pointer, or a map", t.String())
- }
- fallthrough
- case reflect.Struct:
- inlinesf, err := sc.describeStruct(r, sfType, useJSONStructTags, errorOnDuplicates)
- if err != nil {
- return nil, err
- }
- for _, fd := range inlinesf.fl {
- if fd.inline == nil {
- fd.inline = []int{i, fd.idx}
- } else {
- fd.inline = append([]int{i}, fd.inline...)
- }
- fields = append(fields, fd)
-
- }
- default:
- return nil, fmt.Errorf("(struct %s) inline fields must be a struct, a struct pointer, or a map", t.String())
- }
- continue
- }
- fields = append(fields, description)
- }
-
- // Sort fieldDescriptions by name and use dominance rules to determine which should be added for each name
- sort.Slice(fields, func(i, j int) bool {
- x := fields
- // sort field by name, breaking ties with depth, then
- // breaking ties with index sequence.
- if x[i].name != x[j].name {
- return x[i].name < x[j].name
- }
- if len(x[i].inline) != len(x[j].inline) {
- return len(x[i].inline) < len(x[j].inline)
- }
- return byIndex(x).Less(i, j)
- })
-
- for advance, i := 0, 0; i < len(fields); i += advance {
- // One iteration per name.
- // Find the sequence of fields with the name of this first field.
- fi := fields[i]
- name := fi.name
- for advance = 1; i+advance < len(fields); advance++ {
- fj := fields[i+advance]
- if fj.name != name {
- break
- }
- }
- if advance == 1 { // Only one field with this name
- sd.fl = append(sd.fl, fi)
- sd.fm[name] = fi
- continue
- }
- dominant, ok := dominantField(fields[i : i+advance])
- if !ok || !sc.OverwriteDuplicatedInlinedFields || errorOnDuplicates {
- return nil, fmt.Errorf("struct %s has duplicated key %s", t.String(), name)
- }
- sd.fl = append(sd.fl, dominant)
- sd.fm[name] = dominant
- }
-
- sort.Sort(byIndex(sd.fl))
-
- return sd, nil
-}
-
-// dominantField looks through the fields, all of which are known to
-// have the same name, to find the single field that dominates the
-// others using Go's inlining rules. If there are multiple top-level
-// fields, the boolean will be false: This condition is an error in Go
-// and we skip all the fields.
-func dominantField(fields []fieldDescription) (fieldDescription, bool) {
- // The fields are sorted in increasing index-length order, then by presence of tag.
- // That means that the first field is the dominant one. We need only check
- // for error cases: two fields at top level.
- if len(fields) > 1 &&
- len(fields[0].inline) == len(fields[1].inline) {
- return fieldDescription{}, false
- }
- return fields[0], true
-}
-
-func fieldByIndexErr(v reflect.Value, index []int) (result reflect.Value, err error) {
- defer func() {
- if recovered := recover(); recovered != nil {
- switch r := recovered.(type) {
- case string:
- err = fmt.Errorf("%s", r)
- case error:
- err = r
- }
- }
- }()
-
- result = v.FieldByIndex(index)
- return
-}
-
-func getInlineField(val reflect.Value, index []int) (reflect.Value, error) {
- field, err := fieldByIndexErr(val, index)
- if err == nil {
- return field, nil
- }
-
- // if parent of this element doesn't exist, fix its parent
- inlineParent := index[:len(index)-1]
- var fParent reflect.Value
- if fParent, err = fieldByIndexErr(val, inlineParent); err != nil {
- fParent, err = getInlineField(val, inlineParent)
- if err != nil {
- return fParent, err
- }
- }
- fParent.Set(reflect.New(fParent.Type().Elem()))
-
- return fieldByIndexErr(val, index)
-}
-
-// DeepZero returns recursive zero object
-func deepZero(st reflect.Type) (result reflect.Value) {
- if st.Kind() == reflect.Struct {
- numField := st.NumField()
- for i := 0; i < numField; i++ {
- if result == emptyValue {
- result = reflect.Indirect(reflect.New(st))
- }
- f := result.Field(i)
- if f.CanInterface() {
- if f.Type().Kind() == reflect.Struct {
- result.Field(i).Set(recursivePointerTo(deepZero(f.Type().Elem())))
- }
- }
- }
- }
- return result
-}
-
-// recursivePointerTo calls reflect.New(v.Type) but recursively for its fields inside
-func recursivePointerTo(v reflect.Value) reflect.Value {
- v = reflect.Indirect(v)
- result := reflect.New(v.Type())
- if v.Kind() == reflect.Struct {
- for i := 0; i < v.NumField(); i++ {
- if f := v.Field(i); f.Kind() == reflect.Ptr {
- if f.Elem().Kind() == reflect.Struct {
- result.Elem().Field(i).Set(recursivePointerTo(f))
- }
- }
- }
- }
-
- return result
-}
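Editor's note: the omitempty handling removed above consults the bsoncodec.Zeroer interface (via isEmpty) before falling back to structural zero checks. A minimal sketch of what that means for callers, assuming the upstream driver's default registry; the Task/Status type names are made up for illustration:

```go
package main

import (
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
)

// Status satisfies bsoncodec.Zeroer (IsZero() bool), so the struct codec treats
// it as "empty" whenever Code is zero and omitempty drops the field.
type Status struct {
	Code int
}

func (s Status) IsZero() bool { return s.Code == 0 }

type Task struct {
	Name   string `bson:"name"`
	Status Status `bson:"status,omitempty"`
}

func main() {
	doc, err := bson.Marshal(Task{Name: "build"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(bson.Raw(doc)) // "status" is omitted because Status.IsZero() reported true
}
```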
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go
deleted file mode 100644
index 18d85bfb03..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go
+++ /dev/null
@@ -1,148 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "reflect"
- "strings"
-)
-
-// StructTagParser returns the struct tags for a given struct field.
-//
-// Deprecated: Defining custom BSON struct tag parsers will not be supported in Go Driver 2.0.
-type StructTagParser interface {
- ParseStructTags(reflect.StructField) (StructTags, error)
-}
-
-// StructTagParserFunc is an adapter that allows a generic function to be used
-// as a StructTagParser.
-//
-// Deprecated: Defining custom BSON struct tag parsers will not be supported in Go Driver 2.0.
-type StructTagParserFunc func(reflect.StructField) (StructTags, error)
-
-// ParseStructTags implements the StructTagParser interface.
-func (stpf StructTagParserFunc) ParseStructTags(sf reflect.StructField) (StructTags, error) {
- return stpf(sf)
-}
-
-// StructTags represents the struct tag fields that the StructCodec uses during
-// the encoding and decoding process.
-//
-// In the case of a struct, the lowercased field name is used as the key for each exported
-// field but this behavior may be changed using a struct tag. The tag may also contain flags to
-// adjust the marshalling behavior for the field.
-//
-// The properties are defined below:
-//
-// OmitEmpty Only include the field if it's not set to the zero value for the type or to
-// empty slices or maps.
-//
-// MinSize Marshal an integer of a type larger than 32 bits value as an int32, if that's
-// feasible while preserving the numeric value.
-//
-// Truncate When unmarshaling a BSON double, it is permitted to lose precision to fit within
-// a float32.
-//
-// Inline Inline the field, which must be a struct or a map, causing all of its fields
-// or keys to be processed as if they were part of the outer struct. For maps,
-// keys must not conflict with the bson keys of other struct fields.
-//
-// Skip This struct field should be skipped. This is usually denoted by parsing a "-"
-// for the name.
-//
-// Deprecated: Defining custom BSON struct tag parsers will not be supported in Go Driver 2.0.
-type StructTags struct {
- Name string
- OmitEmpty bool
- MinSize bool
- Truncate bool
- Inline bool
- Skip bool
-}
-
-// DefaultStructTagParser is the StructTagParser used by the StructCodec by default.
-// It will handle the bson struct tag. See the documentation for StructTags to see
-// what each of the returned fields means.
-//
-// If there is no name in the struct tag fields, the struct field name is lowercased.
-// The tag formats accepted are:
-//
-// "[][,[,]]"
-//
-// `(...) bson:"[][,[,]]" (...)`
-//
-// An example:
-//
-// type T struct {
-// A bool
-// B int "myb"
-// C string "myc,omitempty"
-// D string `bson:",omitempty" json:"jsonkey"`
-// E int64 ",minsize"
-// F int64 "myf,omitempty,minsize"
-// }
-//
-// A struct tag either consisting entirely of '-' or with a bson key with a
-// value consisting entirely of '-' will return a StructTags with Skip true and
-// the remaining fields will be their default values.
-//
-// Deprecated: DefaultStructTagParser will be removed in Go Driver 2.0.
-var DefaultStructTagParser StructTagParserFunc = func(sf reflect.StructField) (StructTags, error) {
- key := strings.ToLower(sf.Name)
- tag, ok := sf.Tag.Lookup("bson")
- if !ok && !strings.Contains(string(sf.Tag), ":") && len(sf.Tag) > 0 {
- tag = string(sf.Tag)
- }
- return parseTags(key, tag)
-}
-
-func parseTags(key string, tag string) (StructTags, error) {
- var st StructTags
- if tag == "-" {
- st.Skip = true
- return st, nil
- }
-
- for idx, str := range strings.Split(tag, ",") {
- if idx == 0 && str != "" {
- key = str
- }
- switch str {
- case "omitempty":
- st.OmitEmpty = true
- case "minsize":
- st.MinSize = true
- case "truncate":
- st.Truncate = true
- case "inline":
- st.Inline = true
- }
- }
-
- st.Name = key
-
- return st, nil
-}
-
-// JSONFallbackStructTagParser has the same behavior as DefaultStructTagParser
-// but will also fallback to parsing the json tag instead on a field where the
-// bson tag isn't available.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.UseJSONStructTags] and
-// [go.mongodb.org/mongo-driver/bson.Decoder.UseJSONStructTags] instead.
-var JSONFallbackStructTagParser StructTagParserFunc = func(sf reflect.StructField) (StructTags, error) {
- key := strings.ToLower(sf.Name)
- tag, ok := sf.Tag.Lookup("bson")
- if !ok {
- tag, ok = sf.Tag.Lookup("json")
- }
- if !ok && !strings.Contains(string(sf.Tag), ":") && len(sf.Tag) > 0 {
- tag = string(sf.Tag)
- }
-
- return parseTags(key, tag)
-}
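Editor's note: the tag grammar documented in the removed comment maps directly onto ordinary struct definitions. A short sketch of the flags the parser recognizes (the Event type and its field names are illustrative, not from this diff):

```go
package main

import (
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
)

// Event exercises the bson struct tag flags parsed above.
type Event struct {
	ID      string            `bson:"_id"`            // explicit key name
	Name    string            `bson:",omitempty"`     // key defaults to the lowercased field name ("name")
	Count   int64             `bson:"count,minsize"`  // stored as int32 when the value fits
	Ratio   float32           `bson:"ratio,truncate"` // BSON doubles may be truncated to fit a float32
	Extra   map[string]string `bson:",inline"`        // map keys are promoted into the enclosing document
	Ignored string            `bson:"-"`              // never marshaled or unmarshaled
}

func main() {
	doc, err := bson.Marshal(Event{ID: "a1", Count: 7, Extra: map[string]string{"k": "v"}})
	if err != nil {
		log.Fatal(err)
	}
	// "name" is omitted (omitempty), "Ignored" is skipped, and "k" appears inline.
	fmt.Println(bson.Raw(doc))
}
```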
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go
deleted file mode 100644
index 22fb762c41..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go
+++ /dev/null
@@ -1,151 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "fmt"
- "reflect"
- "time"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-const (
- timeFormatString = "2006-01-02T15:04:05.999Z07:00"
-)
-
-// TimeCodec is the Codec used for time.Time values.
-//
-// Deprecated: TimeCodec will not be directly configurable in Go Driver 2.0.
-// To configure the time.Time encode and decode behavior, use the configuration
-// methods on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the time.Time encode
-// and decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to ..., use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// UseLocalTimeZone: true,
-// })
-//
-// See the deprecation notice for each field in TimeCodec for the corresponding
-// settings.
-type TimeCodec struct {
- // UseLocalTimeZone specifies if we should decode into the local time zone. Defaults to false.
- //
- // Deprecated: Use bson.Decoder.UseLocalTimeZone or options.BSONOptions.UseLocalTimeZone
- // instead.
- UseLocalTimeZone bool
-}
-
-var (
- defaultTimeCodec = NewTimeCodec()
-
- // Assert that defaultTimeCodec satisfies the typeDecoder interface, which allows it to be used
- // by collection type decoders (e.g. map, slice, etc) to set individual values in a collection.
- _ typeDecoder = defaultTimeCodec
-)
-
-// NewTimeCodec returns a TimeCodec with options opts.
-//
-// Deprecated: NewTimeCodec will not be available in Go Driver 2.0. See
-// [TimeCodec] for more details.
-func NewTimeCodec(opts ...*bsonoptions.TimeCodecOptions) *TimeCodec {
- timeOpt := bsonoptions.MergeTimeCodecOptions(opts...)
-
- codec := TimeCodec{}
- if timeOpt.UseLocalTimeZone != nil {
- codec.UseLocalTimeZone = *timeOpt.UseLocalTimeZone
- }
- return &codec
-}
-
-func (tc *TimeCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- if t != tTime {
- return emptyValue, ValueDecoderError{
- Name: "TimeDecodeValue",
- Types: []reflect.Type{tTime},
- Received: reflect.Zero(t),
- }
- }
-
- var timeVal time.Time
- switch vrType := vr.Type(); vrType {
- case bsontype.DateTime:
- dt, err := vr.ReadDateTime()
- if err != nil {
- return emptyValue, err
- }
- timeVal = time.Unix(dt/1000, dt%1000*1000000)
- case bsontype.String:
- // assume strings are in the isoTimeFormat
- timeStr, err := vr.ReadString()
- if err != nil {
- return emptyValue, err
- }
- timeVal, err = time.Parse(timeFormatString, timeStr)
- if err != nil {
- return emptyValue, err
- }
- case bsontype.Int64:
- i64, err := vr.ReadInt64()
- if err != nil {
- return emptyValue, err
- }
- timeVal = time.Unix(i64/1000, i64%1000*1000000)
- case bsontype.Timestamp:
- t, _, err := vr.ReadTimestamp()
- if err != nil {
- return emptyValue, err
- }
- timeVal = time.Unix(int64(t), 0)
- case bsontype.Null:
- if err := vr.ReadNull(); err != nil {
- return emptyValue, err
- }
- case bsontype.Undefined:
- if err := vr.ReadUndefined(); err != nil {
- return emptyValue, err
- }
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into a time.Time", vrType)
- }
-
- if !tc.UseLocalTimeZone && !dc.useLocalTimeZone {
- timeVal = timeVal.UTC()
- }
- return reflect.ValueOf(timeVal), nil
-}
-
-// DecodeValue is the ValueDecoderFunc for time.Time.
-func (tc *TimeCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() || val.Type() != tTime {
- return ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: val}
- }
-
- elem, err := tc.decodeType(dc, vr, tTime)
- if err != nil {
- return err
- }
-
- val.Set(elem)
- return nil
-}
-
-// EncodeValue is the ValueEncoderFunc for time.TIme.
-func (tc *TimeCodec) EncodeValue(_ EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- if !val.IsValid() || val.Type() != tTime {
- return ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: val}
- }
- tt := val.Interface().(time.Time)
- dt := primitive.NewDateTimeFromTime(tt)
- return vw.WriteDateTime(int64(dt))
-}
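Editor's note: the deprecation notice in the removed TimeCodec points at options.BSONOptions. Mirroring the snippet quoted in that comment, a client keeps decoding time.Time values in the local time zone roughly like this (the URI is a placeholder):

```go
package main

import (
	"context"
	"log"

	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

func main() {
	// Same behavior the removed TimeCodec.UseLocalTimeZone field controlled,
	// expressed through options.BSONOptions as the deprecation notice suggests.
	opts := options.Client().
		ApplyURI("mongodb://localhost:27017"). // placeholder URI
		SetBSONOptions(&options.BSONOptions{
			UseLocalTimeZone: true, // decode BSON datetimes into the local time zone
		})

	client, err := mongo.Connect(context.Background(), opts)
	if err != nil {
		log.Fatal(err)
	}
	defer client.Disconnect(context.Background())
}
```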
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go
deleted file mode 100644
index 6ade17b7d3..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "encoding/json"
- "net/url"
- "reflect"
- "time"
-
- "go.mongodb.org/mongo-driver/bson/primitive"
- "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
-)
-
-var tBool = reflect.TypeOf(false)
-var tFloat64 = reflect.TypeOf(float64(0))
-var tInt32 = reflect.TypeOf(int32(0))
-var tInt64 = reflect.TypeOf(int64(0))
-var tString = reflect.TypeOf("")
-var tTime = reflect.TypeOf(time.Time{})
-
-var tEmpty = reflect.TypeOf((*interface{})(nil)).Elem()
-var tByteSlice = reflect.TypeOf([]byte(nil))
-var tByte = reflect.TypeOf(byte(0x00))
-var tURL = reflect.TypeOf(url.URL{})
-var tJSONNumber = reflect.TypeOf(json.Number(""))
-
-var tValueMarshaler = reflect.TypeOf((*ValueMarshaler)(nil)).Elem()
-var tValueUnmarshaler = reflect.TypeOf((*ValueUnmarshaler)(nil)).Elem()
-var tMarshaler = reflect.TypeOf((*Marshaler)(nil)).Elem()
-var tUnmarshaler = reflect.TypeOf((*Unmarshaler)(nil)).Elem()
-var tProxy = reflect.TypeOf((*Proxy)(nil)).Elem()
-var tZeroer = reflect.TypeOf((*Zeroer)(nil)).Elem()
-
-var tBinary = reflect.TypeOf(primitive.Binary{})
-var tUndefined = reflect.TypeOf(primitive.Undefined{})
-var tOID = reflect.TypeOf(primitive.ObjectID{})
-var tDateTime = reflect.TypeOf(primitive.DateTime(0))
-var tNull = reflect.TypeOf(primitive.Null{})
-var tRegex = reflect.TypeOf(primitive.Regex{})
-var tCodeWithScope = reflect.TypeOf(primitive.CodeWithScope{})
-var tDBPointer = reflect.TypeOf(primitive.DBPointer{})
-var tJavaScript = reflect.TypeOf(primitive.JavaScript(""))
-var tSymbol = reflect.TypeOf(primitive.Symbol(""))
-var tTimestamp = reflect.TypeOf(primitive.Timestamp{})
-var tDecimal = reflect.TypeOf(primitive.Decimal128{})
-var tMinKey = reflect.TypeOf(primitive.MinKey{})
-var tMaxKey = reflect.TypeOf(primitive.MaxKey{})
-var tD = reflect.TypeOf(primitive.D{})
-var tA = reflect.TypeOf(primitive.A{})
-var tE = reflect.TypeOf(primitive.E{})
-
-var tCoreDocument = reflect.TypeOf(bsoncore.Document{})
-var tCoreArray = reflect.TypeOf(bsoncore.Array{})
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go
deleted file mode 100644
index 39b07135b1..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsoncodec
-
-import (
- "fmt"
- "math"
- "reflect"
-
- "go.mongodb.org/mongo-driver/bson/bsonoptions"
- "go.mongodb.org/mongo-driver/bson/bsonrw"
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-// UIntCodec is the Codec used for uint values.
-//
-// Deprecated: UIntCodec will not be directly configurable in Go Driver 2.0. To
-// configure the uint encode and decode behavior, use the configuration methods
-// on a [go.mongodb.org/mongo-driver/bson.Encoder] or
-// [go.mongodb.org/mongo-driver/bson.Decoder]. To configure the uint encode and
-// decode behavior for a mongo.Client, use
-// [go.mongodb.org/mongo-driver/mongo/options.ClientOptions.SetBSONOptions].
-//
-// For example, to configure a mongo.Client to marshal Go uint values as the
-// minimum BSON int size that can represent the value, use:
-//
-// opt := options.Client().SetBSONOptions(&options.BSONOptions{
-// IntMinSize: true,
-// })
-//
-// See the deprecation notice for each field in UIntCodec for the corresponding
-// settings.
-type UIntCodec struct {
- // EncodeToMinSize causes EncodeValue to marshal Go uint values (excluding uint64) as the
- // minimum BSON int size (either 32-bit or 64-bit) that can represent the integer value.
- //
- // Deprecated: Use bson.Encoder.IntMinSize or options.BSONOptions.IntMinSize instead.
- EncodeToMinSize bool
-}
-
-var (
- defaultUIntCodec = NewUIntCodec()
-
- // Assert that defaultUIntCodec satisfies the typeDecoder interface, which allows it to be used
- // by collection type decoders (e.g. map, slice, etc) to set individual values in a collection.
- _ typeDecoder = defaultUIntCodec
-)
-
-// NewUIntCodec returns a UIntCodec with options opts.
-//
-// Deprecated: NewUIntCodec will not be available in Go Driver 2.0. See
-// [UIntCodec] for more details.
-func NewUIntCodec(opts ...*bsonoptions.UIntCodecOptions) *UIntCodec {
- uintOpt := bsonoptions.MergeUIntCodecOptions(opts...)
-
- codec := UIntCodec{}
- if uintOpt.EncodeToMinSize != nil {
- codec.EncodeToMinSize = *uintOpt.EncodeToMinSize
- }
- return &codec
-}
-
-// EncodeValue is the ValueEncoder for uint types.
-func (uic *UIntCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
- switch val.Kind() {
- case reflect.Uint8, reflect.Uint16:
- return vw.WriteInt32(int32(val.Uint()))
- case reflect.Uint, reflect.Uint32, reflect.Uint64:
- u64 := val.Uint()
-
- // If ec.MinSize or if encodeToMinSize is true for a non-uint64 value we should write val as an int32
- useMinSize := ec.MinSize || (uic.EncodeToMinSize && val.Kind() != reflect.Uint64)
-
- if u64 <= math.MaxInt32 && useMinSize {
- return vw.WriteInt32(int32(u64))
- }
- if u64 > math.MaxInt64 {
- return fmt.Errorf("%d overflows int64", u64)
- }
- return vw.WriteInt64(int64(u64))
- }
-
- return ValueEncoderError{
- Name: "UintEncodeValue",
- Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
- Received: val,
- }
-}
-
-func (uic *UIntCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
- var i64 int64
- var err error
- switch vrType := vr.Type(); vrType {
- case bsontype.Int32:
- i32, err := vr.ReadInt32()
- if err != nil {
- return emptyValue, err
- }
- i64 = int64(i32)
- case bsontype.Int64:
- i64, err = vr.ReadInt64()
- if err != nil {
- return emptyValue, err
- }
- case bsontype.Double:
- f64, err := vr.ReadDouble()
- if err != nil {
- return emptyValue, err
- }
- if !dc.Truncate && math.Floor(f64) != f64 {
- return emptyValue, errCannotTruncate
- }
- if f64 > float64(math.MaxInt64) {
- return emptyValue, fmt.Errorf("%g overflows int64", f64)
- }
- i64 = int64(f64)
- case bsontype.Boolean:
- b, err := vr.ReadBoolean()
- if err != nil {
- return emptyValue, err
- }
- if b {
- i64 = 1
- }
- case bsontype.Null:
- if err = vr.ReadNull(); err != nil {
- return emptyValue, err
- }
- case bsontype.Undefined:
- if err = vr.ReadUndefined(); err != nil {
- return emptyValue, err
- }
- default:
- return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType)
- }
-
- switch t.Kind() {
- case reflect.Uint8:
- if i64 < 0 || i64 > math.MaxUint8 {
- return emptyValue, fmt.Errorf("%d overflows uint8", i64)
- }
-
- return reflect.ValueOf(uint8(i64)), nil
- case reflect.Uint16:
- if i64 < 0 || i64 > math.MaxUint16 {
- return emptyValue, fmt.Errorf("%d overflows uint16", i64)
- }
-
- return reflect.ValueOf(uint16(i64)), nil
- case reflect.Uint32:
- if i64 < 0 || i64 > math.MaxUint32 {
- return emptyValue, fmt.Errorf("%d overflows uint32", i64)
- }
-
- return reflect.ValueOf(uint32(i64)), nil
- case reflect.Uint64:
- if i64 < 0 {
- return emptyValue, fmt.Errorf("%d overflows uint64", i64)
- }
-
- return reflect.ValueOf(uint64(i64)), nil
- case reflect.Uint:
- if i64 < 0 {
- return emptyValue, fmt.Errorf("%d overflows uint", i64)
- }
- v := uint64(i64)
- if v > math.MaxUint { // Can we fit this inside of an uint
- return emptyValue, fmt.Errorf("%d overflows uint", i64)
- }
-
- return reflect.ValueOf(uint(v)), nil
- default:
- return emptyValue, ValueDecoderError{
- Name: "UintDecodeValue",
- Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
- Received: reflect.Zero(t),
- }
- }
-}
-
-// DecodeValue is the ValueDecoder for uint types.
-func (uic *UIntCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
- if !val.CanSet() {
- return ValueDecoderError{
- Name: "UintDecodeValue",
- Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
- Received: val,
- }
- }
-
- elem, err := uic.decodeType(dc, vr, val.Type())
- if err != nil {
- return err
- }
-
- val.SetUint(elem.Uint())
- return nil
-}
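Editor's note: the same replacement pattern applies to the uint codec's EncodeToMinSize knob; per the removed deprecation comment, the options.BSONOptions equivalent looks roughly like this (helper name is made up):

```go
package main

import (
	"go.mongodb.org/mongo-driver/mongo/options"
)

// clientOptsWithMinSizeInts mirrors the removed UIntCodec.EncodeToMinSize /
// EncodeContext.MinSize behavior via options.BSONOptions.
func clientOptsWithMinSizeInts() *options.ClientOptions {
	return options.Client().SetBSONOptions(&options.BSONOptions{
		IntMinSize: true, // marshal uint (and int) values as int32 whenever the value fits
	})
}

func main() {
	_ = clientOptsWithMinSizeInts() // pass to mongo.Connect in real code
}
```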
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go
deleted file mode 100644
index 996bd17127..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-// ByteSliceCodecOptions represents all possible options for byte slice encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type ByteSliceCodecOptions struct {
- EncodeNilAsEmpty *bool // Specifies if a nil byte slice should encode as an empty binary instead of null. Defaults to false.
-}
-
-// ByteSliceCodec creates a new *ByteSliceCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func ByteSliceCodec() *ByteSliceCodecOptions {
- return &ByteSliceCodecOptions{}
-}
-
-// SetEncodeNilAsEmpty specifies if a nil byte slice should encode as an empty binary instead of null. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.NilByteSliceAsEmpty] instead.
-func (bs *ByteSliceCodecOptions) SetEncodeNilAsEmpty(b bool) *ByteSliceCodecOptions {
- bs.EncodeNilAsEmpty = &b
- return bs
-}
-
-// MergeByteSliceCodecOptions combines the given *ByteSliceCodecOptions into a single *ByteSliceCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeByteSliceCodecOptions(opts ...*ByteSliceCodecOptions) *ByteSliceCodecOptions {
- bs := ByteSliceCodec()
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.EncodeNilAsEmpty != nil {
- bs.EncodeNilAsEmpty = opt.EncodeNilAsEmpty
- }
- }
-
- return bs
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go
deleted file mode 100644
index c40973c8d4..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2022-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-// Package bsonoptions defines the optional configurations for the BSON codecs.
-package bsonoptions
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go
deleted file mode 100644
index f522c7e03f..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-// EmptyInterfaceCodecOptions represents all possible options for interface{} encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type EmptyInterfaceCodecOptions struct {
- DecodeBinaryAsSlice *bool // Specifies if Old and Generic type binarys should default to []slice instead of primitive.Binary. Defaults to false.
-}
-
-// EmptyInterfaceCodec creates a new *EmptyInterfaceCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func EmptyInterfaceCodec() *EmptyInterfaceCodecOptions {
- return &EmptyInterfaceCodecOptions{}
-}
-
-// SetDecodeBinaryAsSlice specifies if Old and Generic type binarys should default to []slice instead of primitive.Binary. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.BinaryAsSlice] instead.
-func (e *EmptyInterfaceCodecOptions) SetDecodeBinaryAsSlice(b bool) *EmptyInterfaceCodecOptions {
- e.DecodeBinaryAsSlice = &b
- return e
-}
-
-// MergeEmptyInterfaceCodecOptions combines the given *EmptyInterfaceCodecOptions into a single *EmptyInterfaceCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeEmptyInterfaceCodecOptions(opts ...*EmptyInterfaceCodecOptions) *EmptyInterfaceCodecOptions {
- e := EmptyInterfaceCodec()
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.DecodeBinaryAsSlice != nil {
- e.DecodeBinaryAsSlice = opt.DecodeBinaryAsSlice
- }
- }
-
- return e
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go
deleted file mode 100644
index a7a7c1d980..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go
+++ /dev/null
@@ -1,82 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-// MapCodecOptions represents all possible options for map encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type MapCodecOptions struct {
- DecodeZerosMap *bool // Specifies if the map should be zeroed before decoding into it. Defaults to false.
- EncodeNilAsEmpty *bool // Specifies if a nil map should encode as an empty document instead of null. Defaults to false.
- // Specifies how keys should be handled. If false, the behavior matches encoding/json, where the encoding key type must
- // either be a string, an integer type, or implement bsoncodec.KeyMarshaler and the decoding key type must either be a
- // string, an integer type, or implement bsoncodec.KeyUnmarshaler. If true, keys are encoded with fmt.Sprint() and the
- // encoding key type must be a string, an integer type, or a float. If true, the use of Stringer will override
- // TextMarshaler/TextUnmarshaler. Defaults to false.
- EncodeKeysWithStringer *bool
-}
-
-// MapCodec creates a new *MapCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func MapCodec() *MapCodecOptions {
- return &MapCodecOptions{}
-}
-
-// SetDecodeZerosMap specifies if the map should be zeroed before decoding into it. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.ZeroMaps] instead.
-func (t *MapCodecOptions) SetDecodeZerosMap(b bool) *MapCodecOptions {
- t.DecodeZerosMap = &b
- return t
-}
-
-// SetEncodeNilAsEmpty specifies if a nil map should encode as an empty document instead of null. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.NilMapAsEmpty] instead.
-func (t *MapCodecOptions) SetEncodeNilAsEmpty(b bool) *MapCodecOptions {
- t.EncodeNilAsEmpty = &b
- return t
-}
-
-// SetEncodeKeysWithStringer specifies how keys should be handled. If false, the behavior matches encoding/json, where the
-// encoding key type must either be a string, an integer type, or implement bsoncodec.KeyMarshaler and the decoding key
-// type must either be a string, an integer type, or implement bsoncodec.KeyUnmarshaler. If true, keys are encoded with
-// fmt.Sprint() and the encoding key type must be a string, an integer type, or a float. If true, the use of Stringer
-// will override TextMarshaler/TextUnmarshaler. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.StringifyMapKeysWithFmt] instead.
-func (t *MapCodecOptions) SetEncodeKeysWithStringer(b bool) *MapCodecOptions {
- t.EncodeKeysWithStringer = &b
- return t
-}
-
-// MergeMapCodecOptions combines the given *MapCodecOptions into a single *MapCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeMapCodecOptions(opts ...*MapCodecOptions) *MapCodecOptions {
- s := MapCodec()
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.DecodeZerosMap != nil {
- s.DecodeZerosMap = opt.DecodeZerosMap
- }
- if opt.EncodeNilAsEmpty != nil {
- s.EncodeNilAsEmpty = opt.EncodeNilAsEmpty
- }
- if opt.EncodeKeysWithStringer != nil {
- s.EncodeKeysWithStringer = opt.EncodeKeysWithStringer
- }
- }
-
- return s
-}
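Editor's note: each of these option structs merges "in a last one wins fashion". A tiny sketch of what that means for the map options, using the deprecated helpers shown above (they still exist in the upstream driver module even though this vendored copy is removed):

```go
package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson/bsonoptions"
)

func main() {
	a := bsonoptions.MapCodec().SetDecodeZerosMap(true)
	b := bsonoptions.MapCodec().SetEncodeNilAsEmpty(true)

	// Later arguments win for any field they set; fields left nil keep the
	// earlier (or default) value.
	merged := bsonoptions.MergeMapCodecOptions(a, b)
	fmt.Println(*merged.DecodeZerosMap, *merged.EncodeNilAsEmpty) // true true
}
```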
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go
deleted file mode 100644
index 3c1e4f35ba..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-// SliceCodecOptions represents all possible options for slice encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type SliceCodecOptions struct {
- EncodeNilAsEmpty *bool // Specifies if a nil slice should encode as an empty array instead of null. Defaults to false.
-}
-
-// SliceCodec creates a new *SliceCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func SliceCodec() *SliceCodecOptions {
- return &SliceCodecOptions{}
-}
-
-// SetEncodeNilAsEmpty specifies if a nil slice should encode as an empty array instead of null. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.NilSliceAsEmpty] instead.
-func (s *SliceCodecOptions) SetEncodeNilAsEmpty(b bool) *SliceCodecOptions {
- s.EncodeNilAsEmpty = &b
- return s
-}
-
-// MergeSliceCodecOptions combines the given *SliceCodecOptions into a single *SliceCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeSliceCodecOptions(opts ...*SliceCodecOptions) *SliceCodecOptions {
- s := SliceCodec()
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.EncodeNilAsEmpty != nil {
- s.EncodeNilAsEmpty = opt.EncodeNilAsEmpty
- }
- }
-
- return s
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go
deleted file mode 100644
index f8b76f996e..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-var defaultDecodeOIDAsHex = true
-
-// StringCodecOptions represents all possible options for string encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type StringCodecOptions struct {
- DecodeObjectIDAsHex *bool // Specifies if we should decode ObjectID as the hex value. Defaults to true.
-}
-
-// StringCodec creates a new *StringCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func StringCodec() *StringCodecOptions {
- return &StringCodecOptions{}
-}
-
-// SetDecodeObjectIDAsHex specifies if object IDs should be decoded as their hex representation. If false, a string made
-// from the raw object ID bytes will be used. Defaults to true.
-//
-// Deprecated: Decoding object IDs as raw bytes will not be supported in Go Driver 2.0.
-func (t *StringCodecOptions) SetDecodeObjectIDAsHex(b bool) *StringCodecOptions {
- t.DecodeObjectIDAsHex = &b
- return t
-}
-
-// MergeStringCodecOptions combines the given *StringCodecOptions into a single *StringCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeStringCodecOptions(opts ...*StringCodecOptions) *StringCodecOptions {
- s := &StringCodecOptions{&defaultDecodeOIDAsHex}
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.DecodeObjectIDAsHex != nil {
- s.DecodeObjectIDAsHex = opt.DecodeObjectIDAsHex
- }
- }
-
- return s
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go
deleted file mode 100644
index 1cbfa32e8b..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go
+++ /dev/null
@@ -1,107 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-var defaultOverwriteDuplicatedInlinedFields = true
-
-// StructCodecOptions represents all possible options for struct encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type StructCodecOptions struct {
- DecodeZeroStruct *bool // Specifies if structs should be zeroed before decoding into them. Defaults to false.
- DecodeDeepZeroInline *bool // Specifies if structs should be recursively zeroed when a inline value is decoded. Defaults to false.
- EncodeOmitDefaultStruct *bool // Specifies if default structs should be considered empty by omitempty. Defaults to false.
- AllowUnexportedFields *bool // Specifies if unexported fields should be marshaled/unmarshaled. Defaults to false.
- OverwriteDuplicatedInlinedFields *bool // Specifies if fields in inlined structs can be overwritten by higher level struct fields with the same key. Defaults to true.
-}
-
-// StructCodec creates a new *StructCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func StructCodec() *StructCodecOptions {
- return &StructCodecOptions{}
-}
-
-// SetDecodeZeroStruct specifies if structs should be zeroed before decoding into them. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.ZeroStructs] instead.
-func (t *StructCodecOptions) SetDecodeZeroStruct(b bool) *StructCodecOptions {
- t.DecodeZeroStruct = &b
- return t
-}
-
-// SetDecodeDeepZeroInline specifies if structs should be zeroed before decoding into them. Defaults to false.
-//
-// Deprecated: DecodeDeepZeroInline will not be supported in Go Driver 2.0.
-func (t *StructCodecOptions) SetDecodeDeepZeroInline(b bool) *StructCodecOptions {
- t.DecodeDeepZeroInline = &b
- return t
-}
-
-// SetEncodeOmitDefaultStruct specifies if default structs should be considered empty by omitempty. A default struct has all
-// its values set to their default value. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.OmitZeroStruct] instead.
-func (t *StructCodecOptions) SetEncodeOmitDefaultStruct(b bool) *StructCodecOptions {
- t.EncodeOmitDefaultStruct = &b
- return t
-}
-
-// SetOverwriteDuplicatedInlinedFields specifies if inlined struct fields can be overwritten by higher level struct fields with the
-// same bson key. When true and decoding, values will be written to the outermost struct with a matching key, and when
-// encoding, keys will have the value of the top-most matching field. When false, decoding and encoding will error if
-// there are duplicate keys after the struct is inlined. Defaults to true.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.ErrorOnInlineDuplicates] instead.
-func (t *StructCodecOptions) SetOverwriteDuplicatedInlinedFields(b bool) *StructCodecOptions {
- t.OverwriteDuplicatedInlinedFields = &b
- return t
-}
-
-// SetAllowUnexportedFields specifies if unexported fields should be marshaled/unmarshaled. Defaults to false.
-//
-// Deprecated: AllowUnexportedFields does not work on recent versions of Go and will not be
-// supported in Go Driver 2.0.
-func (t *StructCodecOptions) SetAllowUnexportedFields(b bool) *StructCodecOptions {
- t.AllowUnexportedFields = &b
- return t
-}
-
-// MergeStructCodecOptions combines the given *StructCodecOptions into a single *StructCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeStructCodecOptions(opts ...*StructCodecOptions) *StructCodecOptions {
- s := &StructCodecOptions{
- OverwriteDuplicatedInlinedFields: &defaultOverwriteDuplicatedInlinedFields,
- }
- for _, opt := range opts {
- if opt == nil {
- continue
- }
-
- if opt.DecodeZeroStruct != nil {
- s.DecodeZeroStruct = opt.DecodeZeroStruct
- }
- if opt.DecodeDeepZeroInline != nil {
- s.DecodeDeepZeroInline = opt.DecodeDeepZeroInline
- }
- if opt.EncodeOmitDefaultStruct != nil {
- s.EncodeOmitDefaultStruct = opt.EncodeOmitDefaultStruct
- }
- if opt.OverwriteDuplicatedInlinedFields != nil {
- s.OverwriteDuplicatedInlinedFields = opt.OverwriteDuplicatedInlinedFields
- }
- if opt.AllowUnexportedFields != nil {
- s.AllowUnexportedFields = opt.AllowUnexportedFields
- }
- }
-
- return s
-}
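Editor's note: the replacements named in the deprecation notices above live on bson.Encoder and bson.Decoder. A rough sketch of the OmitZeroStruct path using those methods; the buffer/value-writer plumbing is my assumption of standard driver usage, not taken from this diff:

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsonrw"
)

type Inner struct {
	N int `bson:"n"`
}

type Outer struct {
	Name  string `bson:"name"`
	Inner Inner  `bson:"inner,omitempty"`
}

func main() {
	buf := new(bytes.Buffer)
	vw, err := bsonrw.NewBSONValueWriter(buf)
	if err != nil {
		log.Fatal(err)
	}
	enc, err := bson.NewEncoder(vw)
	if err != nil {
		log.Fatal(err)
	}
	enc.OmitZeroStruct() // replaces StructCodecOptions.SetEncodeOmitDefaultStruct
	if err := enc.Encode(Outer{Name: "x"}); err != nil {
		log.Fatal(err)
	}
	fmt.Println(bson.Raw(buf.Bytes())) // "inner" is omitted because Inner is its zero value
}
```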
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go
deleted file mode 100644
index 3f38433d22..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-// TimeCodecOptions represents all possible options for time.Time encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type TimeCodecOptions struct {
- UseLocalTimeZone *bool // Specifies if we should decode into the local time zone. Defaults to false.
-}
-
-// TimeCodec creates a new *TimeCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func TimeCodec() *TimeCodecOptions {
- return &TimeCodecOptions{}
-}
-
-// SetUseLocalTimeZone specifies if we should decode into the local time zone. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Decoder.UseLocalTimeZone] instead.
-func (t *TimeCodecOptions) SetUseLocalTimeZone(b bool) *TimeCodecOptions {
- t.UseLocalTimeZone = &b
- return t
-}
-
-// MergeTimeCodecOptions combines the given *TimeCodecOptions into a single *TimeCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeTimeCodecOptions(opts ...*TimeCodecOptions) *TimeCodecOptions {
- t := TimeCodec()
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.UseLocalTimeZone != nil {
- t.UseLocalTimeZone = opt.UseLocalTimeZone
- }
- }
-
- return t
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go
deleted file mode 100644
index 5091e4d963..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonoptions
-
-// UIntCodecOptions represents all possible options for uint encoding and decoding.
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-type UIntCodecOptions struct {
- EncodeToMinSize *bool // Specifies if all uints except uint64 should be decoded to minimum size bsontype. Defaults to false.
-}
-
-// UIntCodec creates a new *UIntCodecOptions
-//
-// Deprecated: Use the bson.Encoder and bson.Decoder configuration methods to set the desired BSON marshal
-// and unmarshal behavior instead.
-func UIntCodec() *UIntCodecOptions {
- return &UIntCodecOptions{}
-}
-
-// SetEncodeToMinSize specifies if all uints except uint64 should be decoded to minimum size bsontype. Defaults to false.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.Encoder.IntMinSize] instead.
-func (u *UIntCodecOptions) SetEncodeToMinSize(b bool) *UIntCodecOptions {
- u.EncodeToMinSize = &b
- return u
-}
-
-// MergeUIntCodecOptions combines the given *UIntCodecOptions into a single *UIntCodecOptions in a last one wins fashion.
-//
-// Deprecated: Merging options structs will not be supported in Go Driver 2.0. Users should create a
-// single options struct instead.
-func MergeUIntCodecOptions(opts ...*UIntCodecOptions) *UIntCodecOptions {
- u := UIntCodec()
- for _, opt := range opts {
- if opt == nil {
- continue
- }
- if opt.EncodeToMinSize != nil {
- u.EncodeToMinSize = opt.EncodeToMinSize
- }
- }
-
- return u
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go
deleted file mode 100644
index 1e25570b85..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go
+++ /dev/null
@@ -1,489 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonrw
-
-import (
- "errors"
- "fmt"
- "io"
-
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
- "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
-)
-
-// Copier is a type that allows copying between ValueReaders, ValueWriters, and
-// []byte values.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-type Copier struct{}
-
-// NewCopier creates a new copier with the given registry. If a nil registry is provided
-// a default registry is used.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func NewCopier() Copier {
- return Copier{}
-}
-
-// CopyDocument handles copying a document from src to dst.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func CopyDocument(dst ValueWriter, src ValueReader) error {
- return Copier{}.CopyDocument(dst, src)
-}
-
-// CopyDocument handles copying one document from the src to the dst.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) CopyDocument(dst ValueWriter, src ValueReader) error {
- dr, err := src.ReadDocument()
- if err != nil {
- return err
- }
-
- dw, err := dst.WriteDocument()
- if err != nil {
- return err
- }
-
- return c.copyDocumentCore(dw, dr)
-}
-
-// CopyArrayFromBytes copies the values from a BSON array represented as a
-// []byte to a ValueWriter.
-//
-// Deprecated: Copying BSON arrays using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) CopyArrayFromBytes(dst ValueWriter, src []byte) error {
- aw, err := dst.WriteArray()
- if err != nil {
- return err
- }
-
- err = c.CopyBytesToArrayWriter(aw, src)
- if err != nil {
- return err
- }
-
- return aw.WriteArrayEnd()
-}
-
-// CopyDocumentFromBytes copies the values from a BSON document represented as a
-// []byte to a ValueWriter.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) CopyDocumentFromBytes(dst ValueWriter, src []byte) error {
- dw, err := dst.WriteDocument()
- if err != nil {
- return err
- }
-
- err = c.CopyBytesToDocumentWriter(dw, src)
- if err != nil {
- return err
- }
-
- return dw.WriteDocumentEnd()
-}
-
-type writeElementFn func(key string) (ValueWriter, error)
-
-// CopyBytesToArrayWriter copies the values from a BSON Array represented as a []byte to an
-// ArrayWriter.
-//
-// Deprecated: Copying BSON arrays using the ArrayWriter interface will not be supported in Go
-// Driver 2.0.
-func (c Copier) CopyBytesToArrayWriter(dst ArrayWriter, src []byte) error {
- wef := func(_ string) (ValueWriter, error) {
- return dst.WriteArrayElement()
- }
-
- return c.copyBytesToValueWriter(src, wef)
-}
-
-// CopyBytesToDocumentWriter copies the values from a BSON document represented as a []byte to a
-// DocumentWriter.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) CopyBytesToDocumentWriter(dst DocumentWriter, src []byte) error {
- wef := func(key string) (ValueWriter, error) {
- return dst.WriteDocumentElement(key)
- }
-
- return c.copyBytesToValueWriter(src, wef)
-}
-
-func (c Copier) copyBytesToValueWriter(src []byte, wef writeElementFn) error {
- // TODO(skriptble): Create errors types here. Anything that is a tag should be a property.
- length, rem, ok := bsoncore.ReadLength(src)
- if !ok {
- return fmt.Errorf("couldn't read length from src, not enough bytes. length=%d", len(src))
- }
- if len(src) < int(length) {
- return fmt.Errorf("length read exceeds number of bytes available. length=%d bytes=%d", len(src), length)
- }
- rem = rem[:length-4]
-
- var t bsontype.Type
- var key string
- var val bsoncore.Value
- for {
- t, rem, ok = bsoncore.ReadType(rem)
- if !ok {
- return io.EOF
- }
- if t == bsontype.Type(0) {
- if len(rem) != 0 {
- return fmt.Errorf("document end byte found before end of document. remaining bytes=%v", rem)
- }
- break
- }
-
- key, rem, ok = bsoncore.ReadKey(rem)
- if !ok {
- return fmt.Errorf("invalid key found. remaining bytes=%v", rem)
- }
-
- // write as either array element or document element using writeElementFn
- vw, err := wef(key)
- if err != nil {
- return err
- }
-
- val, rem, ok = bsoncore.ReadValue(rem, t)
- if !ok {
- return fmt.Errorf("not enough bytes available to read type. bytes=%d type=%s", len(rem), t)
- }
- err = c.CopyValueFromBytes(vw, t, val.Data)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-// CopyDocumentToBytes copies an entire document from the ValueReader and
-// returns it as bytes.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) CopyDocumentToBytes(src ValueReader) ([]byte, error) {
- return c.AppendDocumentBytes(nil, src)
-}
-
-// AppendDocumentBytes functions the same as CopyDocumentToBytes, but will
-// append the result to dst.
-//
-// Deprecated: Copying BSON documents using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) AppendDocumentBytes(dst []byte, src ValueReader) ([]byte, error) {
- if br, ok := src.(BytesReader); ok {
- _, dst, err := br.ReadValueBytes(dst)
- return dst, err
- }
-
- vw := vwPool.Get().(*valueWriter)
- defer putValueWriter(vw)
-
- vw.reset(dst)
-
- err := c.CopyDocument(vw, src)
- dst = vw.buf
- return dst, err
-}
-
-// AppendArrayBytes copies an array from the ValueReader to dst.
-//
-// Deprecated: Copying BSON arrays using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) AppendArrayBytes(dst []byte, src ValueReader) ([]byte, error) {
- if br, ok := src.(BytesReader); ok {
- _, dst, err := br.ReadValueBytes(dst)
- return dst, err
- }
-
- vw := vwPool.Get().(*valueWriter)
- defer putValueWriter(vw)
-
- vw.reset(dst)
-
- err := c.copyArray(vw, src)
- dst = vw.buf
- return dst, err
-}
-
-// CopyValueFromBytes will write the value represtend by t and src to dst.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.UnmarshalValue] instead.
-func (c Copier) CopyValueFromBytes(dst ValueWriter, t bsontype.Type, src []byte) error {
- if wvb, ok := dst.(BytesWriter); ok {
- return wvb.WriteValueBytes(t, src)
- }
-
- vr := vrPool.Get().(*valueReader)
- defer vrPool.Put(vr)
-
- vr.reset(src)
- vr.pushElement(t)
-
- return c.CopyValue(dst, vr)
-}
-
-// CopyValueToBytes copies a value from src and returns it as a bsontype.Type and a
-// []byte.
-//
-// Deprecated: Use [go.mongodb.org/mongo-driver/bson.MarshalValue] instead.
-func (c Copier) CopyValueToBytes(src ValueReader) (bsontype.Type, []byte, error) {
- return c.AppendValueBytes(nil, src)
-}
-
-// AppendValueBytes functions the same as CopyValueToBytes, but will append the
-// result to dst.
-//
-// Deprecated: Appending individual BSON elements to an existing slice will not be supported in Go
-// Driver 2.0.
-func (c Copier) AppendValueBytes(dst []byte, src ValueReader) (bsontype.Type, []byte, error) {
- if br, ok := src.(BytesReader); ok {
- return br.ReadValueBytes(dst)
- }
-
- vw := vwPool.Get().(*valueWriter)
- defer putValueWriter(vw)
-
- start := len(dst)
-
- vw.reset(dst)
- vw.push(mElement)
-
- err := c.CopyValue(vw, src)
- if err != nil {
- return 0, dst, err
- }
-
- return bsontype.Type(vw.buf[start]), vw.buf[start+2:], nil
-}
-
-// CopyValue will copy a single value from src to dst.
-//
-// Deprecated: Copying BSON values using the ValueWriter and ValueReader interfaces will not be
-// supported in Go Driver 2.0.
-func (c Copier) CopyValue(dst ValueWriter, src ValueReader) error {
- var err error
- switch src.Type() {
- case bsontype.Double:
- var f64 float64
- f64, err = src.ReadDouble()
- if err != nil {
- break
- }
- err = dst.WriteDouble(f64)
- case bsontype.String:
- var str string
- str, err = src.ReadString()
- if err != nil {
- return err
- }
- err = dst.WriteString(str)
- case bsontype.EmbeddedDocument:
- err = c.CopyDocument(dst, src)
- case bsontype.Array:
- err = c.copyArray(dst, src)
- case bsontype.Binary:
- var data []byte
- var subtype byte
- data, subtype, err = src.ReadBinary()
- if err != nil {
- break
- }
- err = dst.WriteBinaryWithSubtype(data, subtype)
- case bsontype.Undefined:
- err = src.ReadUndefined()
- if err != nil {
- break
- }
- err = dst.WriteUndefined()
- case bsontype.ObjectID:
- var oid primitive.ObjectID
- oid, err = src.ReadObjectID()
- if err != nil {
- break
- }
- err = dst.WriteObjectID(oid)
- case bsontype.Boolean:
- var b bool
- b, err = src.ReadBoolean()
- if err != nil {
- break
- }
- err = dst.WriteBoolean(b)
- case bsontype.DateTime:
- var dt int64
- dt, err = src.ReadDateTime()
- if err != nil {
- break
- }
- err = dst.WriteDateTime(dt)
- case bsontype.Null:
- err = src.ReadNull()
- if err != nil {
- break
- }
- err = dst.WriteNull()
- case bsontype.Regex:
- var pattern, options string
- pattern, options, err = src.ReadRegex()
- if err != nil {
- break
- }
- err = dst.WriteRegex(pattern, options)
- case bsontype.DBPointer:
- var ns string
- var pointer primitive.ObjectID
- ns, pointer, err = src.ReadDBPointer()
- if err != nil {
- break
- }
- err = dst.WriteDBPointer(ns, pointer)
- case bsontype.JavaScript:
- var js string
- js, err = src.ReadJavascript()
- if err != nil {
- break
- }
- err = dst.WriteJavascript(js)
- case bsontype.Symbol:
- var symbol string
- symbol, err = src.ReadSymbol()
- if err != nil {
- break
- }
- err = dst.WriteSymbol(symbol)
- case bsontype.CodeWithScope:
- var code string
- var srcScope DocumentReader
- code, srcScope, err = src.ReadCodeWithScope()
- if err != nil {
- break
- }
-
- var dstScope DocumentWriter
- dstScope, err = dst.WriteCodeWithScope(code)
- if err != nil {
- break
- }
- err = c.copyDocumentCore(dstScope, srcScope)
- case bsontype.Int32:
- var i32 int32
- i32, err = src.ReadInt32()
- if err != nil {
- break
- }
- err = dst.WriteInt32(i32)
- case bsontype.Timestamp:
- var t, i uint32
- t, i, err = src.ReadTimestamp()
- if err != nil {
- break
- }
- err = dst.WriteTimestamp(t, i)
- case bsontype.Int64:
- var i64 int64
- i64, err = src.ReadInt64()
- if err != nil {
- break
- }
- err = dst.WriteInt64(i64)
- case bsontype.Decimal128:
- var d128 primitive.Decimal128
- d128, err = src.ReadDecimal128()
- if err != nil {
- break
- }
- err = dst.WriteDecimal128(d128)
- case bsontype.MinKey:
- err = src.ReadMinKey()
- if err != nil {
- break
- }
- err = dst.WriteMinKey()
- case bsontype.MaxKey:
- err = src.ReadMaxKey()
- if err != nil {
- break
- }
- err = dst.WriteMaxKey()
- default:
- err = fmt.Errorf("Cannot copy unknown BSON type %s", src.Type())
- }
-
- return err
-}
-
-func (c Copier) copyArray(dst ValueWriter, src ValueReader) error {
- ar, err := src.ReadArray()
- if err != nil {
- return err
- }
-
- aw, err := dst.WriteArray()
- if err != nil {
- return err
- }
-
- for {
- vr, err := ar.ReadValue()
- if errors.Is(err, ErrEOA) {
- break
- }
- if err != nil {
- return err
- }
-
- vw, err := aw.WriteArrayElement()
- if err != nil {
- return err
- }
-
- err = c.CopyValue(vw, vr)
- if err != nil {
- return err
- }
- }
-
- return aw.WriteArrayEnd()
-}
-
-func (c Copier) copyDocumentCore(dw DocumentWriter, dr DocumentReader) error {
- for {
- key, vr, err := dr.ReadElement()
- if errors.Is(err, ErrEOD) {
- break
- }
- if err != nil {
- return err
- }
-
- vw, err := dw.WriteDocumentElement(key)
- if err != nil {
- return err
- }
-
- err = c.CopyValue(vw, vr)
- if err != nil {
- return err
- }
- }
-
- return dw.WriteDocumentEnd()
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go
deleted file mode 100644
index 750b0d2af5..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-// Package bsonrw contains abstractions for reading and writing
-// BSON and BSON like types from sources.
-package bsonrw // import "go.mongodb.org/mongo-driver/bson/bsonrw"
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go
deleted file mode 100644
index f0702d9d30..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go
+++ /dev/null
@@ -1,806 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonrw
-
-import (
- "encoding/base64"
- "encoding/hex"
- "errors"
- "fmt"
- "io"
- "strings"
-
- "go.mongodb.org/mongo-driver/bson/bsontype"
-)
-
-const maxNestingDepth = 200
-
-// ErrInvalidJSON indicates the JSON input is invalid
-var ErrInvalidJSON = errors.New("invalid JSON input")
-
-type jsonParseState byte
-
-const (
- jpsStartState jsonParseState = iota
- jpsSawBeginObject
- jpsSawEndObject
- jpsSawBeginArray
- jpsSawEndArray
- jpsSawColon
- jpsSawComma
- jpsSawKey
- jpsSawValue
- jpsDoneState
- jpsInvalidState
-)
-
-type jsonParseMode byte
-
-const (
- jpmInvalidMode jsonParseMode = iota
- jpmObjectMode
- jpmArrayMode
-)
-
-type extJSONValue struct {
- t bsontype.Type
- v interface{}
-}
-
-type extJSONObject struct {
- keys []string
- values []*extJSONValue
-}
-
-type extJSONParser struct {
- js *jsonScanner
- s jsonParseState
- m []jsonParseMode
- k string
- v *extJSONValue
-
- err error
- canonical bool
- depth int
- maxDepth int
-
- emptyObject bool
- relaxedUUID bool
-}
-
-// newExtJSONParser returns a new extended JSON parser, ready to to begin
-// parsing from the first character of the argued json input. It will not
-// perform any read-ahead and will therefore not report any errors about
-// malformed JSON at this point.
-func newExtJSONParser(r io.Reader, canonical bool) *extJSONParser {
- return &extJSONParser{
- js: &jsonScanner{r: r},
- s: jpsStartState,
- m: []jsonParseMode{},
- canonical: canonical,
- maxDepth: maxNestingDepth,
- }
-}
-
-// peekType examines the next value and returns its BSON Type
-func (ejp *extJSONParser) peekType() (bsontype.Type, error) {
- var t bsontype.Type
- var err error
- initialState := ejp.s
-
- ejp.advanceState()
- switch ejp.s {
- case jpsSawValue:
- t = ejp.v.t
- case jpsSawBeginArray:
- t = bsontype.Array
- case jpsInvalidState:
- err = ejp.err
- case jpsSawComma:
- // in array mode, seeing a comma means we need to progress again to actually observe a type
- if ejp.peekMode() == jpmArrayMode {
- return ejp.peekType()
- }
- case jpsSawEndArray:
- // this would only be a valid state if we were in array mode, so return end-of-array error
- err = ErrEOA
- case jpsSawBeginObject:
- // peek key to determine type
- ejp.advanceState()
- switch ejp.s {
- case jpsSawEndObject: // empty embedded document
- t = bsontype.EmbeddedDocument
- ejp.emptyObject = true
- case jpsInvalidState:
- err = ejp.err
- case jpsSawKey:
- if initialState == jpsStartState {
- return bsontype.EmbeddedDocument, nil
- }
- t = wrapperKeyBSONType(ejp.k)
-
- // if $uuid is encountered, parse as binary subtype 4
- if ejp.k == "$uuid" {
- ejp.relaxedUUID = true
- t = bsontype.Binary
- }
-
- switch t {
- case bsontype.JavaScript:
- // just saw $code, need to check for $scope at same level
- _, err = ejp.readValue(bsontype.JavaScript)
- if err != nil {
- break
- }
-
- switch ejp.s {
- case jpsSawEndObject: // type is TypeJavaScript
- case jpsSawComma:
- ejp.advanceState()
-
- if ejp.s == jpsSawKey && ejp.k == "$scope" {
- t = bsontype.CodeWithScope
- } else {
- err = fmt.Errorf("invalid extended JSON: unexpected key %s in CodeWithScope object", ejp.k)
- }
- case jpsInvalidState:
- err = ejp.err
- default:
- err = ErrInvalidJSON
- }
- case bsontype.CodeWithScope:
- err = errors.New("invalid extended JSON: code with $scope must contain $code before $scope")
- }
- }
- }
-
- return t, err
-}
-
-// readKey parses the next key and its type and returns them
-func (ejp *extJSONParser) readKey() (string, bsontype.Type, error) {
- if ejp.emptyObject {
- ejp.emptyObject = false
- return "", 0, ErrEOD
- }
-
- // advance to key (or return with error)
- switch ejp.s {
- case jpsStartState:
- ejp.advanceState()
- if ejp.s == jpsSawBeginObject {
- ejp.advanceState()
- }
- case jpsSawBeginObject:
- ejp.advanceState()
- case jpsSawValue, jpsSawEndObject, jpsSawEndArray:
- ejp.advanceState()
- switch ejp.s {
- case jpsSawBeginObject, jpsSawComma:
- ejp.advanceState()
- case jpsSawEndObject:
- return "", 0, ErrEOD
- case jpsDoneState:
- return "", 0, io.EOF
- case jpsInvalidState:
- return "", 0, ejp.err
- default:
- return "", 0, ErrInvalidJSON
- }
- case jpsSawKey: // do nothing (key was peeked before)
- default:
- return "", 0, invalidRequestError("key")
- }
-
- // read key
- var key string
-
- switch ejp.s {
- case jpsSawKey:
- key = ejp.k
- case jpsSawEndObject:
- return "", 0, ErrEOD
- case jpsInvalidState:
- return "", 0, ejp.err
- default:
- return "", 0, invalidRequestError("key")
- }
-
- // check for colon
- ejp.advanceState()
- if err := ensureColon(ejp.s, key); err != nil {
- return "", 0, err
- }
-
- // peek at the value to determine type
- t, err := ejp.peekType()
- if err != nil {
- return "", 0, err
- }
-
- return key, t, nil
-}
-
-// readValue returns the value corresponding to the Type returned by peekType
-func (ejp *extJSONParser) readValue(t bsontype.Type) (*extJSONValue, error) {
- if ejp.s == jpsInvalidState {
- return nil, ejp.err
- }
-
- var v *extJSONValue
-
- switch t {
- case bsontype.Null, bsontype.Boolean, bsontype.String:
- if ejp.s != jpsSawValue {
- return nil, invalidRequestError(t.String())
- }
- v = ejp.v
- case bsontype.Int32, bsontype.Int64, bsontype.Double:
- // relaxed version allows these to be literal number values
- if ejp.s == jpsSawValue {
- v = ejp.v
- break
- }
- fallthrough
- case bsontype.Decimal128, bsontype.Symbol, bsontype.ObjectID, bsontype.MinKey, bsontype.MaxKey, bsontype.Undefined:
- switch ejp.s {
- case jpsSawKey:
- // read colon
- ejp.advanceState()
- if err := ensureColon(ejp.s, ejp.k); err != nil {
- return nil, err
- }
-
- // read value
- ejp.advanceState()
- if ejp.s != jpsSawValue || !ejp.ensureExtValueType(t) {
- return nil, invalidJSONErrorForType("value", t)
- }
-
- v = ejp.v
-
- // read end object
- ejp.advanceState()
- if ejp.s != jpsSawEndObject {
- return nil, invalidJSONErrorForType("} after value", t)
- }
- default:
- return nil, invalidRequestError(t.String())
- }
- case bsontype.Binary, bsontype.Regex, bsontype.Timestamp, bsontype.DBPointer:
- if ejp.s != jpsSawKey {
- return nil, invalidRequestError(t.String())
- }
- // read colon
- ejp.advanceState()
- if err := ensureColon(ejp.s, ejp.k); err != nil {
- return nil, err
- }
-
- ejp.advanceState()
- if t == bsontype.Binary && ejp.s == jpsSawValue {
- // convert relaxed $uuid format
- if ejp.relaxedUUID {
- defer func() { ejp.relaxedUUID = false }()
- uuid, err := ejp.v.parseSymbol()
- if err != nil {
- return nil, err
- }
-
- // RFC 4122 defines the length of a UUID as 36 and the hyphens in a UUID as appearing
- // in the 8th, 13th, 18th, and 23rd characters.
- //
- // See https://tools.ietf.org/html/rfc4122#section-3
- valid := len(uuid) == 36 &&
- string(uuid[8]) == "-" &&
- string(uuid[13]) == "-" &&
- string(uuid[18]) == "-" &&
- string(uuid[23]) == "-"
- if !valid {
- return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens")
- }
-
- // remove hyphens
- uuidNoHyphens := strings.ReplaceAll(uuid, "-", "")
- if len(uuidNoHyphens) != 32 {
- return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens")
- }
-
- // convert hex to bytes
- bytes, err := hex.DecodeString(uuidNoHyphens)
- if err != nil {
- return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding hex bytes: %w", err)
- }
-
- ejp.advanceState()
- if ejp.s != jpsSawEndObject {
- return nil, invalidJSONErrorForType("$uuid and value and then }", bsontype.Binary)
- }
-
- base64 := &extJSONValue{
- t: bsontype.String,
- v: base64.StdEncoding.EncodeToString(bytes),
- }
- subType := &extJSONValue{
- t: bsontype.String,
- v: "04",
- }
-
- v = &extJSONValue{
- t: bsontype.EmbeddedDocument,
- v: &extJSONObject{
- keys: []string{"base64", "subType"},
- values: []*extJSONValue{base64, subType},
- },
- }
-
- break
- }
-
- // convert legacy $binary format
- base64 := ejp.v
-
- ejp.advanceState()
- if ejp.s != jpsSawComma {
- return nil, invalidJSONErrorForType(",", bsontype.Binary)
- }
-
- ejp.advanceState()
- key, t, err := ejp.readKey()
- if err != nil {
- return nil, err
- }
- if key != "$type" {
- return nil, invalidJSONErrorForType("$type", bsontype.Binary)
- }
-
- subType, err := ejp.readValue(t)
- if err != nil {
- return nil, err
- }
-
- ejp.advanceState()
- if ejp.s != jpsSawEndObject {
- return nil, invalidJSONErrorForType("2 key-value pairs and then }", bsontype.Binary)
- }
-
- v = &extJSONValue{
- t: bsontype.EmbeddedDocument,
- v: &extJSONObject{
- keys: []string{"base64", "subType"},
- values: []*extJSONValue{base64, subType},
- },
- }
- break
- }
-
- // read KV pairs
- if ejp.s != jpsSawBeginObject {
- return nil, invalidJSONErrorForType("{", t)
- }
-
- keys, vals, err := ejp.readObject(2, true)
- if err != nil {
- return nil, err
- }
-
- ejp.advanceState()
- if ejp.s != jpsSawEndObject {
- return nil, invalidJSONErrorForType("2 key-value pairs and then }", t)
- }
-
- v = &extJSONValue{t: bsontype.EmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}}
-
- case bsontype.DateTime:
- switch ejp.s {
- case jpsSawValue:
- v = ejp.v
- case jpsSawKey:
- // read colon
- ejp.advanceState()
- if err := ensureColon(ejp.s, ejp.k); err != nil {
- return nil, err
- }
-
- ejp.advanceState()
- switch ejp.s {
- case jpsSawBeginObject:
- keys, vals, err := ejp.readObject(1, true)
- if err != nil {
- return nil, err
- }
- v = &extJSONValue{t: bsontype.EmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}}
- case jpsSawValue:
- if ejp.canonical {
- return nil, invalidJSONError("{")
- }
- v = ejp.v
- default:
- if ejp.canonical {
- return nil, invalidJSONErrorForType("object", t)
- }
- return nil, invalidJSONErrorForType("ISO-8601 Internet Date/Time Format as described in RFC-3339", t)
- }
-
- ejp.advanceState()
- if ejp.s != jpsSawEndObject {
- return nil, invalidJSONErrorForType("value and then }", t)
- }
- default:
- return nil, invalidRequestError(t.String())
- }
- case bsontype.JavaScript:
- switch ejp.s {
- case jpsSawKey:
- // read colon
- ejp.advanceState()
- if err := ensureColon(ejp.s, ejp.k); err != nil {
- return nil, err
- }
-
- // read value
- ejp.advanceState()
- if ejp.s != jpsSawValue {
- return nil, invalidJSONErrorForType("value", t)
- }
- v = ejp.v
-
- // read end object or comma and just return
- ejp.advanceState()
- case jpsSawEndObject:
- v = ejp.v
- default:
- return nil, invalidRequestError(t.String())
- }
- case bsontype.CodeWithScope:
- if ejp.s == jpsSawKey && ejp.k == "$scope" {
- v = ejp.v // this is the $code string from earlier
-
- // read colon
- ejp.advanceState()
- if err := ensureColon(ejp.s, ejp.k); err != nil {
- return nil, err
- }
-
- // read {
- ejp.advanceState()
- if ejp.s != jpsSawBeginObject {
- return nil, invalidJSONError("$scope to be embedded document")
- }
- } else {
- return nil, invalidRequestError(t.String())
- }
- case bsontype.EmbeddedDocument, bsontype.Array:
- return nil, invalidRequestError(t.String())
- }
-
- return v, nil
-}
-
-// readObject is a utility method for reading full objects of known (or expected) size
-// it is useful for extended JSON types such as binary, datetime, regex, and timestamp
-func (ejp *extJSONParser) readObject(numKeys int, started bool) ([]string, []*extJSONValue, error) {
- keys := make([]string, numKeys)
- vals := make([]*extJSONValue, numKeys)
-
- if !started {
- ejp.advanceState()
- if ejp.s != jpsSawBeginObject {
- return nil, nil, invalidJSONError("{")
- }
- }
-
- for i := 0; i < numKeys; i++ {
- key, t, err := ejp.readKey()
- if err != nil {
- return nil, nil, err
- }
-
- switch ejp.s {
- case jpsSawKey:
- v, err := ejp.readValue(t)
- if err != nil {
- return nil, nil, err
- }
-
- keys[i] = key
- vals[i] = v
- case jpsSawValue:
- keys[i] = key
- vals[i] = ejp.v
- default:
- return nil, nil, invalidJSONError("value")
- }
- }
-
- ejp.advanceState()
- if ejp.s != jpsSawEndObject {
- return nil, nil, invalidJSONError("}")
- }
-
- return keys, vals, nil
-}
-
-// advanceState reads the next JSON token from the scanner and transitions
-// from the current state based on that token's type
-func (ejp *extJSONParser) advanceState() {
- if ejp.s == jpsDoneState || ejp.s == jpsInvalidState {
- return
- }
-
- jt, err := ejp.js.nextToken()
-
- if err != nil {
- ejp.err = err
- ejp.s = jpsInvalidState
- return
- }
-
- valid := ejp.validateToken(jt.t)
- if !valid {
- ejp.err = unexpectedTokenError(jt)
- ejp.s = jpsInvalidState
- return
- }
-
- switch jt.t {
- case jttBeginObject:
- ejp.s = jpsSawBeginObject
- ejp.pushMode(jpmObjectMode)
- ejp.depth++
-
- if ejp.depth > ejp.maxDepth {
- ejp.err = nestingDepthError(jt.p, ejp.depth)
- ejp.s = jpsInvalidState
- }
- case jttEndObject:
- ejp.s = jpsSawEndObject
- ejp.depth--
-
- if ejp.popMode() != jpmObjectMode {
- ejp.err = unexpectedTokenError(jt)
- ejp.s = jpsInvalidState
- }
- case jttBeginArray:
- ejp.s = jpsSawBeginArray
- ejp.pushMode(jpmArrayMode)
- case jttEndArray:
- ejp.s = jpsSawEndArray
-
- if ejp.popMode() != jpmArrayMode {
- ejp.err = unexpectedTokenError(jt)
- ejp.s = jpsInvalidState
- }
- case jttColon:
- ejp.s = jpsSawColon
- case jttComma:
- ejp.s = jpsSawComma
- case jttEOF:
- ejp.s = jpsDoneState
- if len(ejp.m) != 0 {
- ejp.err = unexpectedTokenError(jt)
- ejp.s = jpsInvalidState
- }
- case jttString:
- switch ejp.s {
- case jpsSawComma:
- if ejp.peekMode() == jpmArrayMode {
- ejp.s = jpsSawValue
- ejp.v = extendJSONToken(jt)
- return
- }
- fallthrough
- case jpsSawBeginObject:
- ejp.s = jpsSawKey
- ejp.k = jt.v.(string)
- return
- }
- fallthrough
- default:
- ejp.s = jpsSawValue
- ejp.v = extendJSONToken(jt)
- }
-}
-
-var jpsValidTransitionTokens = map[jsonParseState]map[jsonTokenType]bool{
- jpsStartState: {
- jttBeginObject: true,
- jttBeginArray: true,
- jttInt32: true,
- jttInt64: true,
- jttDouble: true,
- jttString: true,
- jttBool: true,
- jttNull: true,
- jttEOF: true,
- },
- jpsSawBeginObject: {
- jttEndObject: true,
- jttString: true,
- },
- jpsSawEndObject: {
- jttEndObject: true,
- jttEndArray: true,
- jttComma: true,
- jttEOF: true,
- },
- jpsSawBeginArray: {
- jttBeginObject: true,
- jttBeginArray: true,
- jttEndArray: true,
- jttInt32: true,
- jttInt64: true,
- jttDouble: true,
- jttString: true,
- jttBool: true,
- jttNull: true,
- },
- jpsSawEndArray: {
- jttEndObject: true,
- jttEndArray: true,
- jttComma: true,
- jttEOF: true,
- },
- jpsSawColon: {
- jttBeginObject: true,
- jttBeginArray: true,
- jttInt32: true,
- jttInt64: true,
- jttDouble: true,
- jttString: true,
- jttBool: true,
- jttNull: true,
- },
- jpsSawComma: {
- jttBeginObject: true,
- jttBeginArray: true,
- jttInt32: true,
- jttInt64: true,
- jttDouble: true,
- jttString: true,
- jttBool: true,
- jttNull: true,
- },
- jpsSawKey: {
- jttColon: true,
- },
- jpsSawValue: {
- jttEndObject: true,
- jttEndArray: true,
- jttComma: true,
- jttEOF: true,
- },
- jpsDoneState: {},
- jpsInvalidState: {},
-}
-
-func (ejp *extJSONParser) validateToken(jtt jsonTokenType) bool {
- switch ejp.s {
- case jpsSawEndObject:
- // if we are at depth zero and the next token is a '{',
- // we can consider it valid only if we are not in array mode.
- if jtt == jttBeginObject && ejp.depth == 0 {
- return ejp.peekMode() != jpmArrayMode
- }
- case jpsSawComma:
- switch ejp.peekMode() {
- // the only valid next token after a comma inside a document is a string (a key)
- case jpmObjectMode:
- return jtt == jttString
- case jpmInvalidMode:
- return false
- }
- }
-
- _, ok := jpsValidTransitionTokens[ejp.s][jtt]
- return ok
-}
-
-// ensureExtValueType returns true if the current value has the expected
-// value type for single-key extended JSON types. For example,
-// {"$numberInt": v} v must be TypeString
-func (ejp *extJSONParser) ensureExtValueType(t bsontype.Type) bool {
- switch t {
- case bsontype.MinKey, bsontype.MaxKey:
- return ejp.v.t == bsontype.Int32
- case bsontype.Undefined:
- return ejp.v.t == bsontype.Boolean
- case bsontype.Int32, bsontype.Int64, bsontype.Double, bsontype.Decimal128, bsontype.Symbol, bsontype.ObjectID:
- return ejp.v.t == bsontype.String
- default:
- return false
- }
-}
-
-func (ejp *extJSONParser) pushMode(m jsonParseMode) {
- ejp.m = append(ejp.m, m)
-}
-
-func (ejp *extJSONParser) popMode() jsonParseMode {
- l := len(ejp.m)
- if l == 0 {
- return jpmInvalidMode
- }
-
- m := ejp.m[l-1]
- ejp.m = ejp.m[:l-1]
-
- return m
-}
-
-func (ejp *extJSONParser) peekMode() jsonParseMode {
- l := len(ejp.m)
- if l == 0 {
- return jpmInvalidMode
- }
-
- return ejp.m[l-1]
-}
-
-func extendJSONToken(jt *jsonToken) *extJSONValue {
- var t bsontype.Type
-
- switch jt.t {
- case jttInt32:
- t = bsontype.Int32
- case jttInt64:
- t = bsontype.Int64
- case jttDouble:
- t = bsontype.Double
- case jttString:
- t = bsontype.String
- case jttBool:
- t = bsontype.Boolean
- case jttNull:
- t = bsontype.Null
- default:
- return nil
- }
-
- return &extJSONValue{t: t, v: jt.v}
-}
-
-func ensureColon(s jsonParseState, key string) error {
- if s != jpsSawColon {
- return fmt.Errorf("invalid JSON input: missing colon after key \"%s\"", key)
- }
-
- return nil
-}
-
-func invalidRequestError(s string) error {
- return fmt.Errorf("invalid request to read %s", s)
-}
-
-func invalidJSONError(expected string) error {
- return fmt.Errorf("invalid JSON input; expected %s", expected)
-}
-
-func invalidJSONErrorForType(expected string, t bsontype.Type) error {
- return fmt.Errorf("invalid JSON input; expected %s for %s", expected, t)
-}
-
-func unexpectedTokenError(jt *jsonToken) error {
- switch jt.t {
- case jttInt32, jttInt64, jttDouble:
- return fmt.Errorf("invalid JSON input; unexpected number (%v) at position %d", jt.v, jt.p)
- case jttString:
- return fmt.Errorf("invalid JSON input; unexpected string (\"%v\") at position %d", jt.v, jt.p)
- case jttBool:
- return fmt.Errorf("invalid JSON input; unexpected boolean literal (%v) at position %d", jt.v, jt.p)
- case jttNull:
- return fmt.Errorf("invalid JSON input; unexpected null literal at position %d", jt.p)
- case jttEOF:
- return fmt.Errorf("invalid JSON input; unexpected end of input at position %d", jt.p)
- default:
- return fmt.Errorf("invalid JSON input; unexpected %c at position %d", jt.v.(byte), jt.p)
- }
-}
-
-func nestingDepthError(p, depth int) error {
- return fmt.Errorf("invalid JSON input; nesting too deep (%d levels) at position %d", depth, p)
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go
deleted file mode 100644
index 59ddfc4485..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go
+++ /dev/null
@@ -1,653 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-
-package bsonrw
-
-import (
- "errors"
- "fmt"
- "io"
- "sync"
-
- "go.mongodb.org/mongo-driver/bson/bsontype"
- "go.mongodb.org/mongo-driver/bson/primitive"
-)
-
-// ExtJSONValueReaderPool is a pool for ValueReaders that read ExtJSON.
-//
-// Deprecated: ExtJSONValueReaderPool will not be supported in Go Driver 2.0.
-type ExtJSONValueReaderPool struct {
- pool sync.Pool
-}
-
-// NewExtJSONValueReaderPool instantiates a new ExtJSONValueReaderPool.
-//
-// Deprecated: ExtJSONValueReaderPool will not be supported in Go Driver 2.0.
-func NewExtJSONValueReaderPool() *ExtJSONValueReaderPool {
- return &ExtJSONValueReaderPool{
- pool: sync.Pool{
- New: func() interface{} {
- return new(extJSONValueReader)
- },
- },
- }
-}
-
-// Get retrieves a ValueReader from the pool and uses src as the underlying ExtJSON.
-//
-// Deprecated: ExtJSONValueReaderPool will not be supported in Go Driver 2.0.
-func (bvrp *ExtJSONValueReaderPool) Get(r io.Reader, canonical bool) (ValueReader, error) {
- vr := bvrp.pool.Get().(*extJSONValueReader)
- return vr.reset(r, canonical)
-}
-
-// Put inserts a ValueReader into the pool. If the ValueReader is not a ExtJSON ValueReader nothing
-// is inserted into the pool and ok will be false.
-//
-// Deprecated: ExtJSONValueReaderPool will not be supported in Go Driver 2.0.
-func (bvrp *ExtJSONValueReaderPool) Put(vr ValueReader) (ok bool) {
- bvr, ok := vr.(*extJSONValueReader)
- if !ok {
- return false
- }
-
- bvr, _ = bvr.reset(nil, false)
- bvrp.pool.Put(bvr)
- return true
-}
-
-type ejvrState struct {
- mode mode
- vType bsontype.Type
- depth int
-}
-
-// extJSONValueReader is for reading extended JSON.
-type extJSONValueReader struct {
- p *extJSONParser
-
- stack []ejvrState
- frame int
-}
-
-// NewExtJSONValueReader creates a new ValueReader from a given io.Reader
-// It will interpret the JSON of r as canonical or relaxed according to the
-// given canonical flag
-func NewExtJSONValueReader(r io.Reader, canonical bool) (ValueReader, error) {
- return newExtJSONValueReader(r, canonical)
-}
-
-func newExtJSONValueReader(r io.Reader, canonical bool) (*extJSONValueReader, error) {
- ejvr := new(extJSONValueReader)
- return ejvr.reset(r, canonical)
-}
-
-func (ejvr *extJSONValueReader) reset(r io.Reader, canonical bool) (*extJSONValueReader, error) {
- p := newExtJSONParser(r, canonical)
- typ, err := p.peekType()
-
- if err != nil {
- return nil, ErrInvalidJSON
- }
-
- var m mode
- switch typ {
- case bsontype.EmbeddedDocument:
- m = mTopLevel
- case bsontype.Array:
- m = mArray
- default:
- m = mValue
- }
-
- stack := make([]ejvrState, 1, 5)
- stack[0] = ejvrState{
- mode: m,
- vType: typ,
- }
- return &extJSONValueReader{
- p: p,
- stack: stack,
- }, nil
-}
-
-func (ejvr *extJSONValueReader) advanceFrame() {
- if ejvr.frame+1 >= len(ejvr.stack) { // We need to grow the stack
- length := len(ejvr.stack)
- if length+1 >= cap(ejvr.stack) {
- // double it
- buf := make([]ejvrState, 2*cap(ejvr.stack)+1)
- copy(buf, ejvr.stack)
- ejvr.stack = buf
- }
- ejvr.stack = ejvr.stack[:length+1]
- }
- ejvr.frame++
-
- // Clean the stack
- ejvr.stack[ejvr.frame].mode = 0
- ejvr.stack[ejvr.frame].vType = 0
- ejvr.stack[ejvr.frame].depth = 0
-}
-
-func (ejvr *extJSONValueReader) pushDocument() {
- ejvr.advanceFrame()
-
- ejvr.stack[ejvr.frame].mode = mDocument
- ejvr.stack[ejvr.frame].depth = ejvr.p.depth
-}
-
-func (ejvr *extJSONValueReader) pushCodeWithScope() {
- ejvr.advanceFrame()
-
- ejvr.stack[ejvr.frame].mode = mCodeWithScope
-}
-
-func (ejvr *extJSONValueReader) pushArray() {
- ejvr.advanceFrame()
-
- ejvr.stack[ejvr.frame].mode = mArray
-}
-
-func (ejvr *extJSONValueReader) push(m mode, t bsontype.Type) {
- ejvr.advanceFrame()
-
- ejvr.stack[ejvr.frame].mode = m
- ejvr.stack[ejvr.frame].vType = t
-}
-
-func (ejvr *extJSONValueReader) pop() {
- switch ejvr.stack[ejvr.frame].mode {
- case mElement, mValue:
- ejvr.frame--
- case mDocument, mArray, mCodeWithScope:
- ejvr.frame -= 2 // we pop twice to jump over the vrElement: vrDocument -> vrElement -> vrDocument/TopLevel/etc...
- }
-}
-
-func (ejvr *extJSONValueReader) skipObject() {
- // read entire object until depth returns to 0 (last ending } or ] seen)
- depth := 1
- for depth > 0 {
- ejvr.p.advanceState()
-
- // If object is empty, raise depth and continue. When emptyObject is true, the
- // parser has already read both the opening and closing brackets of an empty
- // object ("{}"), so the next valid token will be part of the parent document,
- // not part of the nested document.
- //
- // If there is a comma, there are remaining fields, emptyObject must be set back
- // to false, and comma must be skipped with advanceState().
- if ejvr.p.emptyObject {
- if ejvr.p.s == jpsSawComma {
- ejvr.p.emptyObject = false
- ejvr.p.advanceState()
- }
- depth--
- continue
- }
-
- switch ejvr.p.s {
- case jpsSawBeginObject, jpsSawBeginArray:
- depth++
- case jpsSawEndObject, jpsSawEndArray:
- depth--
- }
- }
-}
-
-func (ejvr *extJSONValueReader) invalidTransitionErr(destination mode, name string, modes []mode) error {
- te := TransitionError{
- name: name,
- current: ejvr.stack[ejvr.frame].mode,
- destination: destination,
- modes: modes,
- action: "read",
- }
- if ejvr.frame != 0 {
- te.parent = ejvr.stack[ejvr.frame-1].mode
- }
- return te
-}
-
-func (ejvr *extJSONValueReader) typeError(t bsontype.Type) error {
- return fmt.Errorf("positioned on %s, but attempted to read %s", ejvr.stack[ejvr.frame].vType, t)
-}
-
-func (ejvr *extJSONValueReader) ensureElementValue(t bsontype.Type, destination mode, callerName string, addModes ...mode) error {
- switch ejvr.stack[ejvr.frame].mode {
- case mElement, mValue:
- if ejvr.stack[ejvr.frame].vType != t {
- return ejvr.typeError(t)
- }
- default:
- modes := []mode{mElement, mValue}
- if addModes != nil {
- modes = append(modes, addModes...)
- }
- return ejvr.invalidTransitionErr(destination, callerName, modes)
- }
-
- return nil
-}
-
-func (ejvr *extJSONValueReader) Type() bsontype.Type {
- return ejvr.stack[ejvr.frame].vType
-}
-
-func (ejvr *extJSONValueReader) Skip() error {
- switch ejvr.stack[ejvr.frame].mode {
- case mElement, mValue:
- default:
- return ejvr.invalidTransitionErr(0, "Skip", []mode{mElement, mValue})
- }
-
- defer ejvr.pop()
-
- t := ejvr.stack[ejvr.frame].vType
- switch t {
- case bsontype.Array, bsontype.EmbeddedDocument, bsontype.CodeWithScope:
- // read entire array, doc or CodeWithScope
- ejvr.skipObject()
- default:
- _, err := ejvr.p.readValue(t)
- if err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func (ejvr *extJSONValueReader) ReadArray() (ArrayReader, error) {
- switch ejvr.stack[ejvr.frame].mode {
- case mTopLevel: // allow reading array from top level
- case mArray:
- return ejvr, nil
- default:
- if err := ejvr.ensureElementValue(bsontype.Array, mArray, "ReadArray", mTopLevel, mArray); err != nil {
- return nil, err
- }
- }
-
- ejvr.pushArray()
-
- return ejvr, nil
-}
-
-func (ejvr *extJSONValueReader) ReadBinary() (b []byte, btype byte, err error) {
- if err := ejvr.ensureElementValue(bsontype.Binary, 0, "ReadBinary"); err != nil {
- return nil, 0, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Binary)
- if err != nil {
- return nil, 0, err
- }
-
- b, btype, err = v.parseBinary()
-
- ejvr.pop()
- return b, btype, err
-}
-
-func (ejvr *extJSONValueReader) ReadBoolean() (bool, error) {
- if err := ejvr.ensureElementValue(bsontype.Boolean, 0, "ReadBoolean"); err != nil {
- return false, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Boolean)
- if err != nil {
- return false, err
- }
-
- if v.t != bsontype.Boolean {
- return false, fmt.Errorf("expected type bool, but got type %s", v.t)
- }
-
- ejvr.pop()
- return v.v.(bool), nil
-}
-
-func (ejvr *extJSONValueReader) ReadDocument() (DocumentReader, error) {
- switch ejvr.stack[ejvr.frame].mode {
- case mTopLevel:
- return ejvr, nil
- case mElement, mValue:
- if ejvr.stack[ejvr.frame].vType != bsontype.EmbeddedDocument {
- return nil, ejvr.typeError(bsontype.EmbeddedDocument)
- }
-
- ejvr.pushDocument()
- return ejvr, nil
- default:
- return nil, ejvr.invalidTransitionErr(mDocument, "ReadDocument", []mode{mTopLevel, mElement, mValue})
- }
-}
-
-func (ejvr *extJSONValueReader) ReadCodeWithScope() (code string, dr DocumentReader, err error) {
- if err = ejvr.ensureElementValue(bsontype.CodeWithScope, 0, "ReadCodeWithScope"); err != nil {
- return "", nil, err
- }
-
- v, err := ejvr.p.readValue(bsontype.CodeWithScope)
- if err != nil {
- return "", nil, err
- }
-
- code, err = v.parseJavascript()
-
- ejvr.pushCodeWithScope()
- return code, ejvr, err
-}
-
-func (ejvr *extJSONValueReader) ReadDBPointer() (ns string, oid primitive.ObjectID, err error) {
- if err = ejvr.ensureElementValue(bsontype.DBPointer, 0, "ReadDBPointer"); err != nil {
- return "", primitive.NilObjectID, err
- }
-
- v, err := ejvr.p.readValue(bsontype.DBPointer)
- if err != nil {
- return "", primitive.NilObjectID, err
- }
-
- ns, oid, err = v.parseDBPointer()
-
- ejvr.pop()
- return ns, oid, err
-}
-
-func (ejvr *extJSONValueReader) ReadDateTime() (int64, error) {
- if err := ejvr.ensureElementValue(bsontype.DateTime, 0, "ReadDateTime"); err != nil {
- return 0, err
- }
-
- v, err := ejvr.p.readValue(bsontype.DateTime)
- if err != nil {
- return 0, err
- }
-
- d, err := v.parseDateTime()
-
- ejvr.pop()
- return d, err
-}
-
-func (ejvr *extJSONValueReader) ReadDecimal128() (primitive.Decimal128, error) {
- if err := ejvr.ensureElementValue(bsontype.Decimal128, 0, "ReadDecimal128"); err != nil {
- return primitive.Decimal128{}, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Decimal128)
- if err != nil {
- return primitive.Decimal128{}, err
- }
-
- d, err := v.parseDecimal128()
-
- ejvr.pop()
- return d, err
-}
-
-func (ejvr *extJSONValueReader) ReadDouble() (float64, error) {
- if err := ejvr.ensureElementValue(bsontype.Double, 0, "ReadDouble"); err != nil {
- return 0, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Double)
- if err != nil {
- return 0, err
- }
-
- d, err := v.parseDouble()
-
- ejvr.pop()
- return d, err
-}
-
-func (ejvr *extJSONValueReader) ReadInt32() (int32, error) {
- if err := ejvr.ensureElementValue(bsontype.Int32, 0, "ReadInt32"); err != nil {
- return 0, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Int32)
- if err != nil {
- return 0, err
- }
-
- i, err := v.parseInt32()
-
- ejvr.pop()
- return i, err
-}
-
-func (ejvr *extJSONValueReader) ReadInt64() (int64, error) {
- if err := ejvr.ensureElementValue(bsontype.Int64, 0, "ReadInt64"); err != nil {
- return 0, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Int64)
- if err != nil {
- return 0, err
- }
-
- i, err := v.parseInt64()
-
- ejvr.pop()
- return i, err
-}
-
-func (ejvr *extJSONValueReader) ReadJavascript() (code string, err error) {
- if err = ejvr.ensureElementValue(bsontype.JavaScript, 0, "ReadJavascript"); err != nil {
- return "", err
- }
-
- v, err := ejvr.p.readValue(bsontype.JavaScript)
- if err != nil {
- return "", err
- }
-
- code, err = v.parseJavascript()
-
- ejvr.pop()
- return code, err
-}
-
-func (ejvr *extJSONValueReader) ReadMaxKey() error {
- if err := ejvr.ensureElementValue(bsontype.MaxKey, 0, "ReadMaxKey"); err != nil {
- return err
- }
-
- v, err := ejvr.p.readValue(bsontype.MaxKey)
- if err != nil {
- return err
- }
-
- err = v.parseMinMaxKey("max")
-
- ejvr.pop()
- return err
-}
-
-func (ejvr *extJSONValueReader) ReadMinKey() error {
- if err := ejvr.ensureElementValue(bsontype.MinKey, 0, "ReadMinKey"); err != nil {
- return err
- }
-
- v, err := ejvr.p.readValue(bsontype.MinKey)
- if err != nil {
- return err
- }
-
- err = v.parseMinMaxKey("min")
-
- ejvr.pop()
- return err
-}
-
-func (ejvr *extJSONValueReader) ReadNull() error {
- if err := ejvr.ensureElementValue(bsontype.Null, 0, "ReadNull"); err != nil {
- return err
- }
-
- v, err := ejvr.p.readValue(bsontype.Null)
- if err != nil {
- return err
- }
-
- if v.t != bsontype.Null {
- return fmt.Errorf("expected type null but got type %s", v.t)
- }
-
- ejvr.pop()
- return nil
-}
-
-func (ejvr *extJSONValueReader) ReadObjectID() (primitive.ObjectID, error) {
- if err := ejvr.ensureElementValue(bsontype.ObjectID, 0, "ReadObjectID"); err != nil {
- return primitive.ObjectID{}, err
- }
-
- v, err := ejvr.p.readValue(bsontype.ObjectID)
- if err != nil {
- return primitive.ObjectID{}, err
- }
-
- oid, err := v.parseObjectID()
-
- ejvr.pop()
- return oid, err
-}
-
-func (ejvr *extJSONValueReader) ReadRegex() (pattern string, options string, err error) {
- if err = ejvr.ensureElementValue(bsontype.Regex, 0, "ReadRegex"); err != nil {
- return "", "", err
- }
-
- v, err := ejvr.p.readValue(bsontype.Regex)
- if err != nil {
- return "", "", err
- }
-
- pattern, options, err = v.parseRegex()
-
- ejvr.pop()
- return pattern, options, err
-}
-
-func (ejvr *extJSONValueReader) ReadString() (string, error) {
- if err := ejvr.ensureElementValue(bsontype.String, 0, "ReadString"); err != nil {
- return "", err
- }
-
- v, err := ejvr.p.readValue(bsontype.String)
- if err != nil {
- return "", err
- }
-
- if v.t != bsontype.String {
- return "", fmt.Errorf("expected type string but got type %s", v.t)
- }
-
- ejvr.pop()
- return v.v.(string), nil
-}
-
-func (ejvr *extJSONValueReader) ReadSymbol() (symbol string, err error) {
- if err = ejvr.ensureElementValue(bsontype.Symbol, 0, "ReadSymbol"); err != nil {
- return "", err
- }
-
- v, err := ejvr.p.readValue(bsontype.Symbol)
- if err != nil {
- return "", err
- }
-
- symbol, err = v.parseSymbol()
-
- ejvr.pop()
- return symbol, err
-}
-
-func (ejvr *extJSONValueReader) ReadTimestamp() (t uint32, i uint32, err error) {
- if err = ejvr.ensureElementValue(bsontype.Timestamp, 0, "ReadTimestamp"); err != nil {
- return 0, 0, err
- }
-
- v, err := ejvr.p.readValue(bsontype.Timestamp)
- if err != nil {
- return 0, 0, err
- }
-
- t, i, err = v.parseTimestamp()
-
- ejvr.pop()
- return t, i, err
-}
-
-func (ejvr *extJSONValueReader) ReadUndefined() error {
- if err := ejvr.ensureElementValue(bsontype.Undefined, 0, "ReadUndefined"); err != nil {
- return err
- }
-
- v, err := ejvr.p.readValue(bsontype.Undefined)
- if err != nil {
- return err
- }
-
- err = v.parseUndefined()
-
- ejvr.pop()
- return err
-}
-
-func (ejvr *extJSONValueReader) ReadElement() (string, ValueReader, error) {
- switch ejvr.stack[ejvr.frame].mode {
- case mTopLevel, mDocument, mCodeWithScope:
- default:
- return "", nil, ejvr.invalidTransitionErr(mElement, "ReadElement", []mode{mTopLevel, mDocument, mCodeWithScope})
- }
-
- name, t, err := ejvr.p.readKey()
-
- if err != nil {
- if errors.Is(err, ErrEOD) {
- if ejvr.stack[ejvr.frame].mode == mCodeWithScope {
- _, err := ejvr.p.peekType()
- if err != nil {
- return "", nil, err
- }
- }
-
- ejvr.pop()
- }
-
- return "", nil, err
- }
-
- ejvr.push(mElement, t)
- return name, ejvr, nil
-}
-
-func (ejvr *extJSONValueReader) ReadValue() (ValueReader, error) {
- switch ejvr.stack[ejvr.frame].mode {
- case mArray:
- default:
- return nil, ejvr.invalidTransitionErr(mValue, "ReadValue", []mode{mArray})
- }
-
- t, err := ejvr.p.peekType()
- if err != nil {
- if errors.Is(err, ErrEOA) {
- ejvr.pop()
- }
-
- return nil, err
- }
-
- ejvr.push(mValue, t)
- return ejvr, nil
-}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go
deleted file mode 100644
index ba39c9601f..0000000000
--- a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go
+++ /dev/null
@@ -1,223 +0,0 @@
-// Copyright (C) MongoDB, Inc. 2017-present.
-//
-// Licensed under the Apache License, Version 2.0 (the "License"); you may
-// not use this file except in compliance with the License. You may obtain
-// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
-//
-// Based on github.com/golang/go by The Go Authors
-// See THIRD-PARTY-NOTICES for original license terms.
-
-package bsonrw
-
-import "unicode/utf8"
-
-// safeSet holds the value true if the ASCII character with the given array
-// position can be represented inside a JSON string without any further
-// escaping.
-//
-// All values are true except for the ASCII control characters (0-31), the
-// double quote ("), and the backslash character ("\").
-var safeSet = [utf8.RuneSelf]bool{
- ' ': true,
- '!': true,
- '"': false,
- '#': true,
- '$': true,
- '%': true,
- '&': true,
- '\'': true,
- '(': true,
- ')': true,
- '*': true,
- '+': true,
- ',': true,
- '-': true,
- '.': true,
- '/': true,
- '0': true,
- '1': true,
- '2': true,
- '3': true,
- '4': true,
- '5': true,
- '6': true,
- '7': true,
- '8': true,
- '9': true,
- ':': true,
- ';': true,
- '<': true,
- '=': true,
- '>': true,
- '?': true,
- '@': true,
- 'A': true,
- 'B': true,
- 'C': true,
- 'D': true,
- 'E': true,
- 'F': true,
- 'G': true,
- 'H': true,
- 'I': true,
- 'J': true,
- 'K': true,
- 'L': true,
- 'M': true,
- 'N': true,
- 'O': true,
- 'P': true,
- 'Q': true,
- 'R': true,
- 'S': true,
- 'T': true,
- 'U': true,
- 'V': true,
- 'W': true,
- 'X': true,
- 'Y': true,
- 'Z': true,
- '[': true,
- '\\': false,
- ']': true,
- '^': true,
- '_': true,
- '`': true,
- 'a': true,
- 'b': true,
- 'c': true,
- 'd': true,
- 'e': true,
- 'f': true,
- 'g': true,
- 'h': true,
- 'i': true,
- 'j': true,
- 'k': true,
- 'l': true,
- 'm': true,
- 'n': true,
- 'o': true,
- 'p': true,
- 'q': true,
- 'r': true,
- 's': true,
- 't': true,
- 'u': true,
- 'v': true,
- 'w': true,
- 'x': true,
- 'y': true,
- 'z': true,
- '{': true,
- '|': true,
- '}': true,
- '~': true,
- '\u007f': true,
-}
-
-// htmlSafeSet holds the value true if the ASCII character with the given
-// array position can be safely represented inside a JSON string, embedded
-// inside of HTML