mirror of https://github.com/knative/client.git
Remove replace of go-openapi/spec from go.mod (#1443)
This commit is contained in:
parent 7e90dca8f3
commit 817068bd7f

@@ -30,6 +30,7 @@ kn broker describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file|url.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -26,6 +26,7 @@ kn broker list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -24,6 +24,7 @@ kn channel describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file|url.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -25,6 +25,7 @@ kn channel list-types
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -26,6 +26,7 @@ kn channel list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -21,6 +21,7 @@ kn domain describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file|url.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -26,6 +26,7 @@ kn domain list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -13,6 +13,7 @@ kn revision describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -37,6 +37,7 @@ kn revision list
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
   -s, --service string Service name
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -13,6 +13,7 @@ kn route describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -29,6 +29,7 @@ kn route list NAME
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -32,6 +32,7 @@ kn service describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file|url.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --target string Work on local directory instead of a remote cluster (experimental)
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.

@@ -31,6 +31,7 @@ kn service export NAME
       --mode string Format for exporting all routed revisions. One of replay|export (experimental)
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
       --with-revisions Export all routed revisions (experimental)
 ```

@@ -35,6 +35,7 @@ kn service list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --target string Work on local directory instead of a remote cluster (experimental)
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -24,6 +24,7 @@ kn source apiserver describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -26,6 +26,7 @@ kn source apiserver list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -24,6 +24,7 @@ kn source binding describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -26,6 +26,7 @@ kn source binding list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -26,6 +26,7 @@ kn source container list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -25,6 +25,7 @@ kn source list-types
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -29,6 +29,7 @@ kn source list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -t, --type strings Filter list on given source type. This flag can be given multiple times.
 ```

@@ -24,6 +24,7 @@ kn source ping describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -26,6 +26,7 @@ kn source ping list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -21,6 +21,7 @@ kn subscription describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -26,6 +26,7 @@ kn subscription list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

@@ -24,6 +24,7 @@ kn trigger describe NAME
   -h, --help help for describe
   -n, --namespace string Specify the namespace to operate in.
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
   -v, --verbose More output.
 ```

@@ -26,6 +26,7 @@ kn trigger list
   -n, --namespace string Specify the namespace to operate in.
       --no-headers When using the default output format, don't print headers (default: print headers).
   -o, --output string Output format. One of: json|yaml|name|go-template|go-template-file|template|templatefile|jsonpath|jsonpath-as-json|jsonpath-file.
+      --show-managed-fields If true, keep the managedFields when printing objects in JSON or YAML format.
       --template string Template string or path to template file to use when -o=go-template, -o=go-template-file. The template format is golang templates [http://golang.org/pkg/text/template/#pkg-overview].
 ```

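Every documentation hunk above records the same addition: a `--show-managed-fields` flag on the kn describe/list commands, presumably surfaced by the k8s.io/cli-runtime bump in go.mod below. As a rough illustration of what the flag controls, `metadata.managedFields` is the server-side-apply bookkeeping the API server attaches to objects; when the flag is off, a printer drops it before marshalling to JSON or YAML. The following is a hypothetical, self-contained Go sketch (not the kn source; `printObject` and the `demo` ConfigMap are made up for illustration):

```go
// Hypothetical sketch of the --show-managed-fields behaviour, not kn's implementation.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"sigs.k8s.io/yaml"
)

// printObject marshals the object to YAML, stripping metadata.managedFields
// unless showManagedFields is true.
func printObject(cm *corev1.ConfigMap, showManagedFields bool) {
	if !showManagedFields {
		cm.ManagedFields = nil // drop the server-side-apply bookkeeping
	}
	out, err := yaml.Marshal(cm)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}

func main() {
	cm := &corev1.ConfigMap{
		ObjectMeta: metav1.ObjectMeta{
			Name: "demo",
			ManagedFields: []metav1.ManagedFieldsEntry{
				{Manager: "kn", Operation: metav1.ManagedFieldsOperationUpdate},
			},
		},
		Data: map[string]string{"greeting": "hello"},
	}
	printObject(cm, false) // default behaviour: managedFields omitted from the output
}
```
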
go.mod (4 changes)

@@ -15,7 +15,7 @@ require (
   k8s.io/api v0.21.4
   k8s.io/apiextensions-apiserver v0.21.4
   k8s.io/apimachinery v0.21.4
-  k8s.io/cli-runtime v0.20.7
+  k8s.io/cli-runtime v0.21.4
   k8s.io/client-go v0.21.4
   k8s.io/code-generator v0.21.4
   knative.dev/eventing v0.25.1-0.20210827141738-ea5ed9adf51f
@@ -25,5 +25,3 @@ require (
   knative.dev/serving v0.25.1-0.20210827140938-e6a7166509e6
   sigs.k8s.io/yaml v1.2.0
 )
-
-replace github.com/go-openapi/spec => github.com/go-openapi/spec v0.19.3

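The second go.mod hunk is the change the commit title refers to: the `replace` directive pinning github.com/go-openapi/spec to v0.19.3 is dropped. For reference, a minimal go.mod sketch of how such a pin behaves (hypothetical module path, shown only for illustration); with the directive gone, Go's ordinary minimal version selection picks whatever version the require graph asks for, which is what the go.sum churn below appears to reflect:

```
module example.com/kn-demo // hypothetical module, for illustration only

go 1.16

require github.com/go-openapi/spec v0.20.2

// While this directive is present, every build resolves go-openapi/spec to
// v0.19.3 regardless of the require graph; deleting it (as this commit does)
// lets version selection follow the graph, e.g. to v0.20.2 above.
replace github.com/go-openapi/spec => github.com/go-openapi/spec v0.19.3
```
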
go.sum (89 changes)

@@ -108,6 +108,7 @@ github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb0
 github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
 github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
 github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
 github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
 github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=

@@ -118,6 +119,7 @@ github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWX
 github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
 github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g=
 github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
+github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
 github.com/ahmetb/gen-crd-api-reference-docs v0.3.1-0.20210420163308-c1402a70e2f1/go.mod h1:TdjdkYhlOifCQWPs1UdTma97kQQMozf5h26hTuG70u8=
 github.com/ahmetb/gen-crd-api-reference-docs v0.3.1-0.20210609063737-0067dc6dcea2/go.mod h1:TdjdkYhlOifCQWPs1UdTma97kQQMozf5h26hTuG70u8=
 github.com/alecthomas/jsonschema v0.0.0-20180308105923-f2c93856175a/go.mod h1:qpebaTNSsyUn5rPSJMsfqEtDw71TTggXM6stUDI16HA=

@@ -127,6 +129,7 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF
 github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
 github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0=
+github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
 github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
 github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=

@@ -135,6 +138,7 @@ github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5
 github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
 github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
+github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
 github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
 github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
 github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=

@@ -331,6 +335,7 @@ github.com/docker/go-events v0.0.0-20170721190031-9461782956ad/go.mod h1:Uw6Uezg
 github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=
 github.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI=
 github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=
+github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
 github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
 github.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=
 github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=

@@ -356,6 +361,7 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.m
 github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
 github.com/evanphx/json-patch v0.5.2/go.mod h1:ZWS5hhDbVDyob71nXKNL0+PWn6ToqBHMikGIFbs31qQ=
+github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/evanphx/json-patch v4.9.0+incompatible h1:kLcOMZeuLAJvL2BPWLMIj5oaZQobrkAqrL+WFZwQses=
 github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
 github.com/evanphx/json-patch/v5 v5.5.0 h1:bAmFiUJ+o0o2B4OiTFeE3MqCOtyo+jjPP9iZ0VRxYUc=

@@ -371,8 +377,11 @@ github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4
 github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA=
 github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
 github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
+github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
+github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
+github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
 github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=

@@ -390,27 +399,66 @@ github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7
 github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
 github.com/go-logr/logr v0.4.0 h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc=
 github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
+github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI=
+github.com/go-openapi/analysis v0.17.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
+github.com/go-openapi/analysis v0.18.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
+github.com/go-openapi/analysis v0.19.2/go.mod h1:3P1osvZa9jKjb8ed2TPng3f0i/UY9snX6gxi44djMjk=
+github.com/go-openapi/analysis v0.19.5/go.mod h1:hkEAkxagaIvIP7VTn8ygJNkd4kAYON2rCu0v0ObL0AU=
+github.com/go-openapi/errors v0.17.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0=
+github.com/go-openapi/errors v0.18.0/go.mod h1:LcZQpmvG4wyF5j4IhA73wkLFQg+QJXOQHVjmcZxhka0=
+github.com/go-openapi/errors v0.19.2/go.mod h1:qX0BLWsyaKfvhluLejVpVNwNRdXZhEbTA4kxxpKBC94=
 github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
+github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
+github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
 github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
 github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
 github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
 github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
 github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
+github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
+github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
 github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
 github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
 github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM=
 github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
-github.com/go-openapi/spec v0.19.3 h1:0XRyw8kguri6Yw4SxhsQA/atC88yqrk0+G4YhI2wabc=
+github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
+github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
+github.com/go-openapi/loads v0.19.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
+github.com/go-openapi/loads v0.19.2/go.mod h1:QAskZPMX5V0C2gvfkGZzJlINuP7Hx/4+ix5jWFxsNPs=
+github.com/go-openapi/loads v0.19.4/go.mod h1:zZVHonKd8DXyxyw4yfnVjPzBjIQcLt0CCsn0N0ZrQsk=
+github.com/go-openapi/runtime v0.0.0-20180920151709-4f900dc2ade9/go.mod h1:6v9a6LTXWQCdL8k1AO3cvqx5OtZY/Y9wKTgaoP6YRfA=
+github.com/go-openapi/runtime v0.19.0/go.mod h1:OwNfisksmmaZse4+gpV3Ne9AyMOlP1lt4sK4FXt0O64=
+github.com/go-openapi/runtime v0.19.4/go.mod h1:X277bwSUBxVlCYR3r7xgZZGKVvBd/29gLDlFGtJ8NL4=
+github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
+github.com/go-openapi/spec v0.17.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI=
+github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI=
+github.com/go-openapi/spec v0.19.2/go.mod h1:sCxk3jxKgioEJikev4fgkNmwS+3kuYdJtcsZsD5zxMY=
 github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo=
+github.com/go-openapi/spec v0.19.5/go.mod h1:Hm2Jr4jv8G1ciIAo+frC/Ft+rR2kQDh8JHKHb3gWUSk=
+github.com/go-openapi/spec v0.19.6/go.mod h1:Hm2Jr4jv8G1ciIAo+frC/Ft+rR2kQDh8JHKHb3gWUSk=
+github.com/go-openapi/spec v0.20.2 h1:pFPUZsiIbZ20kLUcuCGeuQWG735fPMxW7wHF9BWlnQU=
+github.com/go-openapi/spec v0.20.2/go.mod h1:RW6Xcbs6LOyWLU/mXGdzn2Qc+3aj+ASfI7rvSZh1Vls=
+github.com/go-openapi/strfmt v0.17.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU=
+github.com/go-openapi/strfmt v0.18.0/go.mod h1:P82hnJI0CXkErkXi8IKjPbNBM6lV6+5pLP5l494TcyU=
+github.com/go-openapi/strfmt v0.19.0/go.mod h1:+uW+93UVvGGq2qGaZxdDeJqSAqBqBdl+ZPMF/cC8nDY=
+github.com/go-openapi/strfmt v0.19.3/go.mod h1:0yX7dbo8mKIvc3XSKp7MNfxw4JytCfCD6+bY1AVL9LU=
+github.com/go-openapi/strfmt v0.19.5/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk0dgdHXr2Qk=
 github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
+github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
+github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
 github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
 github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.19.13/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
 github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
 github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4=
+github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA=
+github.com/go-openapi/validate v0.19.8/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85nY1c2z52x1Gk4=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
 github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/gobuffalo/flect v0.2.3/go.mod h1:vmkQwuZYhN5Pc4ljYQZzP+1sq+NEkK+lh20jmEmX3jc=
+github.com/gobuffalo/here v0.6.0/go.mod h1:wAG085dHOYqUpf+Ap+WOdrPTp5IYcDAs/x7PLa8Y5fM=
 github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw=
 github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw=
 github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=

@@ -517,6 +565,8 @@ github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLe
 github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
 github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
 github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
 github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=

@ -658,12 +708,15 @@ github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czP
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/markbates/pkger v0.17.1/go.mod h1:0JoVlrol20BSywW79rN3kdFFsE5xYM+rSCQDXbLhiuI=
github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
@ -704,6 +757,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0=
github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
@ -879,6 +934,7 @@ github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdh
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
@ -937,6 +993,7 @@ github.com/streadway/quantile v0.0.0-20150917103942-b0c588724d25/go.mod h1:lbP8t
github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
@ -952,6 +1009,7 @@ github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tsenart/go-tsz v0.0.0-20180814232043-cdeb9e1e981e/go.mod h1:SWZznP1z5Ki7hDT2ioqiFKEse8K9tU2OUvaRI0NeGQo=
@ -967,6 +1025,7 @@ github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtX
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/vdemeester/k8s-pkg-credentialprovider v1.20.7/go.mod h1:K2nMO14cgZitdwBqdQps9tInJgcaXcU/7q5F59lpbNI=
github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw=
github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk=
github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=
github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho=
@ -981,6 +1040,8 @@ github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca h1:1CFlNzQhALwjS9mBAUkycX616GzgsuYUOCHA5+HSlXI=
github.com/xlab/treeprint v0.0.0-20181112141820-a009c3971eca/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@ -999,6 +1060,9 @@ go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3C
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mongodb.org/mongo-driver v1.1.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk=
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
@ -1012,6 +1076,8 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opentelemetry.io/otel v0.16.0/go.mod h1:e4GKElweB8W2gWUqbghw0B8t5MCTccc9212eNHnOHwA=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 h1:+FNtrFTmVw0YZGpBGX56XDee331t6JAXeK2bcyhLOOc=
go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5/go.mod h1:nmDLcffg48OtT/PSW0Hg7FvpRQsQh5OSqIylirxKC7o=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
@ -1039,9 +1105,11 @@ golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnf
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190829043050-9756ffdc2472/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@ -1097,6 +1165,7 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -1106,6 +1175,7 @@ golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@ -1193,6 +1263,7 @@ golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190321052220-f7bb7a8bee54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -1212,6 +1283,7 @@ golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191002063906-3421d5a6bb1c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -1306,6 +1378,7 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
@ -1317,6 +1390,7 @@ golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBn
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
@ -1558,6 +1632,7 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
@ -1602,8 +1677,8 @@ k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM=
k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q=
k8s.io/apiserver v0.20.7/go.mod h1:7gbB7UjDdP1/epYBGnIUE6jWY4Wpz99cZ7igfDa9rv4=
k8s.io/apiserver v0.21.4/go.mod h1:SErUuFBBPZUcD2nsUU8hItxoYheqyYr2o/pCINEPW8g=
k8s.io/cli-runtime v0.20.7 h1:0MPtbXb5ZyrAu+RxSMvRSOUHrwQO3wStTwKYCRAo7Os=
k8s.io/cli-runtime v0.21.4 h1:kvOzx6dKg+9wRuHTzSqo8tfTV6ixZCkmi+ag54s7mn8=
k8s.io/cli-runtime v0.20.7/go.mod h1:Cru9t2VsPNSxwWKKu6suwzkZSNkewRSH7Ru2rglFGn8=
k8s.io/cli-runtime v0.21.4/go.mod h1:eRbLHYkdVWzvG87yrkgGd8CqX6/+fAG9DTdAqTXmlRY=
k8s.io/client-go v0.19.7/go.mod h1:iytGI7S3kmv6bWnn+bSQUE4VlrEi4YFssvVB7J7Hvqg=
k8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y=
k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k=
@ -1679,8 +1754,10 @@ sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.9/go.mod h1:dzAXnQb
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.22/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=
sigs.k8s.io/kustomize v2.0.3+incompatible h1:JUufWFNlI44MdtnjUqVnvh29rR37PQFzPbLXqhyOyX0=
sigs.k8s.io/kustomize/api v0.8.8 h1:G2z6JPSSjtWWgMeWSoHdXqyftJNmMmyxXpwENGoOtGE=
sigs.k8s.io/kustomize v2.0.3+incompatible/go.mod h1:MkjgH3RdOWrievjo6c9T245dYlB5QeXV4WCbnt/PEpU=
sigs.k8s.io/kustomize/api v0.8.8/go.mod h1:He1zoK0nk43Pc6NlV085xDXDXTNprtcyKZVm3swsdNY=
sigs.k8s.io/kustomize/kyaml v0.10.17 h1:4zrV0ym5AYa0e512q7K3Wp1u7mzoWW0xR3UHJcGWGIg=
sigs.k8s.io/kustomize/kyaml v0.10.17/go.mod h1:mlQFagmkm1P+W4lZJbJ/yaxMd8PqMRSC4cPcfUVt5Hg=
sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
@ -1,22 +0,0 @@
Copyright (c) 2012,2013 Ernest Micklei

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -1,50 +0,0 @@
The MIT License (MIT)

Copyright (c) 2014 Sam Ghods

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


Copyright (c) 2012 The Go Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -0,0 +1,7 @@
Copyright (c) 2015 Conrad Irwin <conrad@bugsnag.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) [2015] [go-gitignore]

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -0,0 +1,27 @@
Copyright (c) 2013, Patrick Mezard
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
The names of its contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -0,0 +1,21 @@
MIT License

Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright © 2016 Maxim Kupriianov <max@kc.vc>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@ -0,0 +1,29 @@
Copyright (c) 2017 The Bazel Authors. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.

3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@ -0,0 +1,202 @@

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@ -0,0 +1,21 @@
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2018 QRI, Inc.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
# OSX leaves these everywhere on SMB shares
|
|
||||||
._*
|
|
||||||
|
|
||||||
# Eclipse files
|
|
||||||
.classpath
|
|
||||||
.project
|
|
||||||
.settings/**
|
|
||||||
|
|
||||||
# Emacs save files
|
|
||||||
*~
|
|
||||||
|
|
||||||
# Vim-related files
|
|
||||||
[._]*.s[a-w][a-z]
|
|
||||||
[._]s[a-w][a-z]
|
|
||||||
*.un~
|
|
||||||
Session.vim
|
|
||||||
.netrwhist
|
|
||||||
|
|
||||||
# Go test binaries
|
|
||||||
*.test
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
language: go
|
|
||||||
go:
|
|
||||||
- 1.3
|
|
||||||
- 1.4
|
|
||||||
script:
|
|
||||||
- go test
|
|
||||||
- go build
|
|
||||||
|
|
@ -1,50 +0,0 @@
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2014 Sam Ghods
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
|
|
||||||
|
|
||||||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are
|
|
||||||
met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above
|
|
||||||
copyright notice, this list of conditions and the following disclaimer
|
|
||||||
in the documentation and/or other materials provided with the
|
|
||||||
distribution.
|
|
||||||
* Neither the name of Google Inc. nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
@ -1,121 +0,0 @@
|
||||||
# YAML marshaling and unmarshaling support for Go
|
|
||||||
|
|
||||||
[](https://travis-ci.org/ghodss/yaml)
|
|
||||||
|
|
||||||
## Introduction
|
|
||||||
|
|
||||||
A wrapper around [go-yaml](https://github.com/go-yaml/yaml) designed to enable a better way of handling YAML when marshaling to and from structs.
|
|
||||||
|
|
||||||
In short, this library first converts YAML to JSON using go-yaml and then uses `json.Marshal` and `json.Unmarshal` to convert to or from the struct. This means that it effectively reuses the JSON struct tags as well as the custom JSON methods `MarshalJSON` and `UnmarshalJSON` unlike go-yaml. For a detailed overview of the rationale behind this method, [see this blog post](http://ghodss.com/2014/the-right-way-to-handle-yaml-in-golang/).
|
|
||||||
|
|
||||||
## Compatibility
|
|
||||||
|
|
||||||
This package uses [go-yaml](https://github.com/go-yaml/yaml) and therefore supports [everything go-yaml supports](https://github.com/go-yaml/yaml#compatibility).
|
|
||||||
|
|
||||||
## Caveats
|
|
||||||
|
|
||||||
**Caveat #1:** When using `yaml.Marshal` and `yaml.Unmarshal`, binary data should NOT be preceded with the `!!binary` YAML tag. If you do, go-yaml will convert the binary data from base64 to native binary data, which is not compatible with JSON. You can still use binary in your YAML files though - just store them without the `!!binary` tag and decode the base64 in your code (e.g. in the custom JSON methods `MarshalJSON` and `UnmarshalJSON`). This also has the benefit that your YAML and your JSON binary data will be decoded exactly the same way. As an example:
|
|
||||||
|
|
||||||
```
|
|
||||||
BAD:
|
|
||||||
exampleKey: !!binary gIGC
|
|
||||||
|
|
||||||
GOOD:
|
|
||||||
exampleKey: gIGC
|
|
||||||
... and decode the base64 data in your code.
|
|
||||||
```
|
|
||||||
|
|
||||||
**Caveat #2:** When using `YAMLToJSON` directly, maps with keys that are maps will result in an error since this is not supported by JSON. This error will occur in `Unmarshal` as well since you can't unmarshal map keys anyways since struct fields can't be keys.
|
|
||||||
|
|
||||||
## Installation and usage
|
|
||||||
|
|
||||||
To install, run:
|
|
||||||
|
|
||||||
```
|
|
||||||
$ go get github.com/ghodss/yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
And import using:
|
|
||||||
|
|
||||||
```
|
|
||||||
import "github.com/ghodss/yaml"
|
|
||||||
```
|
|
||||||
|
|
||||||
Usage is very similar to the JSON library:
|
|
||||||
|
|
||||||
```go
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/ghodss/yaml"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Person struct {
|
|
||||||
Name string `json:"name"` // Affects YAML field names too.
|
|
||||||
Age int `json:"age"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
// Marshal a Person struct to YAML.
|
|
||||||
p := Person{"John", 30}
|
|
||||||
y, err := yaml.Marshal(p)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Printf("err: %v\n", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fmt.Println(string(y))
|
|
||||||
/* Output:
|
|
||||||
age: 30
|
|
||||||
name: John
|
|
||||||
*/
|
|
||||||
|
|
||||||
// Unmarshal the YAML back into a Person struct.
|
|
||||||
var p2 Person
|
|
||||||
err = yaml.Unmarshal(y, &p2)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Printf("err: %v\n", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fmt.Println(p2)
|
|
||||||
/* Output:
|
|
||||||
{John 30}
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
`yaml.YAMLToJSON` and `yaml.JSONToYAML` methods are also available:
|
|
||||||
|
|
||||||
```go
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/ghodss/yaml"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
j := []byte(`{"name": "John", "age": 30}`)
|
|
||||||
y, err := yaml.JSONToYAML(j)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Printf("err: %v\n", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fmt.Println(string(y))
|
|
||||||
/* Output:
|
|
||||||
name: John
|
|
||||||
age: 30
|
|
||||||
*/
|
|
||||||
j2, err := yaml.YAMLToJSON(y)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Printf("err: %v\n", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fmt.Println(string(j2))
|
|
||||||
/* Output:
|
|
||||||
{"age":30,"name":"John"}
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
@ -1,501 +0,0 @@
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
package yaml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding"
|
|
||||||
"encoding/json"
|
|
||||||
"reflect"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// indirect walks down v allocating pointers as needed,
|
|
||||||
// until it gets to a non-pointer.
|
|
||||||
// if it encounters an Unmarshaler, indirect stops and returns that.
|
|
||||||
// if decodingNull is true, indirect stops at the last pointer so it can be set to nil.
|
|
||||||
func indirect(v reflect.Value, decodingNull bool) (json.Unmarshaler, encoding.TextUnmarshaler, reflect.Value) {
|
|
||||||
// If v is a named type and is addressable,
|
|
||||||
// start with its address, so that if the type has pointer methods,
|
|
||||||
// we find them.
|
|
||||||
if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() {
|
|
||||||
v = v.Addr()
|
|
||||||
}
|
|
||||||
for {
|
|
||||||
// Load value from interface, but only if the result will be
|
|
||||||
// usefully addressable.
|
|
||||||
if v.Kind() == reflect.Interface && !v.IsNil() {
|
|
||||||
e := v.Elem()
|
|
||||||
if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) {
|
|
||||||
v = e
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.Kind() != reflect.Ptr {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if v.IsNil() {
|
|
||||||
if v.CanSet() {
|
|
||||||
v.Set(reflect.New(v.Type().Elem()))
|
|
||||||
} else {
|
|
||||||
v = reflect.New(v.Type().Elem())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if v.Type().NumMethod() > 0 {
|
|
||||||
if u, ok := v.Interface().(json.Unmarshaler); ok {
|
|
||||||
return u, nil, reflect.Value{}
|
|
||||||
}
|
|
||||||
if u, ok := v.Interface().(encoding.TextUnmarshaler); ok {
|
|
||||||
return nil, u, reflect.Value{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
v = v.Elem()
|
|
||||||
}
|
|
||||||
return nil, nil, v
|
|
||||||
}
|
|
||||||
|
|
||||||
// A field represents a single field found in a struct.
|
|
||||||
type field struct {
|
|
||||||
name string
|
|
||||||
nameBytes []byte // []byte(name)
|
|
||||||
equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent
|
|
||||||
|
|
||||||
tag bool
|
|
||||||
index []int
|
|
||||||
typ reflect.Type
|
|
||||||
omitEmpty bool
|
|
||||||
quoted bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func fillField(f field) field {
|
|
||||||
f.nameBytes = []byte(f.name)
|
|
||||||
f.equalFold = foldFunc(f.nameBytes)
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
|
|
||||||
// byName sorts field by name, breaking ties with depth,
|
|
||||||
// then breaking ties with "name came from json tag", then
|
|
||||||
// breaking ties with index sequence.
|
|
||||||
type byName []field
|
|
||||||
|
|
||||||
func (x byName) Len() int { return len(x) }
|
|
||||||
|
|
||||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
|
|
||||||
func (x byName) Less(i, j int) bool {
|
|
||||||
if x[i].name != x[j].name {
|
|
||||||
return x[i].name < x[j].name
|
|
||||||
}
|
|
||||||
if len(x[i].index) != len(x[j].index) {
|
|
||||||
return len(x[i].index) < len(x[j].index)
|
|
||||||
}
|
|
||||||
if x[i].tag != x[j].tag {
|
|
||||||
return x[i].tag
|
|
||||||
}
|
|
||||||
return byIndex(x).Less(i, j)
|
|
||||||
}
|
|
||||||
|
|
||||||
// byIndex sorts field by index sequence.
|
|
||||||
type byIndex []field
|
|
||||||
|
|
||||||
func (x byIndex) Len() int { return len(x) }
|
|
||||||
|
|
||||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
|
||||||
|
|
||||||
func (x byIndex) Less(i, j int) bool {
|
|
||||||
for k, xik := range x[i].index {
|
|
||||||
if k >= len(x[j].index) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if xik != x[j].index[k] {
|
|
||||||
return xik < x[j].index[k]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return len(x[i].index) < len(x[j].index)
|
|
||||||
}
|
|
||||||
|
|
||||||
// typeFields returns a list of fields that JSON should recognize for the given type.
|
|
||||||
// The algorithm is breadth-first search over the set of structs to include - the top struct
|
|
||||||
// and then any reachable anonymous structs.
|
|
||||||
func typeFields(t reflect.Type) []field {
|
|
||||||
// Anonymous fields to explore at the current level and the next.
|
|
||||||
current := []field{}
|
|
||||||
next := []field{{typ: t}}
|
|
||||||
|
|
||||||
// Count of queued names for current level and the next.
|
|
||||||
count := map[reflect.Type]int{}
|
|
||||||
nextCount := map[reflect.Type]int{}
|
|
||||||
|
|
||||||
// Types already visited at an earlier level.
|
|
||||||
visited := map[reflect.Type]bool{}
|
|
||||||
|
|
||||||
// Fields found.
|
|
||||||
var fields []field
|
|
||||||
|
|
||||||
for len(next) > 0 {
|
|
||||||
current, next = next, current[:0]
|
|
||||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
|
||||||
|
|
||||||
for _, f := range current {
|
|
||||||
if visited[f.typ] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
visited[f.typ] = true
|
|
||||||
|
|
||||||
// Scan f.typ for fields to include.
|
|
||||||
for i := 0; i < f.typ.NumField(); i++ {
|
|
||||||
sf := f.typ.Field(i)
|
|
||||||
if sf.PkgPath != "" { // unexported
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
tag := sf.Tag.Get("json")
|
|
||||||
if tag == "-" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
name, opts := parseTag(tag)
|
|
||||||
if !isValidTag(name) {
|
|
||||||
name = ""
|
|
||||||
}
|
|
||||||
index := make([]int, len(f.index)+1)
|
|
||||||
copy(index, f.index)
|
|
||||||
index[len(f.index)] = i
|
|
||||||
|
|
||||||
ft := sf.Type
|
|
||||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
|
||||||
// Follow pointer.
|
|
||||||
ft = ft.Elem()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Record found field and index sequence.
|
|
||||||
if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
|
||||||
tagged := name != ""
|
|
||||||
if name == "" {
|
|
||||||
name = sf.Name
|
|
||||||
}
|
|
||||||
fields = append(fields, fillField(field{
|
|
||||||
name: name,
|
|
||||||
tag: tagged,
|
|
||||||
index: index,
|
|
||||||
typ: ft,
|
|
||||||
omitEmpty: opts.Contains("omitempty"),
|
|
||||||
quoted: opts.Contains("string"),
|
|
||||||
}))
|
|
||||||
if count[f.typ] > 1 {
|
|
||||||
// If there were multiple instances, add a second,
|
|
||||||
// so that the annihilation code will see a duplicate.
|
|
||||||
// It only cares about the distinction between 1 or 2,
|
|
||||||
// so don't bother generating any more copies.
|
|
||||||
fields = append(fields, fields[len(fields)-1])
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Record new anonymous struct to explore in next round.
|
|
||||||
nextCount[ft]++
|
|
||||||
if nextCount[ft] == 1 {
|
|
||||||
next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(byName(fields))
|
|
||||||
|
|
||||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
|
||||||
// except that fields with JSON tags are promoted.
|
|
||||||
|
|
||||||
// The fields are sorted in primary order of name, secondary order
|
|
||||||
// of field index length. Loop over names; for each name, delete
|
|
||||||
// hidden fields by choosing the one dominant field that survives.
|
|
||||||
out := fields[:0]
|
|
||||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
|
||||||
// One iteration per name.
|
|
||||||
// Find the sequence of fields with the name of this first field.
|
|
||||||
fi := fields[i]
|
|
||||||
name := fi.name
|
|
||||||
for advance = 1; i+advance < len(fields); advance++ {
|
|
||||||
fj := fields[i+advance]
|
|
||||||
if fj.name != name {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if advance == 1 { // Only one field with this name
|
|
||||||
out = append(out, fi)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
dominant, ok := dominantField(fields[i : i+advance])
|
|
||||||
if ok {
|
|
||||||
out = append(out, dominant)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fields = out
|
|
||||||
sort.Sort(byIndex(fields))
|
|
||||||
|
|
||||||
return fields
|
|
||||||
}
|
|
||||||
|
|
||||||
// dominantField looks through the fields, all of which are known to
|
|
||||||
// have the same name, to find the single field that dominates the
|
|
||||||
// others using Go's embedding rules, modified by the presence of
|
|
||||||
// JSON tags. If there are multiple top-level fields, the boolean
|
|
||||||
// will be false: This condition is an error in Go and we skip all
|
|
||||||
// the fields.
|
|
||||||
func dominantField(fields []field) (field, bool) {
|
|
||||||
// The fields are sorted in increasing index-length order. The winner
|
|
||||||
// must therefore be one with the shortest index length. Drop all
|
|
||||||
// longer entries, which is easy: just truncate the slice.
|
|
||||||
length := len(fields[0].index)
|
|
||||||
tagged := -1 // Index of first tagged field.
|
|
||||||
for i, f := range fields {
|
|
||||||
if len(f.index) > length {
|
|
||||||
fields = fields[:i]
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if f.tag {
|
|
||||||
if tagged >= 0 {
|
|
||||||
// Multiple tagged fields at the same level: conflict.
|
|
||||||
// Return no field.
|
|
||||||
return field{}, false
|
|
||||||
}
|
|
||||||
tagged = i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if tagged >= 0 {
|
|
||||||
return fields[tagged], true
|
|
||||||
}
|
|
||||||
// All remaining fields have the same length. If there's more than one,
|
|
||||||
// we have a conflict (two fields named "X" at the same level) and we
|
|
||||||
// return no field.
|
|
||||||
if len(fields) > 1 {
|
|
||||||
return field{}, false
|
|
||||||
}
|
|
||||||
return fields[0], true
|
|
||||||
}
|
|
||||||
|
|
||||||
var fieldCache struct {
|
|
||||||
sync.RWMutex
|
|
||||||
m map[reflect.Type][]field
|
|
||||||
}
|
|
||||||
|
|
||||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
|
||||||
func cachedTypeFields(t reflect.Type) []field {
|
|
||||||
fieldCache.RLock()
|
|
||||||
f := fieldCache.m[t]
|
|
||||||
fieldCache.RUnlock()
|
|
||||||
if f != nil {
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
|
|
||||||
// Compute fields without lock.
|
|
||||||
// Might duplicate effort but won't hold other computations back.
|
|
||||||
f = typeFields(t)
|
|
||||||
if f == nil {
|
|
||||||
f = []field{}
|
|
||||||
}
|
|
||||||
|
|
||||||
fieldCache.Lock()
|
|
||||||
if fieldCache.m == nil {
|
|
||||||
fieldCache.m = map[reflect.Type][]field{}
|
|
||||||
}
|
|
||||||
fieldCache.m[t] = f
|
|
||||||
fieldCache.Unlock()
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
|
|
||||||
func isValidTag(s string) bool {
|
|
||||||
if s == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, c := range s {
|
|
||||||
switch {
|
|
||||||
case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
|
|
||||||
// Backslash and quote chars are reserved, but
|
|
||||||
// otherwise any punctuation chars are allowed
|
|
||||||
// in a tag name.
|
|
||||||
default:
|
|
||||||
if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
const (
|
|
||||||
caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
|
|
||||||
kelvin = '\u212a'
|
|
||||||
smallLongEss = '\u017f'
|
|
||||||
)
|
|
||||||
|
|
||||||
// foldFunc returns one of four different case folding equivalence
|
|
||||||
// functions, from most general (and slow) to fastest:
|
|
||||||
//
|
|
||||||
// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
|
|
||||||
// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
|
|
||||||
// 3) asciiEqualFold, no special, but includes non-letters (including _)
|
|
||||||
// 4) simpleLetterEqualFold, no specials, no non-letters.
|
|
||||||
//
|
|
||||||
// The letters S and K are special because they map to 3 runes, not just 2:
|
|
||||||
// * S maps to s and to U+017F 'ſ' Latin small letter long s
|
|
||||||
// * k maps to K and to U+212A 'K' Kelvin sign
|
|
||||||
// See http://play.golang.org/p/tTxjOc0OGo
|
|
||||||
//
|
|
||||||
// The returned function is specialized for matching against s and
|
|
||||||
// should only be given s. It's not curried for performance reasons.
|
|
||||||
func foldFunc(s []byte) func(s, t []byte) bool {
|
|
||||||
nonLetter := false
|
|
||||||
special := false // special letter
|
|
||||||
for _, b := range s {
|
|
||||||
if b >= utf8.RuneSelf {
|
|
||||||
return bytes.EqualFold
|
|
||||||
}
|
|
||||||
upper := b & caseMask
|
|
||||||
if upper < 'A' || upper > 'Z' {
|
|
||||||
nonLetter = true
|
|
||||||
} else if upper == 'K' || upper == 'S' {
|
|
||||||
// See above for why these letters are special.
|
|
||||||
special = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if special {
|
|
||||||
return equalFoldRight
|
|
||||||
}
|
|
||||||
if nonLetter {
|
|
||||||
return asciiEqualFold
|
|
||||||
}
|
|
||||||
return simpleLetterEqualFold
|
|
||||||
}
|
|
||||||
|
|
||||||
// equalFoldRight is a specialization of bytes.EqualFold when s is
|
|
||||||
// known to be all ASCII (including punctuation), but contains an 's',
|
|
||||||
// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
|
|
||||||
// See comments on foldFunc.
|
|
||||||
func equalFoldRight(s, t []byte) bool {
|
|
||||||
for _, sb := range s {
|
|
||||||
if len(t) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
tb := t[0]
|
|
||||||
if tb < utf8.RuneSelf {
|
|
||||||
if sb != tb {
|
|
||||||
sbUpper := sb & caseMask
|
|
||||||
if 'A' <= sbUpper && sbUpper <= 'Z' {
|
|
||||||
if sbUpper != tb&caseMask {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
t = t[1:]
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// sb is ASCII and t is not. t must be either kelvin
|
|
||||||
// sign or long s; sb must be s, S, k, or K.
|
|
||||||
tr, size := utf8.DecodeRune(t)
|
|
||||||
switch sb {
|
|
||||||
case 's', 'S':
|
|
||||||
if tr != smallLongEss {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
case 'k', 'K':
|
|
||||||
if tr != kelvin {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
t = t[size:]
|
|
||||||
|
|
||||||
}
|
|
||||||
if len(t) > 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// asciiEqualFold is a specialization of bytes.EqualFold for use when
|
|
||||||
// s is all ASCII (but may contain non-letters) and contains no
|
|
||||||
// special-folding letters.
|
|
||||||
// See comments on foldFunc.
|
|
||||||
func asciiEqualFold(s, t []byte) bool {
|
|
||||||
if len(s) != len(t) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for i, sb := range s {
|
|
||||||
tb := t[i]
|
|
||||||
if sb == tb {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
|
|
||||||
if sb&caseMask != tb&caseMask {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// simpleLetterEqualFold is a specialization of bytes.EqualFold for
|
|
||||||
// use when s is all ASCII letters (no underscores, etc) and also
|
|
||||||
// doesn't contain 'k', 'K', 's', or 'S'.
|
|
||||||
// See comments on foldFunc.
|
|
||||||
func simpleLetterEqualFold(s, t []byte) bool {
|
|
||||||
if len(s) != len(t) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for i, b := range s {
|
|
||||||
if b&caseMask != t[i]&caseMask {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// tagOptions is the string following a comma in a struct field's "json"
|
|
||||||
// tag, or the empty string. It does not include the leading comma.
|
|
||||||
type tagOptions string
|
|
||||||
|
|
||||||
// parseTag splits a struct field's json tag into its name and
|
|
||||||
// comma-separated options.
|
|
||||||
func parseTag(tag string) (string, tagOptions) {
|
|
||||||
if idx := strings.Index(tag, ","); idx != -1 {
|
|
||||||
return tag[:idx], tagOptions(tag[idx+1:])
|
|
||||||
}
|
|
||||||
return tag, tagOptions("")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Contains reports whether a comma-separated list of options
|
|
||||||
// contains a particular substr flag. substr must be surrounded by a
|
|
||||||
// string boundary or commas.
|
|
||||||
func (o tagOptions) Contains(optionName string) bool {
|
|
||||||
if len(o) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
s := string(o)
|
|
||||||
for s != "" {
|
|
||||||
var next string
|
|
||||||
i := strings.Index(s, ",")
|
|
||||||
if i >= 0 {
|
|
||||||
s, next = s[:i], s[i+1:]
|
|
||||||
}
|
|
||||||
if s == optionName {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
s = next
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
@ -1,277 +0,0 @@
|
||||||
package yaml
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"reflect"
|
|
||||||
"strconv"
|
|
||||||
|
|
||||||
"gopkg.in/yaml.v2"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Marshals the object into JSON then converts JSON to YAML and returns the
|
|
||||||
// YAML.
|
|
||||||
func Marshal(o interface{}) ([]byte, error) {
|
|
||||||
j, err := json.Marshal(o)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("error marshaling into JSON: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
y, err := JSONToYAML(j)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("error converting JSON to YAML: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return y, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Converts YAML to JSON then uses JSON to unmarshal into an object.
|
|
||||||
func Unmarshal(y []byte, o interface{}) error {
|
|
||||||
vo := reflect.ValueOf(o)
|
|
||||||
j, err := yamlToJSON(y, &vo)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("error converting YAML to JSON: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = json.Unmarshal(j, o)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("error unmarshaling JSON: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert JSON to YAML.
|
|
||||||
func JSONToYAML(j []byte) ([]byte, error) {
|
|
||||||
// Convert the JSON to an object.
|
|
||||||
var jsonObj interface{}
|
|
||||||
// We are using yaml.Unmarshal here (instead of json.Unmarshal) because the
|
|
||||||
// Go JSON library doesn't try to pick the right number type (int, float,
|
|
||||||
// etc.) when unmarshalling to interface{}, it just picks float64
|
|
||||||
// universally. go-yaml does go through the effort of picking the right
|
|
||||||
// number type, so we can preserve number type throughout this process.
|
|
||||||
err := yaml.Unmarshal(j, &jsonObj)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Marshal this object into YAML.
|
|
||||||
return yaml.Marshal(jsonObj)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert YAML to JSON. Since JSON is a subset of YAML, passing JSON through
|
|
||||||
// this method should be a no-op.
|
|
||||||
//
|
|
||||||
// Things YAML can do that are not supported by JSON:
|
|
||||||
// * In YAML you can have binary and null keys in your maps. These are invalid
|
|
||||||
// in JSON. (int and float keys are converted to strings.)
|
|
||||||
// * Binary data in YAML with the !!binary tag is not supported. If you want to
|
|
||||||
// use binary data with this library, encode the data as base64 as usual but do
|
|
||||||
// not use the !!binary tag in your YAML. This will ensure the original base64
|
|
||||||
// encoded data makes it all the way through to the JSON.
|
|
||||||
func YAMLToJSON(y []byte) ([]byte, error) {
|
|
||||||
return yamlToJSON(y, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
func yamlToJSON(y []byte, jsonTarget *reflect.Value) ([]byte, error) {
|
|
||||||
// Convert the YAML to an object.
|
|
||||||
var yamlObj interface{}
|
|
||||||
err := yaml.Unmarshal(y, &yamlObj)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// YAML objects are not completely compatible with JSON objects (e.g. you
|
|
||||||
// can have non-string keys in YAML). So, convert the YAML-compatible object
|
|
||||||
// to a JSON-compatible object, failing with an error if irrecoverable
|
|
||||||
// incompatibilties happen along the way.
|
|
||||||
jsonObj, err := convertToJSONableObject(yamlObj, jsonTarget)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert this object to JSON and return the data.
|
|
||||||
return json.Marshal(jsonObj)
|
|
||||||
}
|
|
||||||
|
|
||||||
func convertToJSONableObject(yamlObj interface{}, jsonTarget *reflect.Value) (interface{}, error) {
|
|
||||||
var err error
|
|
||||||
|
|
||||||
// Resolve jsonTarget to a concrete value (i.e. not a pointer or an
|
|
||||||
// interface). We pass decodingNull as false because we're not actually
|
|
||||||
// decoding into the value, we're just checking if the ultimate target is a
|
|
||||||
// string.
|
|
||||||
if jsonTarget != nil {
|
|
||||||
ju, tu, pv := indirect(*jsonTarget, false)
|
|
||||||
// We have a JSON or Text Umarshaler at this level, so we can't be trying
|
|
||||||
// to decode into a string.
|
|
||||||
if ju != nil || tu != nil {
|
|
||||||
jsonTarget = nil
|
|
||||||
} else {
|
|
||||||
jsonTarget = &pv
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If yamlObj is a number or a boolean, check if jsonTarget is a string -
|
|
||||||
// if so, coerce. Else return normal.
|
|
||||||
// If yamlObj is a map or array, find the field that each key is
|
|
||||||
// unmarshaling to, and when you recurse pass the reflect.Value for that
|
|
||||||
// field back into this function.
|
|
||||||
switch typedYAMLObj := yamlObj.(type) {
|
|
||||||
case map[interface{}]interface{}:
|
|
||||||
// JSON does not support arbitrary keys in a map, so we must convert
|
|
||||||
// these keys to strings.
|
|
||||||
//
|
|
||||||
// From my reading of go-yaml v2 (specifically the resolve function),
|
|
||||||
// keys can only have the types string, int, int64, float64, binary
|
|
||||||
// (unsupported), or null (unsupported).
|
|
||||||
strMap := make(map[string]interface{})
|
|
||||||
for k, v := range typedYAMLObj {
|
|
||||||
// Resolve the key to a string first.
|
|
||||||
var keyString string
|
|
||||||
switch typedKey := k.(type) {
|
|
||||||
case string:
|
|
||||||
keyString = typedKey
|
|
||||||
case int:
|
|
||||||
keyString = strconv.Itoa(typedKey)
|
|
||||||
case int64:
|
|
||||||
// go-yaml will only return an int64 as a key if the system
|
|
||||||
// architecture is 32-bit and the key's value is between 32-bit
|
|
||||||
// and 64-bit. Otherwise the key type will simply be int.
|
|
||||||
keyString = strconv.FormatInt(typedKey, 10)
|
|
||||||
case float64:
|
|
||||||
// Stolen from go-yaml to use the same conversion to string as
|
|
||||||
// the go-yaml library uses to convert float to string when
|
|
||||||
// Marshaling.
|
|
||||||
s := strconv.FormatFloat(typedKey, 'g', -1, 32)
|
|
||||||
switch s {
|
|
||||||
case "+Inf":
|
|
||||||
s = ".inf"
|
|
||||||
case "-Inf":
|
|
||||||
s = "-.inf"
|
|
||||||
case "NaN":
|
|
||||||
s = ".nan"
|
|
||||||
}
|
|
||||||
keyString = s
|
|
||||||
case bool:
|
|
||||||
if typedKey {
|
|
||||||
keyString = "true"
|
|
||||||
} else {
|
|
||||||
keyString = "false"
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return nil, fmt.Errorf("Unsupported map key of type: %s, key: %+#v, value: %+#v",
|
|
||||||
reflect.TypeOf(k), k, v)
|
|
||||||
}
|
|
||||||
|
|
||||||
// jsonTarget should be a struct or a map. If it's a struct, find
|
|
||||||
// the field it's going to map to and pass its reflect.Value. If
|
|
||||||
// it's a map, find the element type of the map and pass the
|
|
||||||
// reflect.Value created from that type. If it's neither, just pass
|
|
||||||
// nil - JSON conversion will error for us if it's a real issue.
|
|
||||||
if jsonTarget != nil {
|
|
||||||
t := *jsonTarget
|
|
||||||
if t.Kind() == reflect.Struct {
|
|
||||||
keyBytes := []byte(keyString)
|
|
||||||
// Find the field that the JSON library would use.
|
|
||||||
var f *field
|
|
||||||
fields := cachedTypeFields(t.Type())
|
|
||||||
for i := range fields {
|
|
||||||
ff := &fields[i]
|
|
||||||
if bytes.Equal(ff.nameBytes, keyBytes) {
|
|
||||||
f = ff
|
|
||||||
break
|
|
||||||
}
|
|
||||||
// Do case-insensitive comparison.
|
|
||||||
if f == nil && ff.equalFold(ff.nameBytes, keyBytes) {
|
|
||||||
f = ff
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if f != nil {
|
|
||||||
// Find the reflect.Value of the most preferential
|
|
||||||
// struct field.
|
|
||||||
jtf := t.Field(f.index[0])
|
|
||||||
strMap[keyString], err = convertToJSONableObject(v, &jtf)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
} else if t.Kind() == reflect.Map {
|
|
||||||
// Create a zero value of the map's element type to use as
|
|
||||||
// the JSON target.
|
|
||||||
jtv := reflect.Zero(t.Type().Elem())
|
|
||||||
strMap[keyString], err = convertToJSONableObject(v, &jtv)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
strMap[keyString], err = convertToJSONableObject(v, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return strMap, nil
|
|
||||||
case []interface{}:
|
|
||||||
// We need to recurse into arrays in case there are any
|
|
||||||
// map[interface{}]interface{}'s inside and to convert any
|
|
||||||
// numbers to strings.
|
|
||||||
|
|
||||||
// If jsonTarget is a slice (which it really should be), find the
|
|
||||||
// thing it's going to map to. If it's not a slice, just pass nil
|
|
||||||
// - JSON conversion will error for us if it's a real issue.
|
|
||||||
var jsonSliceElemValue *reflect.Value
|
|
||||||
if jsonTarget != nil {
|
|
||||||
t := *jsonTarget
|
|
||||||
if t.Kind() == reflect.Slice {
|
|
||||||
// By default slices point to nil, but we need a reflect.Value
|
|
||||||
// pointing to a value of the slice type, so we create one here.
|
|
||||||
ev := reflect.Indirect(reflect.New(t.Type().Elem()))
|
|
||||||
jsonSliceElemValue = &ev
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make and use a new array.
|
|
||||||
arr := make([]interface{}, len(typedYAMLObj))
|
|
||||||
for i, v := range typedYAMLObj {
|
|
||||||
arr[i], err = convertToJSONableObject(v, jsonSliceElemValue)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return arr, nil
|
|
||||||
default:
|
|
||||||
// If the target type is a string and the YAML type is a number,
|
|
||||||
// convert the YAML type to a string.
|
|
||||||
if jsonTarget != nil && (*jsonTarget).Kind() == reflect.String {
|
|
||||||
// Based on my reading of go-yaml, it may return int, int64,
|
|
||||||
// float64, or uint64.
|
|
||||||
var s string
|
|
||||||
switch typedVal := typedYAMLObj.(type) {
|
|
||||||
case int:
|
|
||||||
s = strconv.FormatInt(int64(typedVal), 10)
|
|
||||||
case int64:
|
|
||||||
s = strconv.FormatInt(typedVal, 10)
|
|
||||||
case float64:
|
|
||||||
s = strconv.FormatFloat(typedVal, 'g', -1, 32)
|
|
||||||
case uint64:
|
|
||||||
s = strconv.FormatUint(typedVal, 10)
|
|
||||||
case bool:
|
|
||||||
if typedVal {
|
|
||||||
s = "true"
|
|
||||||
} else {
|
|
||||||
s = "false"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(s) > 0 {
|
|
||||||
yamlObj = interface{}(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return yamlObj, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
@ -0,0 +1,5 @@
|
||||||
|
language: go
|
||||||
|
|
||||||
|
go:
|
||||||
|
- "1.8.x"
|
||||||
|
- "1.10.x"
|
||||||
|
|
@ -0,0 +1,7 @@
|
||||||
|
Copyright (c) 2015 Conrad Irwin <conrad@bugsnag.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
@ -0,0 +1,66 @@
|
||||||
|
go-errors/errors
|
||||||
|
================
|
||||||
|
|
||||||
|
[](https://travis-ci.org/go-errors/errors)
|
||||||
|
|
||||||
|
Package errors adds stacktrace support to errors in go.
|
||||||
|
|
||||||
|
This is particularly useful when you want to understand the state of execution
|
||||||
|
when an error was returned unexpectedly.
|
||||||
|
|
||||||
|
It provides the type \*Error which implements the standard golang error
|
||||||
|
interface, so you can use this library interchangably with code that is
|
||||||
|
expecting a normal error return.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
Full documentation is available on
|
||||||
|
[godoc](https://godoc.org/github.com/go-errors/errors), but here's a simple
|
||||||
|
example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
package crashy
|
||||||
|
|
||||||
|
import "github.com/go-errors/errors"
|
||||||
|
|
||||||
|
var Crashed = errors.Errorf("oh dear")
|
||||||
|
|
||||||
|
func Crash() error {
|
||||||
|
return errors.New(Crashed)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This can be called as follows:
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crashy"
|
||||||
|
"fmt"
|
||||||
|
"github.com/go-errors/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
err := crashy.Crash()
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, crashy.Crashed) {
|
||||||
|
fmt.Println(err.(*errors.Error).ErrorStack())
|
||||||
|
} else {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Meta-fu
|
||||||
|
-------
|
||||||
|
|
||||||
|
This package was original written to allow reporting to
|
||||||
|
[Bugsnag](https://bugsnag.com/) from
|
||||||
|
[bugsnag-go](https://github.com/bugsnag/bugsnag-go), but after I found similar
|
||||||
|
packages by Facebook and Dropbox, it was moved to one canonical location so
|
||||||
|
everyone can benefit.
|
||||||
|
|
||||||
|
This package is licensed under the MIT license, see LICENSE.MIT for details.
|
||||||
|
|
@ -0,0 +1,217 @@
|
||||||
|
// Package errors provides errors that have stack-traces.
|
||||||
|
//
|
||||||
|
// This is particularly useful when you want to understand the
|
||||||
|
// state of execution when an error was returned unexpectedly.
|
||||||
|
//
|
||||||
|
// It provides the type *Error which implements the standard
|
||||||
|
// golang error interface, so you can use this library interchangably
|
||||||
|
// with code that is expecting a normal error return.
|
||||||
|
//
|
||||||
|
// For example:
|
||||||
|
//
|
||||||
|
// package crashy
|
||||||
|
//
|
||||||
|
// import "github.com/go-errors/errors"
|
||||||
|
//
|
||||||
|
// var Crashed = errors.Errorf("oh dear")
|
||||||
|
//
|
||||||
|
// func Crash() error {
|
||||||
|
// return errors.New(Crashed)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// This can be called as follows:
|
||||||
|
//
|
||||||
|
// package main
|
||||||
|
//
|
||||||
|
// import (
|
||||||
|
// "crashy"
|
||||||
|
// "fmt"
|
||||||
|
// "github.com/go-errors/errors"
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// func main() {
|
||||||
|
// err := crashy.Crash()
|
||||||
|
// if err != nil {
|
||||||
|
// if errors.Is(err, crashy.Crashed) {
|
||||||
|
// fmt.Println(err.(*errors.Error).ErrorStack())
|
||||||
|
// } else {
|
||||||
|
// panic(err)
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// This package was original written to allow reporting to Bugsnag,
|
||||||
|
// but after I found similar packages by Facebook and Dropbox, it
|
||||||
|
// was moved to one canonical location so everyone can benefit.
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
// The maximum number of stackframes on any error.
|
||||||
|
var MaxStackDepth = 50
|
||||||
|
|
||||||
|
// Error is an error with an attached stacktrace. It can be used
|
||||||
|
// wherever the builtin error interface is expected.
|
||||||
|
type Error struct {
|
||||||
|
Err error
|
||||||
|
stack []uintptr
|
||||||
|
frames []StackFrame
|
||||||
|
prefix string
|
||||||
|
}
|
||||||
|
|
||||||
|
// New makes an Error from the given value. If that value is already an
|
||||||
|
// error then it will be used directly, if not, it will be passed to
|
||||||
|
// fmt.Errorf("%v"). The stacktrace will point to the line of code that
|
||||||
|
// called New.
|
||||||
|
func New(e interface{}) *Error {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
switch e := e.(type) {
|
||||||
|
case error:
|
||||||
|
err = e
|
||||||
|
default:
|
||||||
|
err = fmt.Errorf("%v", e)
|
||||||
|
}
|
||||||
|
|
||||||
|
stack := make([]uintptr, MaxStackDepth)
|
||||||
|
length := runtime.Callers(2, stack[:])
|
||||||
|
return &Error{
|
||||||
|
Err: err,
|
||||||
|
stack: stack[:length],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrap makes an Error from the given value. If that value is already an
|
||||||
|
// error then it will be used directly, if not, it will be passed to
|
||||||
|
// fmt.Errorf("%v"). The skip parameter indicates how far up the stack
|
||||||
|
// to start the stacktrace. 0 is from the current call, 1 from its caller, etc.
|
||||||
|
func Wrap(e interface{}, skip int) *Error {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
switch e := e.(type) {
|
||||||
|
case *Error:
|
||||||
|
return e
|
||||||
|
case error:
|
||||||
|
err = e
|
||||||
|
default:
|
||||||
|
err = fmt.Errorf("%v", e)
|
||||||
|
}
|
||||||
|
|
||||||
|
stack := make([]uintptr, MaxStackDepth)
|
||||||
|
length := runtime.Callers(2+skip, stack[:])
|
||||||
|
return &Error{
|
||||||
|
Err: err,
|
||||||
|
stack: stack[:length],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WrapPrefix makes an Error from the given value. If that value is already an
|
||||||
|
// error then it will be used directly, if not, it will be passed to
|
||||||
|
// fmt.Errorf("%v"). The prefix parameter is used to add a prefix to the
|
||||||
|
// error message when calling Error(). The skip parameter indicates how far
|
||||||
|
// up the stack to start the stacktrace. 0 is from the current call,
|
||||||
|
// 1 from its caller, etc.
|
||||||
|
func WrapPrefix(e interface{}, prefix string, skip int) *Error {
|
||||||
|
|
||||||
|
err := Wrap(e, 1+skip)
|
||||||
|
|
||||||
|
if err.prefix != "" {
|
||||||
|
prefix = fmt.Sprintf("%s: %s", prefix, err.prefix)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Error{
|
||||||
|
Err: err.Err,
|
||||||
|
stack: err.stack,
|
||||||
|
prefix: prefix,
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is detects whether the error is equal to a given error. Errors
|
||||||
|
// are considered equal by this function if they are the same object,
|
||||||
|
// or if they both contain the same error inside an errors.Error.
|
||||||
|
func Is(e error, original error) bool {
|
||||||
|
|
||||||
|
if e == original {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if e, ok := e.(*Error); ok {
|
||||||
|
return Is(e.Err, original)
|
||||||
|
}
|
||||||
|
|
||||||
|
if original, ok := original.(*Error); ok {
|
||||||
|
return Is(e, original.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Errorf creates a new error with the given message. You can use it
|
||||||
|
// as a drop-in replacement for fmt.Errorf() to provide descriptive
|
||||||
|
// errors in return values.
|
||||||
|
func Errorf(format string, a ...interface{}) *Error {
|
||||||
|
return Wrap(fmt.Errorf(format, a...), 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error returns the underlying error's message.
|
||||||
|
func (err *Error) Error() string {
|
||||||
|
|
||||||
|
msg := err.Err.Error()
|
||||||
|
if err.prefix != "" {
|
||||||
|
msg = fmt.Sprintf("%s: %s", err.prefix, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
return msg
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stack returns the callstack formatted the same way that go does
|
||||||
|
// in runtime/debug.Stack()
|
||||||
|
func (err *Error) Stack() []byte {
|
||||||
|
buf := bytes.Buffer{}
|
||||||
|
|
||||||
|
for _, frame := range err.StackFrames() {
|
||||||
|
buf.WriteString(frame.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Callers satisfies the bugsnag ErrorWithCallerS() interface
|
||||||
|
// so that the stack can be read out.
|
||||||
|
func (err *Error) Callers() []uintptr {
|
||||||
|
return err.stack
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorStack returns a string that contains both the
|
||||||
|
// error message and the callstack.
|
||||||
|
func (err *Error) ErrorStack() string {
|
||||||
|
return err.TypeName() + " " + err.Error() + "\n" + string(err.Stack())
|
||||||
|
}
|
||||||
|
|
||||||
|
// StackFrames returns an array of frames containing information about the
|
||||||
|
// stack.
|
||||||
|
func (err *Error) StackFrames() []StackFrame {
|
||||||
|
if err.frames == nil {
|
||||||
|
err.frames = make([]StackFrame, len(err.stack))
|
||||||
|
|
||||||
|
for i, pc := range err.stack {
|
||||||
|
err.frames[i] = NewStackFrame(pc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return err.frames
|
||||||
|
}
|
||||||
|
|
||||||
|
// TypeName returns the type this error. e.g. *errors.stringError.
|
||||||
|
func (err *Error) TypeName() string {
|
||||||
|
if _, ok := err.Err.(uncaughtPanic); ok {
|
||||||
|
return "panic"
|
||||||
|
}
|
||||||
|
return reflect.TypeOf(err.Err).String()
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,127 @@
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type uncaughtPanic struct{ message string }
|
||||||
|
|
||||||
|
func (p uncaughtPanic) Error() string {
|
||||||
|
return p.message
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParsePanic allows you to get an error object from the output of a go program
|
||||||
|
// that panicked. This is particularly useful with https://github.com/mitchellh/panicwrap.
|
||||||
|
func ParsePanic(text string) (*Error, error) {
|
||||||
|
lines := strings.Split(text, "\n")
|
||||||
|
|
||||||
|
state := "start"
|
||||||
|
|
||||||
|
var message string
|
||||||
|
var stack []StackFrame
|
||||||
|
|
||||||
|
for i := 0; i < len(lines); i++ {
|
||||||
|
line := lines[i]
|
||||||
|
|
||||||
|
if state == "start" {
|
||||||
|
if strings.HasPrefix(line, "panic: ") {
|
||||||
|
message = strings.TrimPrefix(line, "panic: ")
|
||||||
|
state = "seek"
|
||||||
|
} else {
|
||||||
|
return nil, Errorf("bugsnag.panicParser: Invalid line (no prefix): %s", line)
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if state == "seek" {
|
||||||
|
if strings.HasPrefix(line, "goroutine ") && strings.HasSuffix(line, "[running]:") {
|
||||||
|
state = "parsing"
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if state == "parsing" {
|
||||||
|
if line == "" {
|
||||||
|
state = "done"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
createdBy := false
|
||||||
|
if strings.HasPrefix(line, "created by ") {
|
||||||
|
line = strings.TrimPrefix(line, "created by ")
|
||||||
|
createdBy = true
|
||||||
|
}
|
||||||
|
|
||||||
|
i++
|
||||||
|
|
||||||
|
if i >= len(lines) {
|
||||||
|
return nil, Errorf("bugsnag.panicParser: Invalid line (unpaired): %s", line)
|
||||||
|
}
|
||||||
|
|
||||||
|
frame, err := parsePanicFrame(line, lines[i], createdBy)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
stack = append(stack, *frame)
|
||||||
|
if createdBy {
|
||||||
|
state = "done"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if state == "done" || state == "parsing" {
|
||||||
|
return &Error{Err: uncaughtPanic{message}, frames: stack}, nil
|
||||||
|
}
|
||||||
|
return nil, Errorf("could not parse panic: %v", text)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The lines we're passing look like this:
|
||||||
|
//
|
||||||
|
// main.(*foo).destruct(0xc208067e98)
|
||||||
|
// /0/go/src/github.com/bugsnag/bugsnag-go/pan/main.go:22 +0x151
|
||||||
|
func parsePanicFrame(name string, line string, createdBy bool) (*StackFrame, error) {
|
||||||
|
idx := strings.LastIndex(name, "(")
|
||||||
|
if idx == -1 && !createdBy {
|
||||||
|
return nil, Errorf("bugsnag.panicParser: Invalid line (no call): %s", name)
|
||||||
|
}
|
||||||
|
if idx != -1 {
|
||||||
|
name = name[:idx]
|
||||||
|
}
|
||||||
|
pkg := ""
|
||||||
|
|
||||||
|
if lastslash := strings.LastIndex(name, "/"); lastslash >= 0 {
|
||||||
|
pkg += name[:lastslash] + "/"
|
||||||
|
name = name[lastslash+1:]
|
||||||
|
}
|
||||||
|
if period := strings.Index(name, "."); period >= 0 {
|
||||||
|
pkg += name[:period]
|
||||||
|
name = name[period+1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
name = strings.Replace(name, "·", ".", -1)
|
||||||
|
|
||||||
|
if !strings.HasPrefix(line, "\t") {
|
||||||
|
return nil, Errorf("bugsnag.panicParser: Invalid line (no tab): %s", line)
|
||||||
|
}
|
||||||
|
|
||||||
|
idx = strings.LastIndex(line, ":")
|
||||||
|
if idx == -1 {
|
||||||
|
return nil, Errorf("bugsnag.panicParser: Invalid line (no line number): %s", line)
|
||||||
|
}
|
||||||
|
file := line[1:idx]
|
||||||
|
|
||||||
|
number := line[idx+1:]
|
||||||
|
if idx = strings.Index(number, " +"); idx > -1 {
|
||||||
|
number = number[:idx]
|
||||||
|
}
|
||||||
|
|
||||||
|
lno, err := strconv.ParseInt(number, 10, 32)
|
||||||
|
if err != nil {
|
||||||
|
return nil, Errorf("bugsnag.panicParser: Invalid line (bad line number): %s", line)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &StackFrame{
|
||||||
|
File: file,
|
||||||
|
LineNumber: int(lno),
|
||||||
|
Package: pkg,
|
||||||
|
Name: name,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,102 @@
package errors

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"runtime"
	"strings"
)

// A StackFrame contains all necessary information to generate a line
// in a callstack.
type StackFrame struct {
	// The path to the file containing this ProgramCounter
	File string
	// The LineNumber in that file
	LineNumber int
	// The Name of the function that contains this ProgramCounter
	Name string
	// The Package that contains this function
	Package string
	// The underlying ProgramCounter
	ProgramCounter uintptr
}

// NewStackFrame populates a stack frame object from the program counter.
func NewStackFrame(pc uintptr) (frame StackFrame) {
	frame = StackFrame{ProgramCounter: pc}
	if frame.Func() == nil {
		return
	}
	frame.Package, frame.Name = packageAndName(frame.Func())

	// pc -1 because the program counters we use are usually return addresses,
	// and we want to show the line that corresponds to the function call
	frame.File, frame.LineNumber = frame.Func().FileLine(pc - 1)
	return
}

// Func returns the function that contained this frame.
func (frame *StackFrame) Func() *runtime.Func {
	if frame.ProgramCounter == 0 {
		return nil
	}
	return runtime.FuncForPC(frame.ProgramCounter)
}

// String returns the stackframe formatted in the same way as go does
// in runtime/debug.Stack()
func (frame *StackFrame) String() string {
	str := fmt.Sprintf("%s:%d (0x%x)\n", frame.File, frame.LineNumber, frame.ProgramCounter)

	source, err := frame.SourceLine()
	if err != nil {
		return str
	}

	return str + fmt.Sprintf("\t%s: %s\n", frame.Name, source)
}

// SourceLine gets the line of code (from File and Line) of the original source if possible.
func (frame *StackFrame) SourceLine() (string, error) {
	data, err := ioutil.ReadFile(frame.File)
	if err != nil {
		return "", New(err)
	}

	lines := bytes.Split(data, []byte{'\n'})
	if frame.LineNumber <= 0 || frame.LineNumber >= len(lines) {
		return "???", nil
	}
	// -1 because line-numbers are 1 based, but our array is 0 based
	return string(bytes.Trim(lines[frame.LineNumber-1], " \t")), nil
}

func packageAndName(fn *runtime.Func) (string, string) {
	name := fn.Name()
	pkg := ""

	// The name includes the path name to the package, which is unnecessary
	// since the file name is already included. Plus, it has center dots.
	// That is, we see
	//	runtime/debug.*T·ptrmethod
	// and want
	//	*T.ptrmethod
	// Since the package path might contain dots (e.g. code.google.com/...),
	// we first remove the path prefix if there is one.
	if lastslash := strings.LastIndex(name, "/"); lastslash >= 0 {
		pkg += name[:lastslash] + "/"
		name = name[lastslash+1:]
	}
	if period := strings.Index(name, "."); period >= 0 {
		pkg += name[:period]
		name = name[period+1:]
	}

	name = strings.Replace(name, "·", ".", -1)
	return pkg, name
}
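The two new files above are the vendored go-errors/errors stack-frame helpers (the `bugsnag.panicParser` strings and `package errors` identify them). A minimal usage sketch, assuming the upstream import path `github.com/go-errors/errors` (not part of this diff):

```go
package main

import (
	"fmt"
	"runtime"

	"github.com/go-errors/errors"
)

func main() {
	// Grab the program counter of the current call site.
	pc, _, _, ok := runtime.Caller(0)
	if !ok {
		return
	}

	// NewStackFrame resolves package, function name, file and line from the pc
	// (it uses pc-1, since stored counters are usually return addresses).
	frame := errors.NewStackFrame(pc)

	// String() renders "file:line (0xpc)" plus the source line when readable,
	// the same layout as runtime/debug.Stack().
	fmt.Print(frame.String())
}
```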
@ -21,3 +21,19 @@ linters:
     - lll
     - gochecknoinits
     - gochecknoglobals
+    - funlen
+    - godox
+    - gocognit
+    - whitespace
+    - wsl
+    - wrapcheck
+    - testpackage
+    - nlreturn
+    - gomnd
+    - exhaustivestruct
+    - goerr113
+    - errorlint
+    - nestif
+    - godot
+    - gofumpt
+    - paralleltest
@ -1,12 +1,28 @@
 after_success:
 - bash <(curl -s https://codecov.io/bash)
 go:
-- 1.11.x
-- 1.12.x
+- 1.14.x
+- 1.x
+arch:
+- amd64
+jobs:
+  include:
+    # only run fast tests on ppc64le
+    - go: 1.x
+      arch: ppc64le
+      script:
+        - gotestsum -f short-verbose -- ./...
+
+    # include linting job, but only for latest go version and amd64 arch
+    - go: 1.x
+      arch: amd64
+      install:
+        go get github.com/golangci/golangci-lint/cmd/golangci-lint
+      script:
+        - golangci-lint run --new-from-rev master
+
 install:
 - GO111MODULE=off go get -u gotest.tools/gotestsum
-env:
-- GO111MODULE=on
 language: go
 notifications:
   slack:
@ -2,9 +2,29 @@
 [](https://raw.githubusercontent.com/go-openapi/spec/master/LICENSE)
 [](http://godoc.org/github.com/go-openapi/spec)
-[](https://golangci.com)
 [](https://goreportcard.com/report/github.com/go-openapi/spec)

 The object model for OpenAPI specification documents.

-Currently supports Swagger 2.0.
+### FAQ
+
+* What does this do?
+
+> 1. This package knows how to marshal and unmarshal Swagger API specifications into a golang object model
+> 2. It knows how to resolve $ref and expand them to make a single root document
+
+* How does it play with the rest of the go-openapi packages?
+
+> 1. This package is at the core of the go-openapi suite of packages and [code generator](https://github.com/go-swagger/go-swagger)
+> 2. There is a [spec loading package](https://github.com/go-openapi/loads) to fetch specs as JSON or YAML from local or remote locations
+> 3. There is a [spec validation package](https://github.com/go-openapi/validate) built on top of it
+> 4. There is a [spec analysis package](https://github.com/go-openapi/analysis) built on top of it, to analyze, flatten, fix and merge spec documents
+
+* Does this library support OpenAPI 3?
+
+> No.
+> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
+> There is no plan to make it evolve toward supporting OpenAPI 3.x.
+> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
+>
+> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3
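The FAQ above boils down to: unmarshal a Swagger 2.0 document into `spec.Swagger`, then expand its `$ref` pointers. A minimal sketch of that flow, assuming the vendored module is imported directly as `github.com/go-openapi/spec` and using a made-up two-definition document:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

// A tiny Swagger 2.0 document with one local $ref, for illustration only.
const doc = `{
  "swagger": "2.0",
  "info": {"title": "minimal", "version": "1.0.0"},
  "paths": {},
  "definitions": {
    "Pet":   {"type": "object", "properties": {"name": {"type": "string"}}},
    "Owner": {"type": "object", "properties": {"pet": {"$ref": "#/definitions/Pet"}}}
  }
}`

func main() {
	var sw spec.Swagger
	if err := json.Unmarshal([]byte(doc), &sw); err != nil {
		log.Fatal(err)
	}

	// ExpandSpec resolves the $ref pointers in place; with empty options,
	// everything is resolved against the in-memory document itself.
	if err := spec.ExpandSpec(&sw, &spec.ExpandOptions{}); err != nil {
		log.Fatal(err)
	}

	out, _ := json.MarshalIndent(sw.Definitions["Owner"], "", "  ")
	fmt.Println(string(out))
}
```

For fetching documents from disk or remote URLs, the loads package mentioned in the FAQ is the intended entry point.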
@ -0,0 +1,34 @@
version: "0.1.{build}"

clone_folder: C:\go-openapi\spec
shallow_clone: true # for startup speed
pull_requests:
  do_not_increment_build_number: true

#skip_tags: true
#skip_branch_with_pr: true

# appveyor.yml
build: off

environment:
  GOPATH: c:\gopath

stack: go 1.12

test_script:
  - echo "test disabled for now"
#- go test -v -timeout 20m ./...
#artifacts:
# - path: '%GOPATH%\bin\*.exe'
deploy: off

notifications:
  - provider: Slack
    incoming_webhook: https://hooks.slack.com/services/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
    auth_token:
      secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
    channel: bots
    on_build_success: false
    on_build_failure: true
    on_build_status_changed: true

File diff suppressed because one or more lines are too long
@ -14,7 +14,9 @@
 package spec

-import "sync"
+import (
+	"sync"
+)

 // ResolutionCache a cache for resolving urls
 type ResolutionCache interface {

@ -27,12 +29,23 @@ type simpleCache struct {
 	store map[string]interface{}
 }

+func (s *simpleCache) ShallowClone() ResolutionCache {
+	store := make(map[string]interface{}, len(s.store))
+	s.lock.RLock()
+	for k, v := range s.store {
+		store[k] = v
+	}
+	s.lock.RUnlock()
+
+	return &simpleCache{
+		store: store,
+	}
+}
+
 // Get retrieves a cached URI
 func (s *simpleCache) Get(uri string) (interface{}, bool) {
-	debugLog("getting %q from resolution cache", uri)
 	s.lock.RLock()
 	v, ok := s.store[uri]
-	debugLog("got %q from resolution cache: %t", uri, ok)
-
 	s.lock.RUnlock()
 	return v, ok

@ -45,16 +58,41 @@ func (s *simpleCache) Set(uri string, data interface{}) {
 	s.lock.Unlock()
 }

-var resCache ResolutionCache
+var (
+	// resCache is a package level cache for $ref resolution and expansion.
+	// It is initialized lazily by methods that have the need for it: no
+	// memory is allocated unless some expander methods are called.
+	//
+	// It is initialized with JSON schema and swagger schema,
+	// which do not mutate during normal operations.
+	//
+	// All subsequent utilizations of this cache are produced from a shallow
+	// clone of this initial version.
+	resCache  *simpleCache
+	onceCache sync.Once

-func init() {
-	resCache = initResolutionCache()
+	_ ResolutionCache = &simpleCache{}
+)
+
+// initResolutionCache initializes the URI resolution cache. To be wrapped in a sync.Once.Do call.
+func initResolutionCache() {
+	resCache = defaultResolutionCache()
 }

-// initResolutionCache initializes the URI resolution cache
-func initResolutionCache() ResolutionCache {
+func defaultResolutionCache() *simpleCache {
 	return &simpleCache{store: map[string]interface{}{
 		"http://swagger.io/v2/schema.json":       MustLoadSwagger20Schema(),
 		"http://json-schema.org/draft-04/schema": MustLoadJSONSchemaDraft04(),
 	}}
 }
+
+func cacheOrDefault(cache ResolutionCache) ResolutionCache {
+	onceCache.Do(initResolutionCache)
+
+	if cache != nil {
+		return cache
+	}
+
+	// get a shallow clone of the base cache with swagger and json schema
+	return resCache.ShallowClone()
+}
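The comment block above describes the design: a package-level base cache, seeded once with the swagger and JSON schema documents, initialized lazily through `sync.Once`, from which every caller receives a shallow clone via `cacheOrDefault`. A stripped-down re-creation of that pattern, using hypothetical names outside the vendored code:

```go
package main

import (
	"fmt"
	"sync"
)

// cache mirrors the shape of the vendored simpleCache, for illustration only.
type cache struct {
	lock  sync.RWMutex
	store map[string]interface{}
}

// ShallowClone copies the map so mutations never touch the seeded base.
func (c *cache) ShallowClone() *cache {
	c.lock.RLock()
	defer c.lock.RUnlock()
	store := make(map[string]interface{}, len(c.store))
	for k, v := range c.store {
		store[k] = v
	}
	return &cache{store: store}
}

var (
	base *cache
	once sync.Once
)

func initBase() {
	// Seeded once with entries that never mutate, like the swagger / JSON schemas.
	base = &cache{store: map[string]interface{}{"seed": "immutable"}}
}

// cacheOrDefault returns the caller's cache if given, otherwise a clone of the base.
func cacheOrDefault(c *cache) *cache {
	once.Do(initBase)
	if c != nil {
		return c
	}
	return base.ShallowClone()
}

func main() {
	c := cacheOrDefault(nil)
	c.store["extra"] = 1         // mutating the clone...
	fmt.Println(len(base.store)) // ...leaves the seeded base untouched: prints 1
}
```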
@ -14,11 +14,44 @@
 package spec

+import (
+	"encoding/json"
+
+	"github.com/go-openapi/swag"
+)
+
 // ContactInfo contact information for the exposed API.
 //
 // For more information: http://goo.gl/8us55a#contactObject
 type ContactInfo struct {
+	ContactInfoProps
+	VendorExtensible
+}
+
+// ContactInfoProps holds the properties of a ContactInfo object
+type ContactInfoProps struct {
 	Name  string `json:"name,omitempty"`
 	URL   string `json:"url,omitempty"`
 	Email string `json:"email,omitempty"`
 }
+
+// UnmarshalJSON hydrates ContactInfo from json
+func (c *ContactInfo) UnmarshalJSON(data []byte) error {
+	if err := json.Unmarshal(data, &c.ContactInfoProps); err != nil {
+		return err
+	}
+	return json.Unmarshal(data, &c.VendorExtensible)
+}
+
+// MarshalJSON produces ContactInfo as json
+func (c ContactInfo) MarshalJSON() ([]byte, error) {
+	b1, err := json.Marshal(c.ContactInfoProps)
+	if err != nil {
+		return nil, err
+	}
+	b2, err := json.Marshal(c.VendorExtensible)
+	if err != nil {
+		return nil, err
+	}
+	return swag.ConcatJSON(b1, b2), nil
+}
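Splitting `ContactInfo` into `ContactInfoProps` plus `VendorExtensible` lets `x-*` vendor extensions survive a JSON round trip alongside the known fields. A small sketch, with a made-up `x-support-hours` extension:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

func main() {
	// x-support-hours is an arbitrary vendor extension, illustrative only.
	in := []byte(`{"name": "API team", "email": "api@example.com", "x-support-hours": "9-17 UTC"}`)

	var ci spec.ContactInfo
	if err := json.Unmarshal(in, &ci); err != nil {
		log.Fatal(err)
	}

	// Known fields land in ContactInfoProps, x-* keys in VendorExtensible,
	// and MarshalJSON concatenates both back into a single JSON object.
	out, err := json.Marshal(ci)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}
```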
@ -0,0 +1,18 @@
package spec

import "errors"

var (
	// ErrUnknownTypeForReference indicates that a resolved reference was found in an unsupported container type
	ErrUnknownTypeForReference = errors.New("unknown type for the resolved reference")

	// ErrResolveRefNeedsAPointer indicates that a $ref target must be a valid JSON pointer
	ErrResolveRefNeedsAPointer = errors.New("resolve ref: target needs to be a pointer")

	// ErrDerefUnsupportedType indicates that a resolved reference was found in an unsupported container type.
	// At the moment, $ref are supported only inside: schemas, parameters, responses, path items
	ErrDerefUnsupportedType = errors.New("deref: unsupported type")

	// ErrExpandUnsupportedType indicates that $ref expansion is attempted on some invalid type
	ErrExpandUnsupportedType = errors.New("expand: unsupported type. Input should be of type *Parameter or *Response")
)
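These sentinel errors get wrapped with `%w` in the expander hunks further down (see `getRefAndSchema`), so callers can branch on them with `errors.Is` instead of matching error strings. An illustrative check:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// The expander wraps the sentinel with %w, as the later hunk shows.
	err := fmt.Errorf("unsupported type: %T: %w", struct{}{}, spec.ErrExpandUnsupportedType)

	// errors.Is unwraps the chain and matches the sentinel.
	if errors.Is(err, spec.ErrExpandUnsupportedType) {
		fmt.Println("input must be a *spec.Parameter or *spec.Response")
	}
}
```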
@ -17,140 +17,21 @@ package spec
 import (
 	"encoding/json"
 	"fmt"
-	"strings"
 )

-// ExpandOptions provides options for spec expand
+// ExpandOptions provides options for the spec expander.
 type ExpandOptions struct {
 	RelativeBase        string
 	SkipSchemas         bool
 	ContinueOnError     bool
+	PathLoader          func(string) (json.RawMessage, error) `json:"-"`
+
 	AbsoluteCircularRef bool
 }

-// ResolveRefWithBase resolves a reference against a context root with preservation of base path
-func ResolveRefWithBase(root interface{}, ref *Ref, opts *ExpandOptions) (*Schema, error) {
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return nil, err
-	}
-	specBasePath := ""
-	if opts != nil && opts.RelativeBase != "" {
-		specBasePath, _ = absPath(opts.RelativeBase)
-	}
-
-	result := new(Schema)
-	if err := resolver.Resolve(ref, result, specBasePath); err != nil {
-		return nil, err
-	}
-	return result, nil
-}
-
-// ResolveRef resolves a reference against a context root
-// ref is guaranteed to be in root (no need to go to external files)
-// ResolveRef is ONLY called from the code generation module
-func ResolveRef(root interface{}, ref *Ref) (*Schema, error) {
-	res, _, err := ref.GetPointer().Get(root)
-	if err != nil {
-		panic(err)
-	}
-	switch sch := res.(type) {
-	case Schema:
-		return &sch, nil
-	case *Schema:
-		return sch, nil
-	case map[string]interface{}:
-		b, _ := json.Marshal(sch)
-		newSch := new(Schema)
-		_ = json.Unmarshal(b, newSch)
-		return newSch, nil
-	default:
-		return nil, fmt.Errorf("unknown type for the resolved reference")
-	}
-}
-
-// ResolveParameter resolves a parameter reference against a context root
-func ResolveParameter(root interface{}, ref Ref) (*Parameter, error) {
-	return ResolveParameterWithBase(root, ref, nil)
-}
-
-// ResolveParameterWithBase resolves a parameter reference against a context root and base path
-func ResolveParameterWithBase(root interface{}, ref Ref, opts *ExpandOptions) (*Parameter, error) {
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	result := new(Parameter)
-	if err := resolver.Resolve(&ref, result, ""); err != nil {
-		return nil, err
-	}
-	return result, nil
-}
-
-// ResolveResponse resolves response a reference against a context root
-func ResolveResponse(root interface{}, ref Ref) (*Response, error) {
-	return ResolveResponseWithBase(root, ref, nil)
-}
-
-// ResolveResponseWithBase resolves response a reference against a context root and base path
-func ResolveResponseWithBase(root interface{}, ref Ref, opts *ExpandOptions) (*Response, error) {
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	result := new(Response)
-	if err := resolver.Resolve(&ref, result, ""); err != nil {
-		return nil, err
-	}
-	return result, nil
-}
-
-// ResolveItems resolves parameter items reference against a context root and base path.
-//
-// NOTE: stricly speaking, this construct is not supported by Swagger 2.0.
-// Similarly, $ref are forbidden in response headers.
-func ResolveItems(root interface{}, ref Ref, opts *ExpandOptions) (*Items, error) {
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return nil, err
-	}
-	basePath := ""
-	if opts.RelativeBase != "" {
-		basePath = opts.RelativeBase
-	}
-	result := new(Items)
-	if err := resolver.Resolve(&ref, result, basePath); err != nil {
-		return nil, err
-	}
-	return result, nil
-}
-
-// ResolvePathItem resolves response a path item against a context root and base path
-func ResolvePathItem(root interface{}, ref Ref, opts *ExpandOptions) (*PathItem, error) {
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return nil, err
-	}
-	basePath := ""
-	if opts.RelativeBase != "" {
-		basePath = opts.RelativeBase
-	}
-	result := new(PathItem)
-	if err := resolver.Resolve(&ref, result, basePath); err != nil {
-		return nil, err
-	}
-	return result, nil
-}
-
 // ExpandSpec expands the references in a swagger spec
 func ExpandSpec(spec *Swagger, options *ExpandOptions) error {
-	resolver, err := defaultSchemaLoader(spec, options, nil, nil)
-	// Just in case this ever returns an error.
-	if resolver.shouldStopOnError(err) {
-		return err
-	}
-
+	resolver := defaultSchemaLoader(spec, options, nil, nil)
+
 	// getting the base path of the spec to adjust all subsequent reference resolutions
 	specBasePath := ""
@ -160,9 +41,10 @@ func ExpandSpec(spec *Swagger, options *ExpandOptions) error {

 	if options == nil || !options.SkipSchemas {
 		for key, definition := range spec.Definitions {
-			var def *Schema
-			var err error
-			if def, err = expandSchema(definition, []string{fmt.Sprintf("#/definitions/%s", key)}, resolver, specBasePath); resolver.shouldStopOnError(err) {
+			parentRefs := make([]string, 0, 10)
+			parentRefs = append(parentRefs, fmt.Sprintf("#/definitions/%s", key))
+			def, err := expandSchema(definition, parentRefs, resolver, specBasePath)
+			if resolver.shouldStopOnError(err) {
 				return err
 			}
 			if def != nil {
@ -189,157 +71,140 @@ func ExpandSpec(spec *Swagger, options *ExpandOptions) error {

 	if spec.Paths != nil {
 		for key := range spec.Paths.Paths {
-			path := spec.Paths.Paths[key]
-			if err := expandPathItem(&path, resolver, specBasePath); resolver.shouldStopOnError(err) {
+			pth := spec.Paths.Paths[key]
+			if err := expandPathItem(&pth, resolver, specBasePath); resolver.shouldStopOnError(err) {
 				return err
 			}
-			spec.Paths.Paths[key] = path
+			spec.Paths.Paths[key] = pth
 		}
 	}

 	return nil
 }

-// baseForRoot loads in the cache the root document and produces a fake "root" base path entry
+const rootBase = ".root"
+
+// baseForRoot loads in the cache the root document and produces a fake ".root" base path entry
 // for further $ref resolution
+//
+// Setting the cache is optional and this parameter may safely be left to nil.
 func baseForRoot(root interface{}, cache ResolutionCache) string {
-	// cache the root document to resolve $ref's
-	const rootBase = "root"
-	if root != nil {
-		base, _ := absPath(rootBase)
-		normalizedBase := normalizeAbsPath(base)
-		debugLog("setting root doc in cache at: %s", normalizedBase)
-		if cache == nil {
-			cache = resCache
-		}
-		cache.Set(normalizedBase, root)
-		return rootBase
+	if root == nil {
+		return ""
 	}
-	return ""
+
+	// cache the root document to resolve $ref's
+	base, _ := absPath(rootBase)
+	normalizedBase := normalizeAbsPath(base)
+	cache.Set(normalizedBase, root)
+
+	return normalizedBase
 }

-// ExpandSchema expands the refs in the schema object with reference to the root object
-// go-openapi/validate uses this function
-// notice that it is impossible to reference a json schema in a different file other than root
+// ExpandSchema expands the refs in the schema object with reference to the root object.
+//
+// go-openapi/validate uses this function.
+//
+// Notice that it is impossible to reference a json schema in a different document other than root
+// (use ExpandSchemaWithBasePath to resolve external references).
+//
+// Setting the cache is optional and this parameter may safely be left to nil.
 func ExpandSchema(schema *Schema, root interface{}, cache ResolutionCache) error {
+	cache = cacheOrDefault(cache)
+	if root == nil {
+		root = schema
+	}
+
 	opts := &ExpandOptions{
 		// when a root is specified, cache the root as an in-memory document for $ref retrieval
 		RelativeBase:    baseForRoot(root, cache),
 		SkipSchemas:     false,
 		ContinueOnError: false,
-		// when no base path is specified, remaining $ref (circular) are rendered with an absolute path
-		AbsoluteCircularRef: true,
 	}

 	return ExpandSchemaWithBasePath(schema, cache, opts)
 }

-// ExpandSchemaWithBasePath expands the refs in the schema object, base path configured through expand options
+// ExpandSchemaWithBasePath expands the refs in the schema object, base path configured through expand options.
+//
+// Setting the cache is optional and this parameter may safely be left to nil.
 func ExpandSchemaWithBasePath(schema *Schema, cache ResolutionCache, opts *ExpandOptions) error {
 	if schema == nil {
 		return nil
 	}

+	cache = cacheOrDefault(cache)
+
 	var basePath string
 	if opts.RelativeBase != "" {
 		basePath, _ = absPath(opts.RelativeBase)
 	}

-	resolver, err := defaultSchemaLoader(nil, opts, cache, nil)
-	if err != nil {
-		return err
-	}
+	resolver := defaultSchemaLoader(nil, opts, cache, nil)

-	refs := []string{""}
-	var s *Schema
-	if s, err = expandSchema(*schema, refs, resolver, basePath); err != nil {
+	parentRefs := make([]string, 0, 10)
+	s, err := expandSchema(*schema, parentRefs, resolver, basePath)
+	if err != nil {
 		return err
 	}
-	*schema = *s
+	if s != nil {
+		// guard for when continuing on error
+		*schema = *s
+	}
+
 	return nil
 }

 func expandItems(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
-	if target.Items != nil {
-		if target.Items.Schema != nil {
-			t, err := expandSchema(*target.Items.Schema, parentRefs, resolver, basePath)
-			if err != nil {
-				return nil, err
-			}
-			*target.Items.Schema = *t
-		}
-		for i := range target.Items.Schemas {
-			t, err := expandSchema(target.Items.Schemas[i], parentRefs, resolver, basePath)
-			if err != nil {
-				return nil, err
-			}
-			target.Items.Schemas[i] = *t
-		}
+	if target.Items == nil {
+		return &target, nil
 	}
+
+	// array
+	if target.Items.Schema != nil {
+		t, err := expandSchema(*target.Items.Schema, parentRefs, resolver, basePath)
+		if err != nil {
+			return nil, err
+		}
+		*target.Items.Schema = *t
+	}
+
+	// tuple
+	for i := range target.Items.Schemas {
+		t, err := expandSchema(target.Items.Schemas[i], parentRefs, resolver, basePath)
+		if err != nil {
+			return nil, err
+		}
+		target.Items.Schemas[i] = *t
+	}
+
 	return &target, nil
 }

 func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 	if target.Ref.String() == "" && target.Ref.IsRoot() {
-		// normalizing is important
 		newRef := normalizeFileRef(&target.Ref, basePath)
 		target.Ref = *newRef
 		return &target, nil
-
 	}

 	// change the base path of resolution when an ID is encountered
 	// otherwise the basePath should inherit the parent's
-	// important: ID can be relative path
 	if target.ID != "" {
-		debugLog("schema has ID: %s", target.ID)
-		// handling the case when id is a folder
-		// remember that basePath has to be a file
-		refPath := target.ID
-		if strings.HasSuffix(target.ID, "/") {
-			// path.Clean here would not work correctly if basepath is http
-			refPath = fmt.Sprintf("%s%s", refPath, "placeholder.json")
-		}
-		basePath = normalizePaths(refPath, basePath)
+		basePath, _ = resolver.setSchemaID(target, target.ID, basePath)
 	}

-	var t *Schema
-	// if Ref is found, everything else doesn't matter
-	// Ref also changes the resolution scope of children expandSchema
 	if target.Ref.String() != "" {
-		// here the resolution scope is changed because a $ref was encountered
-		normalizedRef := normalizeFileRef(&target.Ref, basePath)
-		normalizedBasePath := normalizedRef.RemoteURI()
-
-		if resolver.isCircular(normalizedRef, basePath, parentRefs...) {
-			// this means there is a cycle in the recursion tree: return the Ref
-			// - circular refs cannot be expanded. We leave them as ref.
-			// - denormalization means that a new local file ref is set relative to the original basePath
-			debugLog("shortcut circular ref: basePath: %s, normalizedPath: %s, normalized ref: %s",
-				basePath, normalizedBasePath, normalizedRef.String())
-			if !resolver.options.AbsoluteCircularRef {
-				target.Ref = *denormalizeFileRef(normalizedRef, normalizedBasePath, resolver.context.basePath)
-			} else {
-				target.Ref = *normalizedRef
-			}
-			return &target, nil
-		}
-
-		debugLog("basePath: %s: calling Resolve with target: %#v", basePath, target)
-		if err := resolver.Resolve(&target.Ref, &t, basePath); resolver.shouldStopOnError(err) {
-			return nil, err
-		}
-
-		if t != nil {
-			parentRefs = append(parentRefs, normalizedRef.String())
-			var err error
-			transitiveResolver, err := resolver.transitiveResolver(basePath, target.Ref)
-			if transitiveResolver.shouldStopOnError(err) {
-				return nil, err
-			}
-
-			basePath = resolver.updateBasePath(transitiveResolver, normalizedBasePath)
-
-			return expandSchema(*t, parentRefs, transitiveResolver, basePath)
+		return expandSchemaRef(target, parentRefs, resolver, basePath)
+	}
+
+	for k := range target.Definitions {
+		tt, err := expandSchema(target.Definitions[k], parentRefs, resolver, basePath)
+		if resolver.shouldStopOnError(err) {
+			return &target, err
+		}
+		if tt != nil {
+			target.Definitions[k] = *tt
 		}
 	}

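Per the new doc comments above, `ExpandSchema` resolves `$ref` against the schema itself when `root` is nil, and a nil cache falls back to a clone of the package's base cache. A minimal sketch using a made-up self-referencing schema:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

// A self-contained JSON schema whose $ref points back into its own definitions.
const schemaJSON = `{
  "definitions": {"name": {"type": "string", "minLength": 1}},
  "type": "object",
  "properties": {"first": {"$ref": "#/definitions/name"}}
}`

func main() {
	var sch spec.Schema
	if err := json.Unmarshal([]byte(schemaJSON), &sch); err != nil {
		log.Fatal(err)
	}

	// root == nil: the schema itself serves as the root document;
	// cache == nil: replaced by a clone of the package-level base cache.
	if err := spec.ExpandSchema(&sch, nil, nil); err != nil {
		log.Fatal(err)
	}

	out, _ := json.Marshal(sch.Properties["first"])
	fmt.Println(string(out)) // the $ref has been replaced by the inlined definition
}
```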
@ -356,15 +221,21 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 		if resolver.shouldStopOnError(err) {
 			return &target, err
 		}
-		target.AllOf[i] = *t
+		if t != nil {
+			target.AllOf[i] = *t
+		}
 	}

 	for i := range target.AnyOf {
 		t, err := expandSchema(target.AnyOf[i], parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {
 			return &target, err
 		}
-		target.AnyOf[i] = *t
+		if t != nil {
+			target.AnyOf[i] = *t
+		}
 	}

 	for i := range target.OneOf {
 		t, err := expandSchema(target.OneOf[i], parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {
@ -374,6 +245,7 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			target.OneOf[i] = *t
 		}
 	}
+
 	if target.Not != nil {
 		t, err := expandSchema(*target.Not, parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {

@ -383,6 +255,7 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			*target.Not = *t
 		}
 	}
+
 	for k := range target.Properties {
 		t, err := expandSchema(target.Properties[k], parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {

@ -392,6 +265,7 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			target.Properties[k] = *t
 		}
 	}
+
 	if target.AdditionalProperties != nil && target.AdditionalProperties.Schema != nil {
 		t, err := expandSchema(*target.AdditionalProperties.Schema, parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {

@ -401,6 +275,7 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			*target.AdditionalProperties.Schema = *t
 		}
 	}
+
 	for k := range target.PatternProperties {
 		t, err := expandSchema(target.PatternProperties[k], parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {

@ -410,6 +285,7 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			target.PatternProperties[k] = *t
 		}
 	}
+
 	for k := range target.Dependencies {
 		if target.Dependencies[k].Schema != nil {
 			t, err := expandSchema(*target.Dependencies[k].Schema, parentRefs, resolver, basePath)

@ -421,6 +297,7 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			}
 		}
 	}
+
 	if target.AdditionalItems != nil && target.AdditionalItems.Schema != nil {
 		t, err := expandSchema(*target.AdditionalItems.Schema, parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {
@ -430,41 +307,73 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
 			*target.AdditionalItems.Schema = *t
 		}
 	}
-	for k := range target.Definitions {
-		t, err := expandSchema(target.Definitions[k], parentRefs, resolver, basePath)
-		if resolver.shouldStopOnError(err) {
-			return &target, err
-		}
-		if t != nil {
-			target.Definitions[k] = *t
-		}
-	}
 	return &target, nil
 }

+func expandSchemaRef(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
+	// if a Ref is found, all sibling fields are skipped
+	// Ref also changes the resolution scope of children expandSchema
+
+	// here the resolution scope is changed because a $ref was encountered
+	normalizedRef := normalizeFileRef(&target.Ref, basePath)
+	normalizedBasePath := normalizedRef.RemoteURI()
+
+	if resolver.isCircular(normalizedRef, basePath, parentRefs...) {
+		// this means there is a cycle in the recursion tree: return the Ref
+		// - circular refs cannot be expanded. We leave them as ref.
+		// - denormalization means that a new local file ref is set relative to the original basePath
+		debugLog("short circuit circular ref: basePath: %s, normalizedPath: %s, normalized ref: %s",
+			basePath, normalizedBasePath, normalizedRef.String())
+		if !resolver.options.AbsoluteCircularRef {
+			target.Ref = *denormalizeFileRef(normalizedRef, normalizedBasePath, resolver.context.basePath)
+		} else {
+			target.Ref = *normalizedRef
+		}
+		return &target, nil
+	}
+
+	var t *Schema
+	err := resolver.Resolve(&target.Ref, &t, basePath)
+	if resolver.shouldStopOnError(err) {
+		return nil, err
+	}
+
+	if t == nil {
+		// guard for when continuing on error
+		return &target, nil
+	}
+
+	parentRefs = append(parentRefs, normalizedRef.String())
+	transitiveResolver := resolver.transitiveResolver(basePath, target.Ref)
+
+	basePath = resolver.updateBasePath(transitiveResolver, normalizedBasePath)
+
+	return expandSchema(*t, parentRefs, transitiveResolver, basePath)
+}
+
 func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string) error {
 	if pathItem == nil {
 		return nil
 	}

-	parentRefs := []string{}
+	parentRefs := make([]string, 0, 10)
 	if err := resolver.deref(pathItem, parentRefs, basePath); resolver.shouldStopOnError(err) {
 		return err
 	}
-	if pathItem.Ref.String() != "" {
-		var err error
-		resolver, err = resolver.transitiveResolver(basePath, pathItem.Ref)
-		if resolver.shouldStopOnError(err) {
-			return err
-		}
-	}
-	pathItem.Ref = Ref{}

-	for idx := range pathItem.Parameters {
-		if err := expandParameterOrResponse(&(pathItem.Parameters[idx]), resolver, basePath); resolver.shouldStopOnError(err) {
+	if pathItem.Ref.String() != "" {
+		transitiveResolver := resolver.transitiveResolver(basePath, pathItem.Ref)
+		basePath = transitiveResolver.updateBasePath(resolver, basePath)
+		resolver = transitiveResolver
+	}
+
+	pathItem.Ref = Ref{}
+	for i := range pathItem.Parameters {
+		if err := expandParameterOrResponse(&(pathItem.Parameters[i]), resolver, basePath); resolver.shouldStopOnError(err) {
 			return err
 		}
 	}

 	ops := []*Operation{
 		pathItem.Get,
 		pathItem.Head,
@ -479,6 +388,7 @@ func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string) error {
 			return err
 		}
 	}
+
 	return nil
 }

@ -495,42 +405,47 @@ func expandOperation(op *Operation, resolver *schemaLoader, basePath string) error {
 		op.Parameters[i] = param
 	}

-	if op.Responses != nil {
-		responses := op.Responses
-		if err := expandParameterOrResponse(responses.Default, resolver, basePath); resolver.shouldStopOnError(err) {
+	if op.Responses == nil {
+		return nil
+	}
+
+	responses := op.Responses
+	if err := expandParameterOrResponse(responses.Default, resolver, basePath); resolver.shouldStopOnError(err) {
+		return err
+	}
+
+	for code := range responses.StatusCodeResponses {
+		response := responses.StatusCodeResponses[code]
+		if err := expandParameterOrResponse(&response, resolver, basePath); resolver.shouldStopOnError(err) {
 			return err
 		}
-		for code := range responses.StatusCodeResponses {
-			response := responses.StatusCodeResponses[code]
-			if err := expandParameterOrResponse(&response, resolver, basePath); resolver.shouldStopOnError(err) {
-				return err
-			}
-			responses.StatusCodeResponses[code] = response
-		}
+		responses.StatusCodeResponses[code] = response
 	}

 	return nil
 }

 // ExpandResponseWithRoot expands a response based on a root document, not a fetchable document
+//
+// Notice that it is impossible to reference a json schema in a different document other than root
+// (use ExpandResponse to resolve external references).
+//
+// Setting the cache is optional and this parameter may safely be left to nil.
 func ExpandResponseWithRoot(response *Response, root interface{}, cache ResolutionCache) error {
+	cache = cacheOrDefault(cache)
 	opts := &ExpandOptions{
 		RelativeBase:    baseForRoot(root, cache),
 		SkipSchemas:     false,
 		ContinueOnError: false,
-		// when no base path is specified, remaining $ref (circular) are rendered with an absolute path
-		AbsoluteCircularRef: true,
-	}
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return err
 	}
+	resolver := defaultSchemaLoader(root, opts, cache, nil)

 	return expandParameterOrResponse(response, resolver, opts.RelativeBase)
 }

 // ExpandResponse expands a response based on a basepath
-// This is the exported version of expandResponse
-// all refs inside response will be resolved relative to basePath
+//
+// All refs inside response will be resolved relative to basePath
 func ExpandResponse(response *Response, basePath string) error {
 	var specBasePath string
 	if basePath != "" {
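`ExpandResponseWithRoot` above targets the case where the `$ref` lives in an in-memory root document rather than a fetchable file. A rough sketch under that assumption, with a made-up root and response:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

// A made-up root document holding the definition the response refers to.
const rootDoc = `{
  "swagger": "2.0",
  "info": {"title": "minimal", "version": "1.0.0"},
  "paths": {},
  "definitions": {"Error": {"type": "object", "properties": {"message": {"type": "string"}}}}
}`

func main() {
	var sw spec.Swagger
	if err := json.Unmarshal([]byte(rootDoc), &sw); err != nil {
		log.Fatal(err)
	}

	var resp spec.Response
	if err := json.Unmarshal([]byte(`{"description": "failure", "schema": {"$ref": "#/definitions/Error"}}`), &resp); err != nil {
		log.Fatal(err)
	}

	// A nil cache is replaced by a clone of the package's base cache (cacheOrDefault above);
	// the $ref is resolved against the cached in-memory root, not against the filesystem.
	if err := spec.ExpandResponseWithRoot(&resp, &sw, nil); err != nil {
		log.Fatal(err)
	}

	out, _ := json.Marshal(resp)
	fmt.Println(string(out))
}
```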
@ -539,27 +454,23 @@ func ExpandResponse(response *Response, basePath string) error {
 	opts := &ExpandOptions{
 		RelativeBase: specBasePath,
 	}
-	resolver, err := defaultSchemaLoader(nil, opts, nil, nil)
-	if err != nil {
-		return err
-	}
+	resolver := defaultSchemaLoader(nil, opts, nil, nil)

 	return expandParameterOrResponse(response, resolver, opts.RelativeBase)
 }

-// ExpandParameterWithRoot expands a parameter based on a root document, not a fetchable document
+// ExpandParameterWithRoot expands a parameter based on a root document, not a fetchable document.
+//
+// Notice that it is impossible to reference a json schema in a different document other than root
+// (use ExpandParameter to resolve external references).
 func ExpandParameterWithRoot(parameter *Parameter, root interface{}, cache ResolutionCache) error {
+	cache = cacheOrDefault(cache)
 	opts := &ExpandOptions{
 		RelativeBase:    baseForRoot(root, cache),
 		SkipSchemas:     false,
 		ContinueOnError: false,
-		// when no base path is specified, remaining $ref (circular) are rendered with an absolute path
-		AbsoluteCircularRef: true,
-	}
-	resolver, err := defaultSchemaLoader(root, opts, nil, nil)
-	if err != nil {
-		return err
 	}
+	resolver := defaultSchemaLoader(root, opts, cache, nil)

 	return expandParameterOrResponse(parameter, resolver, opts.RelativeBase)
 }
@ -575,17 +486,17 @@ func ExpandParameter(parameter *Parameter, basePath string) error {
 	opts := &ExpandOptions{
 		RelativeBase: specBasePath,
 	}
-	resolver, err := defaultSchemaLoader(nil, opts, nil, nil)
-	if err != nil {
-		return err
-	}
+	resolver := defaultSchemaLoader(nil, opts, nil, nil)

 	return expandParameterOrResponse(parameter, resolver, opts.RelativeBase)
 }

 func getRefAndSchema(input interface{}) (*Ref, *Schema, error) {
-	var ref *Ref
-	var sch *Schema
+	var (
+		ref *Ref
+		sch *Schema
+	)
+
 	switch refable := input.(type) {
 	case *Parameter:
 		if refable == nil {
@ -600,8 +511,9 @@ func getRefAndSchema(input interface{}) (*Ref, *Schema, error) {
 		ref = &refable.Ref
 		sch = refable.Schema
 	default:
-		return nil, nil, fmt.Errorf("expand: unsupported type %T. Input should be of type *Parameter or *Response", input)
+		return nil, nil, fmt.Errorf("unsupported type: %T: %w", input, ErrExpandUnsupportedType)
 	}

 	return ref, sch, nil
 }

@ -610,41 +522,70 @@ func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePath string) error {
 	if err != nil {
 		return err
 	}

 	if ref == nil {
 		return nil
 	}
-	parentRefs := []string{}
-	if err := resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) {
+
+	parentRefs := make([]string, 0, 10)
+	if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) {
 		return err
 	}

 	ref, sch, _ := getRefAndSchema(input)
 	if ref.String() != "" {
-		transitiveResolver, err := resolver.transitiveResolver(basePath, *ref)
-		if transitiveResolver.shouldStopOnError(err) {
-			return err
-		}
+		transitiveResolver := resolver.transitiveResolver(basePath, *ref)
 		basePath = resolver.updateBasePath(transitiveResolver, basePath)
 		resolver = transitiveResolver
 	}

-	if sch != nil && sch.Ref.String() != "" {
-		// schema expanded to a $ref in another root
-		var ern error
-		sch.Ref, ern = NewRef(normalizePaths(sch.Ref.String(), ref.RemoteURI()))
+	if sch == nil {
+		// nothing to be expanded
+		if ref != nil {
+			*ref = Ref{}
+		}
+		return nil
+	}
+
+	if sch.Ref.String() != "" {
+		rebasedRef, ern := NewRef(normalizePaths(sch.Ref.String(), basePath))
 		if ern != nil {
 			return ern
 		}
+
+		switch {
+		case resolver.isCircular(&rebasedRef, basePath, parentRefs...):
+			// this is a circular $ref: stop expansion
+			if !resolver.options.AbsoluteCircularRef {
+				sch.Ref = *denormalizeFileRef(&rebasedRef, basePath, resolver.context.basePath)
+			} else {
+				sch.Ref = rebasedRef
+			}
+		case !resolver.options.SkipSchemas:
+			// schema expanded to a $ref in another root
+			sch.Ref = rebasedRef
+		default:
+			// skip schema expansion but rebase $ref to schema
+			sch.Ref = *denormalizeFileRef(&rebasedRef, basePath, resolver.context.basePath)
+		}
 	}

 	if ref != nil {
 		*ref = Ref{}
 	}

-	if !resolver.options.SkipSchemas && sch != nil {
+	// expand schema
+	if !resolver.options.SkipSchemas {
 		s, err := expandSchema(*sch, parentRefs, resolver, basePath)
 		if resolver.shouldStopOnError(err) {
 			return err
 		}
+		if s == nil {
+			// guard for when continuing on error
+			return nil
+		}
 		*sch = *s
 	}

 	return nil
 }

@ -1,17 +1,13 @@
 module github.com/go-openapi/spec

 require (
-	github.com/go-openapi/jsonpointer v0.19.3
-	github.com/go-openapi/jsonreference v0.19.2
-	github.com/go-openapi/swag v0.19.5
-	github.com/kr/pty v1.1.5 // indirect
-	github.com/stretchr/objx v0.2.0 // indirect
-	github.com/stretchr/testify v1.3.0
-	golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8 // indirect
-	golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 // indirect
-	golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f // indirect
-	golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59 // indirect
-	gopkg.in/yaml.v2 v2.2.2
+	github.com/go-openapi/jsonpointer v0.19.5
+	github.com/go-openapi/jsonreference v0.19.5
+	github.com/go-openapi/swag v0.19.13
+	github.com/stretchr/testify v1.6.1
+	golang.org/x/net v0.0.0-20210119194325-5f4716e94777 // indirect
+	golang.org/x/text v0.3.5 // indirect
+	gopkg.in/yaml.v2 v2.4.0
 )

 go 1.13
@ -1,74 +1,70 @@
-github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4=
-github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
 github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
 github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
 github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
 github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/go-openapi/jsonpointer v0.17.0 h1:nH6xp8XdXHx8dqveo0ZuJBluCO2qGrPbDNZ0dwoRHP0=
-github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
-github.com/go-openapi/jsonpointer v0.19.0 h1:FTUMcX77w5rQkClIzDtTxvn6Bsa894CcrzNj2MMfeg8=
-github.com/go-openapi/jsonpointer v0.19.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
-github.com/go-openapi/jsonpointer v0.19.2 h1:A9+F4Dc/MCNB5jibxf6rRvOvR/iFgQdyNx9eIhnGqq0=
-github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
 github.com/go-openapi/jsonpointer v0.19.3 h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=
 github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
-github.com/go-openapi/jsonreference v0.19.0 h1:BqWKpV1dFd+AuiKlgtddwVIFQsuMpxfBDBHGfM2yNpk=
-github.com/go-openapi/jsonreference v0.19.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
-github.com/go-openapi/jsonreference v0.19.2 h1:o20suLFB4Ri0tuzpWtyHlh7E7HnkqTNLq6aR6WVNS1w=
-github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
+github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM=
+github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
-github.com/go-openapi/swag v0.17.0 h1:iqrgMg7Q7SvtbWLlltPrkMs0UBJI6oTSs79JFRUi880=
-github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
-github.com/go-openapi/swag v0.19.2 h1:jvO6bCMBEilGwMfHhrd61zIID4oIFdwb76V17SM88dE=
-github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
 github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY=
 github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.19.13 h1:233UVgMy1DlmCYYfOiFpta6e2urloh+sEs5id6lyzog=
+github.com/go-openapi/swag v0.19.13/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
 github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
 github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
-github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic=
-github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 h1:nTT4s92Dgz2HlrB2NaMgvlfqHH39OgMhA7z3PK7PGD4=
 github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
 github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq0y3QnmWAArdw9PqbmotexnWx/FU8=
 github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.7.6 h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA=
+github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
 github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
|
||||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
|
||||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
||||||
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
|
||||||
golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE=
|
|
||||||
golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
|
||||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
|
||||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
|
||||||
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980 h1:dfGZHvZk057jK2MCeWus/TowKpJ8y4AmooUzdBSR9GU=
|
|
||||||
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
|
||||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM=
|
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM=
|
||||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew=
|
||||||
|
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
|
||||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ=
|
||||||
|
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
@@ -141,6 +141,12 @@ func (h *Header) AllowDuplicates() *Header {
 	return h
 }
 
+// WithValidations is a fluent method to set header validations
+func (h *Header) WithValidations(val CommonValidations) *Header {
+	h.SetValidations(SchemaValidations{CommonValidations: val})
+	return h
+}
+
 // MarshalJSON marshal this to JSON
 func (h Header) MarshalJSON() ([]byte, error) {
 	b1, err := json.Marshal(h.CommonValidations)
@@ -53,22 +53,6 @@ func (s *SimpleSchema) ItemsTypeName() string {
 	return s.Items.TypeName()
 }
 
-// CommonValidations describe common JSON-schema validations
-type CommonValidations struct {
-	Maximum          *float64      `json:"maximum,omitempty"`
-	ExclusiveMaximum bool          `json:"exclusiveMaximum,omitempty"`
-	Minimum          *float64      `json:"minimum,omitempty"`
-	ExclusiveMinimum bool          `json:"exclusiveMinimum,omitempty"`
-	MaxLength        *int64        `json:"maxLength,omitempty"`
-	MinLength        *int64        `json:"minLength,omitempty"`
-	Pattern          string        `json:"pattern,omitempty"`
-	MaxItems         *int64        `json:"maxItems,omitempty"`
-	MinItems         *int64        `json:"minItems,omitempty"`
-	UniqueItems      bool          `json:"uniqueItems,omitempty"`
-	MultipleOf       *float64      `json:"multipleOf,omitempty"`
-	Enum             []interface{} `json:"enum,omitempty"`
-}
-
 // Items a limited subset of JSON-Schema's items object.
 // It is used by parameter definitions that are not located in "body".
 //
@@ -180,6 +164,12 @@ func (i *Items) AllowDuplicates() *Items {
 	return i
 }
 
+// WithValidations is a fluent method to set Items validations
+func (i *Items) WithValidations(val CommonValidations) *Items {
+	i.SetValidations(SchemaValidations{CommonValidations: val})
+	return i
+}
+
 // UnmarshalJSON hydrates this items instance with the data from JSON
 func (i *Items) UnmarshalJSON(data []byte) error {
 	var validations CommonValidations
@ -14,10 +14,43 @@
|
||||||
|
|
||||||
package spec
|
package spec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
|
||||||
|
"github.com/go-openapi/swag"
|
||||||
|
)
|
||||||
|
|
||||||
// License information for the exposed API.
|
// License information for the exposed API.
|
||||||
//
|
//
|
||||||
// For more information: http://goo.gl/8us55a#licenseObject
|
// For more information: http://goo.gl/8us55a#licenseObject
|
||||||
type License struct {
|
type License struct {
|
||||||
|
LicenseProps
|
||||||
|
VendorExtensible
|
||||||
|
}
|
||||||
|
|
||||||
|
// LicenseProps holds the properties of a License object
|
||||||
|
type LicenseProps struct {
|
||||||
Name string `json:"name,omitempty"`
|
Name string `json:"name,omitempty"`
|
||||||
URL string `json:"url,omitempty"`
|
URL string `json:"url,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// UnmarshalJSON hydrates License from json
|
||||||
|
func (l *License) UnmarshalJSON(data []byte) error {
|
||||||
|
if err := json.Unmarshal(data, &l.LicenseProps); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return json.Unmarshal(data, &l.VendorExtensible)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalJSON produces License as json
|
||||||
|
func (l License) MarshalJSON() ([]byte, error) {
|
||||||
|
b1, err := json.Marshal(l.LicenseProps)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
b2, err := json.Marshal(l.VendorExtensible)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return swag.ConcatJSON(b1, b2), nil
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@@ -20,9 +20,12 @@ import (
 	"os"
 	"path"
 	"path/filepath"
+	"runtime"
 	"strings"
 )
 
+const windowsOS = "windows"
+
 // normalize absolute path for cache.
 // on Windows, drive letters should be converted to lower as scheme in net/url.URL
 func normalizeAbsPath(path string) string {
@ -71,27 +74,51 @@ func normalizePaths(refPath, base string) string {
|
||||||
return baseURL.String()
|
return baseURL.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// isRoot is a temporary hack to discern windows file ref for ref.IsRoot().
|
||||||
|
// TODO: a more thorough change is needed to handle windows file refs.
|
||||||
|
func isRoot(ref *Ref) bool {
|
||||||
|
if runtime.GOOS != windowsOS {
|
||||||
|
return ref.IsRoot()
|
||||||
|
}
|
||||||
|
return !filepath.IsAbs(ref.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
// isAbs is a temporary hack to discern windows file ref for url IsAbs().
|
||||||
|
// TODO: a more thorough change is needed to handle windows file refs.
|
||||||
|
func isAbs(u *url.URL) bool {
|
||||||
|
if runtime.GOOS != windowsOS {
|
||||||
|
return u.IsAbs()
|
||||||
|
}
|
||||||
|
if len(u.Scheme) <= 1 {
|
||||||
|
// drive letter got caught as URI scheme
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return u.IsAbs()
|
||||||
|
}
|
||||||
|
|
||||||
// denormalizePaths returns to simplest notation on file $ref,
|
// denormalizePaths returns to simplest notation on file $ref,
|
||||||
// i.e. strips the absolute path and sets a path relative to the base path.
|
// i.e. strips the absolute path and sets a path relative to the base path.
|
||||||
//
|
//
|
||||||
// This is currently used when we rewrite ref after a circular ref has been detected
|
// This is currently used when we rewrite ref after a circular ref has been detected
|
||||||
func denormalizeFileRef(ref *Ref, relativeBase, originalRelativeBase string) *Ref {
|
func denormalizeFileRef(ref *Ref, relativeBase, originalRelativeBase string) *Ref {
|
||||||
debugLog("denormalizeFileRef for: %s", ref.String())
|
debugLog("denormalizeFileRef for: %s (relative: %s, original: %s)", ref.String(),
|
||||||
|
relativeBase, originalRelativeBase)
|
||||||
|
|
||||||
if ref.String() == "" || ref.IsRoot() || ref.HasFragmentOnly {
|
// log.Printf("denormalize: %s, IsRoot: %t,HasFragmentOnly: %t, HasFullURL: %t", ref.String(), ref.IsRoot(), ref.HasFragmentOnly, ref.HasFullURL)
|
||||||
|
if ref.String() == "" || isRoot(ref) || ref.HasFragmentOnly {
|
||||||
return ref
|
return ref
|
||||||
}
|
}
|
||||||
// strip relativeBase from URI
|
// strip relativeBase from URI
|
||||||
relativeBaseURL, _ := url.Parse(relativeBase)
|
relativeBaseURL, _ := url.Parse(relativeBase)
|
||||||
relativeBaseURL.Fragment = ""
|
relativeBaseURL.Fragment = ""
|
||||||
|
|
||||||
if relativeBaseURL.IsAbs() && strings.HasPrefix(ref.String(), relativeBase) {
|
if isAbs(relativeBaseURL) && strings.HasPrefix(ref.String(), relativeBase) {
|
||||||
// this should work for absolute URI (e.g. http://...): we have an exact match, just trim prefix
|
// this should work for absolute URI (e.g. http://...): we have an exact match, just trim prefix
|
||||||
r, _ := NewRef(strings.TrimPrefix(ref.String(), relativeBase))
|
r, _ := NewRef(strings.TrimPrefix(ref.String(), relativeBase))
|
||||||
return &r
|
return &r
|
||||||
}
|
}
|
||||||
|
|
||||||
if relativeBaseURL.IsAbs() {
|
if isAbs(relativeBaseURL) {
|
||||||
// other absolute URL get unchanged (i.e. with a non-empty scheme)
|
// other absolute URL get unchanged (i.e. with a non-empty scheme)
|
||||||
return ref
|
return ref
|
||||||
}
|
}
|
||||||
|
|
@ -111,7 +138,7 @@ func denormalizeFileRef(ref *Ref, relativeBase, originalRelativeBase string) *Re
|
||||||
// my normalized ref points to: /mypath/item.json#/target
|
// my normalized ref points to: /mypath/item.json#/target
|
||||||
// expected result: item.json#/target
|
// expected result: item.json#/target
|
||||||
parts := strings.Split(ref.String(), "#")
|
parts := strings.Split(ref.String(), "#")
|
||||||
relativePath, err := filepath.Rel(path.Dir(originalRelativeBaseURL.String()), parts[0])
|
relativePath, err := filepath.Rel(filepath.Dir(originalRelativeBaseURL.String()), parts[0])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// there is no common ancestor (e.g. different drives on windows)
|
// there is no common ancestor (e.g. different drives on windows)
|
||||||
// leaves the ref unchanged
|
// leaves the ref unchanged
|
||||||
|
|
@@ -132,8 +159,6 @@ func normalizeFileRef(ref *Ref, relativeBase string) *Ref {
 		return &r
 	}
 
-	debugLog("normalizing %s against %s", ref.String(), relativeBase)
-
 	s := normalizePaths(ref.String(), relativeBase)
 	r, _ := NewRef(s)
 	return &r

@@ -148,5 +173,5 @@ func absPath(fname string) (string, error) {
 		return fname, nil
 	}
 	wd, err := os.Getwd()
-	return filepath.Join(wd, fname), err
+	return normalizeAbsPath(filepath.Join(wd, fname)), err
 }
@@ -25,7 +25,6 @@ import (
 )
 
 func init() {
-	//gob.Register(map[string][]interface{}{})
 	gob.Register(map[string]interface{}{})
 	gob.Register([]interface{}{})
 }
@@ -39,8 +39,7 @@ func PathParam(name string) *Parameter {
 
 // BodyParam creates a body parameter
 func BodyParam(name string, schema *Schema) *Parameter {
-	return &Parameter{ParamProps: ParamProps{Name: name, In: "body", Schema: schema},
-		SimpleSchema: SimpleSchema{Type: "object"}}
+	return &Parameter{ParamProps: ParamProps{Name: name, In: "body", Schema: schema}}
 }
 
 // FormDataParam creates a body parameter

@@ -58,7 +57,7 @@ func FileParam(name string) *Parameter {
 func SimpleArrayParam(name, tpe, fmt string) *Parameter {
 	return &Parameter{ParamProps: ParamProps{Name: name},
 		SimpleSchema: SimpleSchema{Type: jsonArray, CollectionFormat: "csv",
-			Items: &Items{SimpleSchema: SimpleSchema{Type: "string", Format: fmt}}}}
+			Items: &Items{SimpleSchema: SimpleSchema{Type: tpe, Format: fmt}}}}
 }
 
 // ParamRef creates a parameter that's a json reference
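The SimpleArrayParam change above means the element type argument is no longer ignored. A minimal sketch of the effect, assuming the vendored package is imported under its usual `github.com/go-openapi/spec` path:

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/go-openapi/spec"
)

func main() {
    // A CSV-formatted array parameter whose items are integers.
    // With the fix, the item type comes from the second argument ("integer")
    // instead of always being rendered as "string".
    p := spec.SimpleArrayParam("sizes", "integer", "int32")

    b, _ := json.MarshalIndent(p, "", "  ")
    fmt.Println(string(b))
}
```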
@@ -278,6 +277,12 @@ func (p *Parameter) AllowDuplicates() *Parameter {
 	return p
 }
 
+// WithValidations is a fluent method to set parameter validations
+func (p *Parameter) WithValidations(val CommonValidations) *Parameter {
+	p.SetValidations(SchemaValidations{CommonValidations: val})
+	return p
+}
+
 // UnmarshalJSON hydrates this items instance with the data from JSON
 func (p *Parameter) UnmarshalJSON(data []byte) error {
 	if err := json.Unmarshal(data, &p.CommonValidations); err != nil {
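The same fluent helper is added to Header, Items and Parameter; each wraps the supplied CommonValidations into SchemaValidations and delegates to SetValidations. A hedged sketch of the parameter variant (QueryParam and CommonValidations come from the same package):

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/go-openapi/spec"
)

func main() {
    maxLen := int64(64)

    // Attach common validations to a query parameter in one fluent call.
    p := spec.QueryParam("name").WithValidations(spec.CommonValidations{
        MaxLength: &maxLen,
        Pattern:   `^[a-z0-9-]+$`,
    })

    b, _ := json.Marshal(p)
    fmt.Println(string(b))
}
```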
@@ -0,0 +1,91 @@
+package spec
+
+import (
+	"bytes"
+	"encoding/json"
+	"reflect"
+	"sort"
+)
+
+// OrderSchemaItem holds a named schema (e.g. from a property of an object)
+type OrderSchemaItem struct {
+	Name string
+	Schema
+}
+
+// OrderSchemaItems is a sortable slice of named schemas.
+// The ordering is defined by the x-order schema extension.
+type OrderSchemaItems []OrderSchemaItem
+
+// MarshalJSON produces a json object with keys defined by the name schemas
+// of the OrderSchemaItems slice, keeping the original order of the slice.
+func (items OrderSchemaItems) MarshalJSON() ([]byte, error) {
+	buf := bytes.NewBuffer(nil)
+	buf.WriteString("{")
+	for i := range items {
+		if i > 0 {
+			buf.WriteString(",")
+		}
+		buf.WriteString("\"")
+		buf.WriteString(items[i].Name)
+		buf.WriteString("\":")
+		bs, err := json.Marshal(&items[i].Schema)
+		if err != nil {
+			return nil, err
+		}
+		buf.Write(bs)
+	}
+	buf.WriteString("}")
+	return buf.Bytes(), nil
+}
+
+func (items OrderSchemaItems) Len() int      { return len(items) }
+func (items OrderSchemaItems) Swap(i, j int) { items[i], items[j] = items[j], items[i] }
+func (items OrderSchemaItems) Less(i, j int) (ret bool) {
+	ii, oki := items[i].Extensions.GetString("x-order")
+	ij, okj := items[j].Extensions.GetString("x-order")
+	if oki {
+		if okj {
+			defer func() {
+				if err := recover(); err != nil {
+					defer func() {
+						if err = recover(); err != nil {
+							ret = items[i].Name < items[j].Name
+						}
+					}()
+					ret = reflect.ValueOf(ii).String() < reflect.ValueOf(ij).String()
+				}
+			}()
+			return reflect.ValueOf(ii).Int() < reflect.ValueOf(ij).Int()
+		}
+		return true
+	} else if okj {
+		return false
+	}
+	return items[i].Name < items[j].Name
+}
+
+// SchemaProperties is a map representing the properties of a Schema object.
+// It knows how to transform its keys into an ordered slice.
+type SchemaProperties map[string]Schema
+
+// ToOrderedSchemaItems transforms the map of properties into a sortable slice
+func (properties SchemaProperties) ToOrderedSchemaItems() OrderSchemaItems {
+	items := make(OrderSchemaItems, 0, len(properties))
+	for k, v := range properties {
+		items = append(items, OrderSchemaItem{
+			Name:   k,
+			Schema: v,
+		})
+	}
+	sort.Sort(items)
+	return items
+}
+
+// MarshalJSON produces properties as json, keeping their order.
+func (properties SchemaProperties) MarshalJSON() ([]byte, error) {
+	if properties == nil {
+		return []byte("null"), nil
+	}
+	return json.Marshal(properties.ToOrderedSchemaItems())
+}
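The new SchemaProperties type gives schema properties a deterministic order when marshaling: items carrying an x-order extension are sorted by it, everything else falls back to the property name. A small sketch of the name-ordered case, assuming the vendored package is used directly:

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/go-openapi/spec"
)

func main() {
    s := spec.Schema{
        SchemaProps: spec.SchemaProps{
            Properties: spec.SchemaProperties{
                // Map iteration order is random, but the output is not:
                // without x-order extensions the properties sort by name.
                "b_last":  *spec.StringProperty(),
                "a_first": *spec.Int64Property(),
            },
        },
    }

    b, _ := json.Marshal(s)
    fmt.Println(string(b)) // "a_first" is always rendered before "b_last"
}
```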
@@ -48,7 +48,7 @@ type Ref struct {
 // RemoteURI gets the remote uri part of the ref
 func (r *Ref) RemoteURI() string {
 	if r.String() == "" {
-		return r.String()
+		return ""
 	}
 
 	u := *r.GetURL()

@@ -68,10 +68,12 @@ func (r *Ref) IsValidURI(basepaths ...string) bool {
 	}
 
 	if r.HasFullURL {
+		//nolint:noctx,gosec
 		rr, err := http.Get(v)
 		if err != nil {
 			return false
 		}
+		defer rr.Body.Close()
 
 		return rr.StatusCode/100 == 2
 	}
@ -0,0 +1,128 @@
|
||||||
|
package spec
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/go-openapi/swag"
|
||||||
|
)
|
||||||
|
|
||||||
|
func resolveAnyWithBase(root interface{}, ref *Ref, result interface{}, options *ExpandOptions) error {
|
||||||
|
resolver := defaultSchemaLoader(root, options, nil, nil)
|
||||||
|
|
||||||
|
basePath := ""
|
||||||
|
if options != nil && options.RelativeBase != "" {
|
||||||
|
basePath, _ = absPath(options.RelativeBase)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := resolver.Resolve(ref, result, basePath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveRefWithBase resolves a reference against a context root with preservation of base path
|
||||||
|
func ResolveRefWithBase(root interface{}, ref *Ref, options *ExpandOptions) (*Schema, error) {
|
||||||
|
result := new(Schema)
|
||||||
|
err := resolveAnyWithBase(root, ref, result, options)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveRef resolves a reference for a schema against a context root
|
||||||
|
// ref is guaranteed to be in root (no need to go to external files)
|
||||||
|
//
|
||||||
|
// ResolveRef is ONLY called from the code generation module
|
||||||
|
func ResolveRef(root interface{}, ref *Ref) (*Schema, error) {
|
||||||
|
res, _, err := ref.GetPointer().Get(root)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch sch := res.(type) {
|
||||||
|
case Schema:
|
||||||
|
return &sch, nil
|
||||||
|
case *Schema:
|
||||||
|
return sch, nil
|
||||||
|
case map[string]interface{}:
|
||||||
|
newSch := new(Schema)
|
||||||
|
if err = swag.DynamicJSONToStruct(sch, newSch); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return newSch, nil
|
||||||
|
default:
|
||||||
|
return nil, ErrUnknownTypeForReference
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveParameterWithBase resolves a parameter reference against a context root and base path
|
||||||
|
func ResolveParameterWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Parameter, error) {
|
||||||
|
result := new(Parameter)
|
||||||
|
err := resolveAnyWithBase(root, &ref, result, options)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveParameter resolves a parameter reference against a context root
|
||||||
|
func ResolveParameter(root interface{}, ref Ref) (*Parameter, error) {
|
||||||
|
return ResolveParameterWithBase(root, ref, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveResponseWithBase resolves response a reference against a context root and base path
|
||||||
|
func ResolveResponseWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Response, error) {
|
||||||
|
result := new(Response)
|
||||||
|
err := resolveAnyWithBase(root, &ref, result, options)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveResponse resolves response a reference against a context root
|
||||||
|
func ResolveResponse(root interface{}, ref Ref) (*Response, error) {
|
||||||
|
return ResolveResponseWithBase(root, ref, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolvePathItemWithBase resolves response a path item against a context root and base path
|
||||||
|
func ResolvePathItemWithBase(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) {
|
||||||
|
result := new(PathItem)
|
||||||
|
err := resolveAnyWithBase(root, &ref, result, options)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolvePathItem resolves response a path item against a context root and base path
|
||||||
|
//
|
||||||
|
// Deprecated: use ResolvePathItemWithBase instead
|
||||||
|
func ResolvePathItem(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) {
|
||||||
|
return ResolvePathItemWithBase(root, ref, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveItemsWithBase resolves parameter items reference against a context root and base path.
|
||||||
|
//
|
||||||
|
// NOTE: stricly speaking, this construct is not supported by Swagger 2.0.
|
||||||
|
// Similarly, $ref are forbidden in response headers.
|
||||||
|
func ResolveItemsWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) {
|
||||||
|
result := new(Items)
|
||||||
|
err := resolveAnyWithBase(root, &ref, result, options)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveItems resolves parameter items reference against a context root and base path.
|
||||||
|
//
|
||||||
|
// Deprecated: use ResolveItemsWithBase instead
|
||||||
|
func ResolveItems(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) {
|
||||||
|
return ResolveItemsWithBase(root, ref, options)
|
||||||
|
}
|
||||||
|
|
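The resolver helpers above (ResolveRef, ResolveParameter, ResolveResponse, ResolveItems, …) resolve a $ref against an in-memory root document. A minimal, hedged sketch of ResolveRef with a hypothetical definition name; per its doc comment the reference must already live inside the root, no external documents are fetched:

```go
package main

import (
    "fmt"

    "github.com/go-openapi/spec"
)

func main() {
    // A root document with a single, hypothetical definition.
    root := &spec.Swagger{
        SwaggerProps: spec.SwaggerProps{
            Definitions: spec.Definitions{
                "Pet": *spec.StringProperty(),
            },
        },
    }

    // ResolveRef only follows the JSON pointer inside the given root.
    ref := spec.MustCreateRef("#/definitions/Pet")
    sch, err := spec.ResolveRef(root, &ref)
    if err != nil {
        panic(err)
    }
    fmt.Println(sch.Type) // [string]
}
```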
@@ -23,7 +23,7 @@ import (
 
 // ResponseProps properties specific to a response
 type ResponseProps struct {
-	Description string                 `json:"description,omitempty"`
+	Description string                 `json:"description"`
 	Schema      *Schema                `json:"schema,omitempty"`
 	Headers     map[string]Header      `json:"headers,omitempty"`
 	Examples    map[string]interface{} `json:"examples,omitempty"`

@@ -63,10 +63,31 @@ func (r *Response) UnmarshalJSON(data []byte) error {
 
 // MarshalJSON converts this items object to JSON
 func (r Response) MarshalJSON() ([]byte, error) {
-	b1, err := json.Marshal(r.ResponseProps)
+	var (
+		b1  []byte
+		err error
+	)
+
+	if r.Ref.String() == "" {
+		// when there is no $ref, empty description is rendered as an empty string
+		b1, err = json.Marshal(r.ResponseProps)
+	} else {
+		// when there is $ref inside the schema, description should be omitempty-ied
+		b1, err = json.Marshal(struct {
+			Description string                 `json:"description,omitempty"`
+			Schema      *Schema                `json:"schema,omitempty"`
+			Headers     map[string]Header      `json:"headers,omitempty"`
+			Examples    map[string]interface{} `json:"examples,omitempty"`
+		}{
+			Description: r.ResponseProps.Description,
+			Schema:      r.ResponseProps.Schema,
+			Examples:    r.ResponseProps.Examples,
+		})
+	}
 	if err != nil {
 		return nil, err
 	}
 
 	b2, err := json.Marshal(r.Refable)
 	if err != nil {
 		return nil, err
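With the tag change on ResponseProps and the new MarshalJSON logic, a response without a $ref always renders its description (even when empty), while a $ref-only response omits it. A short sketch using the package's own response constructors:

```go
package main

import (
    "encoding/json"
    "fmt"

    "github.com/go-openapi/spec"
)

func main() {
    // No $ref: the description is always emitted, even when empty.
    plain := spec.NewResponse()
    b, _ := json.Marshal(plain)
    fmt.Println(string(b)) // {"description":""}

    // With a $ref: an empty description is omitted.
    byRef := spec.ResponseRef("#/responses/notFound")
    b, _ = json.Marshal(byRef)
    fmt.Println(string(b)) // {"$ref":"#/responses/notFound"}
}
```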
@ -158,41 +158,41 @@ func (r *SchemaURL) fromMap(v map[string]interface{}) error {
|
||||||
|
|
||||||
// SchemaProps describes a JSON schema (draft 4)
|
// SchemaProps describes a JSON schema (draft 4)
|
||||||
type SchemaProps struct {
|
type SchemaProps struct {
|
||||||
ID string `json:"id,omitempty"`
|
ID string `json:"id,omitempty"`
|
||||||
Ref Ref `json:"-"`
|
Ref Ref `json:"-"`
|
||||||
Schema SchemaURL `json:"-"`
|
Schema SchemaURL `json:"-"`
|
||||||
Description string `json:"description,omitempty"`
|
Description string `json:"description,omitempty"`
|
||||||
Type StringOrArray `json:"type,omitempty"`
|
Type StringOrArray `json:"type,omitempty"`
|
||||||
Nullable bool `json:"nullable,omitempty"`
|
Nullable bool `json:"nullable,omitempty"`
|
||||||
Format string `json:"format,omitempty"`
|
Format string `json:"format,omitempty"`
|
||||||
Title string `json:"title,omitempty"`
|
Title string `json:"title,omitempty"`
|
||||||
Default interface{} `json:"default,omitempty"`
|
Default interface{} `json:"default,omitempty"`
|
||||||
Maximum *float64 `json:"maximum,omitempty"`
|
Maximum *float64 `json:"maximum,omitempty"`
|
||||||
ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`
|
ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`
|
||||||
Minimum *float64 `json:"minimum,omitempty"`
|
Minimum *float64 `json:"minimum,omitempty"`
|
||||||
ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"`
|
ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"`
|
||||||
MaxLength *int64 `json:"maxLength,omitempty"`
|
MaxLength *int64 `json:"maxLength,omitempty"`
|
||||||
MinLength *int64 `json:"minLength,omitempty"`
|
MinLength *int64 `json:"minLength,omitempty"`
|
||||||
Pattern string `json:"pattern,omitempty"`
|
Pattern string `json:"pattern,omitempty"`
|
||||||
MaxItems *int64 `json:"maxItems,omitempty"`
|
MaxItems *int64 `json:"maxItems,omitempty"`
|
||||||
MinItems *int64 `json:"minItems,omitempty"`
|
MinItems *int64 `json:"minItems,omitempty"`
|
||||||
UniqueItems bool `json:"uniqueItems,omitempty"`
|
UniqueItems bool `json:"uniqueItems,omitempty"`
|
||||||
MultipleOf *float64 `json:"multipleOf,omitempty"`
|
MultipleOf *float64 `json:"multipleOf,omitempty"`
|
||||||
Enum []interface{} `json:"enum,omitempty"`
|
Enum []interface{} `json:"enum,omitempty"`
|
||||||
MaxProperties *int64 `json:"maxProperties,omitempty"`
|
MaxProperties *int64 `json:"maxProperties,omitempty"`
|
||||||
MinProperties *int64 `json:"minProperties,omitempty"`
|
MinProperties *int64 `json:"minProperties,omitempty"`
|
||||||
Required []string `json:"required,omitempty"`
|
Required []string `json:"required,omitempty"`
|
||||||
Items *SchemaOrArray `json:"items,omitempty"`
|
Items *SchemaOrArray `json:"items,omitempty"`
|
||||||
AllOf []Schema `json:"allOf,omitempty"`
|
AllOf []Schema `json:"allOf,omitempty"`
|
||||||
OneOf []Schema `json:"oneOf,omitempty"`
|
OneOf []Schema `json:"oneOf,omitempty"`
|
||||||
AnyOf []Schema `json:"anyOf,omitempty"`
|
AnyOf []Schema `json:"anyOf,omitempty"`
|
||||||
Not *Schema `json:"not,omitempty"`
|
Not *Schema `json:"not,omitempty"`
|
||||||
Properties map[string]Schema `json:"properties,omitempty"`
|
Properties SchemaProperties `json:"properties,omitempty"`
|
||||||
AdditionalProperties *SchemaOrBool `json:"additionalProperties,omitempty"`
|
AdditionalProperties *SchemaOrBool `json:"additionalProperties,omitempty"`
|
||||||
PatternProperties map[string]Schema `json:"patternProperties,omitempty"`
|
PatternProperties SchemaProperties `json:"patternProperties,omitempty"`
|
||||||
Dependencies Dependencies `json:"dependencies,omitempty"`
|
Dependencies Dependencies `json:"dependencies,omitempty"`
|
||||||
AdditionalItems *SchemaOrBool `json:"additionalItems,omitempty"`
|
AdditionalItems *SchemaOrBool `json:"additionalItems,omitempty"`
|
||||||
Definitions Definitions `json:"definitions,omitempty"`
|
Definitions Definitions `json:"definitions,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SwaggerSchemaProps are additional properties supported by swagger schemas, but not JSON-schema (draft 4)
|
// SwaggerSchemaProps are additional properties supported by swagger schemas, but not JSON-schema (draft 4)
|
||||||
|
|
@ -513,6 +513,56 @@ func (s *Schema) AsUnwrappedXML() *Schema {
|
||||||
return s
|
return s
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SetValidations defines all schema validations.
|
||||||
|
//
|
||||||
|
// NOTE: Required, ReadOnly, AllOf, AnyOf, OneOf and Not are not considered.
|
||||||
|
func (s *Schema) SetValidations(val SchemaValidations) {
|
||||||
|
s.Maximum = val.Maximum
|
||||||
|
s.ExclusiveMaximum = val.ExclusiveMaximum
|
||||||
|
s.Minimum = val.Minimum
|
||||||
|
s.ExclusiveMinimum = val.ExclusiveMinimum
|
||||||
|
s.MaxLength = val.MaxLength
|
||||||
|
s.MinLength = val.MinLength
|
||||||
|
s.Pattern = val.Pattern
|
||||||
|
s.MaxItems = val.MaxItems
|
||||||
|
s.MinItems = val.MinItems
|
||||||
|
s.UniqueItems = val.UniqueItems
|
||||||
|
s.MultipleOf = val.MultipleOf
|
||||||
|
s.Enum = val.Enum
|
||||||
|
s.MinProperties = val.MinProperties
|
||||||
|
s.MaxProperties = val.MaxProperties
|
||||||
|
s.PatternProperties = val.PatternProperties
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithValidations is a fluent method to set schema validations
|
||||||
|
func (s *Schema) WithValidations(val SchemaValidations) *Schema {
|
||||||
|
s.SetValidations(val)
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validations returns a clone of the validations for this schema
|
||||||
|
func (s Schema) Validations() SchemaValidations {
|
||||||
|
return SchemaValidations{
|
||||||
|
CommonValidations: CommonValidations{
|
||||||
|
Maximum: s.Maximum,
|
||||||
|
ExclusiveMaximum: s.ExclusiveMaximum,
|
||||||
|
Minimum: s.Minimum,
|
||||||
|
ExclusiveMinimum: s.ExclusiveMinimum,
|
||||||
|
MaxLength: s.MaxLength,
|
||||||
|
MinLength: s.MinLength,
|
||||||
|
Pattern: s.Pattern,
|
||||||
|
MaxItems: s.MaxItems,
|
||||||
|
MinItems: s.MinItems,
|
||||||
|
UniqueItems: s.UniqueItems,
|
||||||
|
MultipleOf: s.MultipleOf,
|
||||||
|
Enum: s.Enum,
|
||||||
|
},
|
||||||
|
MinProperties: s.MinProperties,
|
||||||
|
MaxProperties: s.MaxProperties,
|
||||||
|
PatternProperties: s.PatternProperties,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// MarshalJSON marshal this to JSON
|
// MarshalJSON marshal this to JSON
|
||||||
func (s Schema) MarshalJSON() ([]byte, error) {
|
func (s Schema) MarshalJSON() ([]byte, error) {
|
||||||
b1, err := json.Marshal(s.SchemaProps)
|
b1, err := json.Marshal(s.SchemaProps)
|
||||||
|
|
|
||||||
|
|
@@ -25,35 +25,49 @@ import (
 	"github.com/go-openapi/swag"
 )
 
-// PathLoader function to use when loading remote refs
-var PathLoader func(string) (json.RawMessage, error)
-
-func init() {
-	PathLoader = func(path string) (json.RawMessage, error) {
-		data, err := swag.LoadFromFileOrHTTP(path)
-		if err != nil {
-			return nil, err
-		}
-		return json.RawMessage(data), nil
-	}
-}
+// PathLoader is a function to use when loading remote refs.
+//
+// This is a package level default. It may be overridden or bypassed by
+// specifying the loader in ExpandOptions.
+//
+// NOTE: if you are using the go-openapi/loads package, it will override
+// this value with its own default (a loader to retrieve YAML documents as
+// well as JSON ones).
+var PathLoader = func(pth string) (json.RawMessage, error) {
+	data, err := swag.LoadFromFileOrHTTP(pth)
+	if err != nil {
+		return nil, err
+	}
+	return json.RawMessage(data), nil
+}
 
 // resolverContext allows to share a context during spec processing.
 // At the moment, it just holds the index of circular references found.
 type resolverContext struct {
-	// circulars holds all visited circular references, which allows shortcuts.
-	// NOTE: this is not just a performance improvement: it is required to figure out
-	// circular references which participate several cycles.
+	// circulars holds all visited circular references, to shortcircuit $ref resolution.
+	//
 	// This structure is privately instantiated and needs not be locked against
 	// concurrent access, unless we chose to implement a parallel spec walking.
 	circulars map[string]bool
 	basePath  string
+	loadDoc   func(string) (json.RawMessage, error)
 }
 
-func newResolverContext(originalBasePath string) *resolverContext {
+func newResolverContext(expandOptions *ExpandOptions) *resolverContext {
+	absBase, _ := absPath(expandOptions.RelativeBase)
+
+	// path loader may be overridden from option
+	var loader func(string) (json.RawMessage, error)
+	if expandOptions.PathLoader == nil {
+		loader = PathLoader
+	} else {
+		loader = expandOptions.PathLoader
+	}
+
 	return &resolverContext{
 		circulars: make(map[string]bool),
-		basePath:  originalBasePath, // keep the root base path in context
+		basePath:  absBase, // keep the root base path in context
+		loadDoc:   loader,
 	}
 }
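PathLoader can now be supplied per expansion through ExpandOptions instead of mutating the package-level variable. A hedged sketch with a hypothetical root document path:

```go
package main

import (
    "encoding/json"
    "fmt"
    "os"

    "github.com/go-openapi/spec"
)

func main() {
    opts := &spec.ExpandOptions{
        RelativeBase: "./swagger.json", // hypothetical base document
        // A loader used only for this expansion; the package-level
        // spec.PathLoader stays untouched.
        PathLoader: func(pth string) (json.RawMessage, error) {
            data, err := os.ReadFile(pth)
            if err != nil {
                return nil, err
            }
            return json.RawMessage(data), nil
        },
    }

    // An empty schema has no $ref to follow, so nothing is fetched here.
    schema := &spec.Schema{}
    if err := spec.ExpandSchemaWithBasePath(schema, nil, opts); err != nil {
        fmt.Println("expand failed:", err)
    }
}
```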
|
||||||
|
|
||||||
|
|
@ -62,21 +76,20 @@ type schemaLoader struct {
|
||||||
options *ExpandOptions
|
options *ExpandOptions
|
||||||
cache ResolutionCache
|
cache ResolutionCache
|
||||||
context *resolverContext
|
context *resolverContext
|
||||||
loadDoc func(string) (json.RawMessage, error)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) (*schemaLoader, error) {
|
func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) *schemaLoader {
|
||||||
if ref.IsRoot() || ref.HasFragmentOnly {
|
if ref.IsRoot() || ref.HasFragmentOnly {
|
||||||
return r, nil
|
return r
|
||||||
}
|
}
|
||||||
|
|
||||||
baseRef, _ := NewRef(basePath)
|
baseRef := MustCreateRef(basePath)
|
||||||
currentRef := normalizeFileRef(&ref, basePath)
|
currentRef := normalizeFileRef(&ref, basePath)
|
||||||
if strings.HasPrefix(currentRef.String(), baseRef.String()) {
|
if strings.HasPrefix(currentRef.String(), baseRef.String()) {
|
||||||
return r, nil
|
return r
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set a new root to resolve against
|
// set a new root against which to resolve
|
||||||
rootURL := currentRef.GetURL()
|
rootURL := currentRef.GetURL()
|
||||||
rootURL.Fragment = ""
|
rootURL.Fragment = ""
|
||||||
root, _ := r.cache.Get(rootURL.String())
|
root, _ := r.cache.Get(rootURL.String())
|
||||||
|
|
@ -85,21 +98,13 @@ func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) (*schemaLoad
|
||||||
// traversing multiple documents
|
// traversing multiple documents
|
||||||
newOptions := r.options
|
newOptions := r.options
|
||||||
newOptions.RelativeBase = rootURL.String()
|
newOptions.RelativeBase = rootURL.String()
|
||||||
debugLog("setting new root: %s", newOptions.RelativeBase)
|
return defaultSchemaLoader(root, newOptions, r.cache, r.context)
|
||||||
resolver, err := defaultSchemaLoader(root, newOptions, r.cache, r.context)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return resolver, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string {
|
func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string {
|
||||||
if transitive != r {
|
if transitive != r {
|
||||||
debugLog("got a new resolver")
|
|
||||||
if transitive.options != nil && transitive.options.RelativeBase != "" {
|
if transitive.options != nil && transitive.options.RelativeBase != "" {
|
||||||
basePath, _ = absPath(transitive.options.RelativeBase)
|
basePath, _ = absPath(transitive.options.RelativeBase)
|
||||||
debugLog("new basePath = %s", basePath)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return basePath
|
return basePath
|
||||||
|
|
@ -108,17 +113,19 @@ func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string)
|
||||||
func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string) error {
|
func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string) error {
|
||||||
tgt := reflect.ValueOf(target)
|
tgt := reflect.ValueOf(target)
|
||||||
if tgt.Kind() != reflect.Ptr {
|
if tgt.Kind() != reflect.Ptr {
|
||||||
return fmt.Errorf("resolve ref: target needs to be a pointer")
|
return ErrResolveRefNeedsAPointer
|
||||||
}
|
}
|
||||||
|
|
||||||
refURL := ref.GetURL()
|
if ref.GetURL() == nil {
|
||||||
if refURL == nil {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var res interface{}
|
var (
|
||||||
var data interface{}
|
res interface{}
|
||||||
var err error
|
data interface{}
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
// Resolve against the root if it isn't nil, and if ref is pointing at the root, or has a fragment only which means
|
// Resolve against the root if it isn't nil, and if ref is pointing at the root, or has a fragment only which means
|
||||||
// it is pointing somewhere in the root.
|
// it is pointing somewhere in the root.
|
||||||
root := r.root
|
root := r.root
|
||||||
|
|
@ -127,12 +134,11 @@ func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string)
|
||||||
root, _, _, _ = r.load(baseRef.GetURL())
|
root, _, _, _ = r.load(baseRef.GetURL())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ref.IsRoot() || ref.HasFragmentOnly) && root != nil {
|
if (ref.IsRoot() || ref.HasFragmentOnly) && root != nil {
|
||||||
data = root
|
data = root
|
||||||
} else {
|
} else {
|
||||||
baseRef := normalizeFileRef(ref, basePath)
|
baseRef := normalizeFileRef(ref, basePath)
|
||||||
debugLog("current ref is: %s", ref.String())
|
|
||||||
debugLog("current ref normalized file: %s", baseRef.String())
|
|
||||||
data, _, _, err = r.load(baseRef.GetURL())
|
data, _, _, err = r.load(baseRef.GetURL())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|
@ -154,20 +160,30 @@ func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error)
|
||||||
toFetch := *refURL
|
toFetch := *refURL
|
||||||
toFetch.Fragment = ""
|
toFetch.Fragment = ""
|
||||||
|
|
||||||
normalized := normalizeAbsPath(toFetch.String())
|
var err error
|
||||||
|
pth := toFetch.String()
|
||||||
|
if pth == rootBase {
|
||||||
|
pth, err = absPath(rootBase)
|
||||||
|
if err != nil {
|
||||||
|
return nil, url.URL{}, false, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
normalized := normalizeAbsPath(pth)
|
||||||
|
|
||||||
data, fromCache := r.cache.Get(normalized)
|
data, fromCache := r.cache.Get(normalized)
|
||||||
if !fromCache {
|
if !fromCache {
|
||||||
b, err := r.loadDoc(normalized)
|
b, err := r.context.loadDoc(normalized)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
debugLog("unable to load the document: %v", err)
|
return nil, url.URL{}, false, fmt.Errorf("%s [%s]: %w", pth, normalized, err)
|
||||||
return nil, url.URL{}, false, err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := json.Unmarshal(b, &data); err != nil {
|
var doc interface{}
|
||||||
|
if err := json.Unmarshal(b, &doc); err != nil {
|
||||||
return nil, url.URL{}, false, err
|
return nil, url.URL{}, false, err
|
||||||
}
|
}
|
||||||
r.cache.Set(normalized, data)
|
r.cache.Set(normalized, doc)
|
||||||
|
|
||||||
|
return doc, toFetch, fromCache, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return data, toFetch, fromCache, nil
|
return data, toFetch, fromCache, nil
|
||||||
|
|
@ -182,17 +198,20 @@ func (r *schemaLoader) isCircular(ref *Ref, basePath string, parentRefs ...strin
|
||||||
foundCycle = true
|
foundCycle = true
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
foundCycle = swag.ContainsStringsCI(parentRefs, normalizedRef)
|
foundCycle = swag.ContainsStringsCI(parentRefs, normalizedRef) // TODO(fred): normalize windows url and remove CI equality
|
||||||
if foundCycle {
|
if foundCycle {
|
||||||
r.context.circulars[normalizedRef] = true
|
r.context.circulars[normalizedRef] = true
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve resolves a reference against basePath and stores the result in target
|
// Resolve resolves a reference against basePath and stores the result in target.
|
||||||
// Resolve is not in charge of following references, it only resolves ref by following its URL
|
//
|
||||||
// if the schema that ref is referring to has more refs in it. Resolve doesn't resolve them
|
// Resolve is not in charge of following references: it only resolves ref by following its URL.
|
||||||
// if basePath is an empty string, ref is resolved against the root schema stored in the schemaLoader struct
|
//
|
||||||
|
// If the schema the ref is referring to holds nested refs, Resolve doesn't resolve them.
|
||||||
|
//
|
||||||
|
// If basePath is an empty string, ref is resolved against the root schema stored in the schemaLoader struct
|
||||||
func (r *schemaLoader) Resolve(ref *Ref, target interface{}, basePath string) error {
|
func (r *schemaLoader) Resolve(ref *Ref, target interface{}, basePath string) error {
|
||||||
return r.resolveRef(ref, target, basePath)
|
return r.resolveRef(ref, target, basePath)
|
||||||
}
|
}
|
||||||
|
|
@ -209,30 +228,32 @@ func (r *schemaLoader) deref(input interface{}, parentRefs []string, basePath st
|
||||||
case *PathItem:
|
case *PathItem:
|
||||||
ref = &refable.Ref
|
ref = &refable.Ref
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("deref: unsupported type %T", input)
|
return fmt.Errorf("unsupported type: %T: %w", input, ErrDerefUnsupportedType)
|
||||||
}
|
}
|
||||||
|
|
||||||
curRef := ref.String()
|
curRef := ref.String()
|
||||||
if curRef != "" {
|
if curRef == "" {
|
||||||
normalizedRef := normalizeFileRef(ref, basePath)
|
return nil
|
||||||
normalizedBasePath := normalizedRef.RemoteURI()
|
|
||||||
|
|
||||||
if r.isCircular(normalizedRef, basePath, parentRefs...) {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.resolveRef(ref, input, basePath); r.shouldStopOnError(err) {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE(fredbi): removed basePath check => needs more testing
|
|
||||||
if ref.String() != "" && ref.String() != curRef {
|
|
||||||
parentRefs = append(parentRefs, normalizedRef.String())
|
|
||||||
return r.deref(input, parentRefs, normalizedBasePath)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
normalizedRef := normalizeFileRef(ref, basePath)
|
||||||
|
normalizedBasePath := normalizedRef.RemoteURI()
|
||||||
|
|
||||||
|
if r.isCircular(normalizedRef, basePath, parentRefs...) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := r.resolveRef(ref, input, basePath); r.shouldStopOnError(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if ref.String() == "" || ref.String() == curRef {
|
||||||
|
// done with rereferencing
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
parentRefs = append(parentRefs, normalizedRef.String())
|
||||||
|
return r.deref(input, parentRefs, normalizedBasePath)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *schemaLoader) shouldStopOnError(err error) bool {
|
func (r *schemaLoader) shouldStopOnError(err error) bool {
|
||||||
|
|
@ -247,30 +268,48 @@ func (r *schemaLoader) shouldStopOnError(err error) bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (r *schemaLoader) setSchemaID(target interface{}, id, basePath string) (string, string) {
|
||||||
|
debugLog("schema has ID: %s", id)
|
||||||
|
|
||||||
|
// handling the case when id is a folder
|
||||||
|
// remember that basePath has to point to a file
|
||||||
|
var refPath string
|
||||||
|
if strings.HasSuffix(id, "/") {
|
||||||
|
// path.Clean here would not work correctly if there is a scheme (e.g. https://...)
|
||||||
|
refPath = fmt.Sprintf("%s%s", id, "placeholder.json")
|
||||||
|
} else {
|
||||||
|
refPath = id
|
||||||
|
}
|
||||||
|
|
||||||
|
// updates the current base path
|
||||||
|
// * important: ID can be a relative path
|
||||||
|
// * registers target to be fetchable from the new base proposed by this id
|
||||||
|
newBasePath := normalizePaths(refPath, basePath)
|
||||||
|
|
||||||
|
// store found IDs for possible future reuse in $ref
|
||||||
|
r.cache.Set(newBasePath, target)
|
||||||
|
|
||||||
|
return newBasePath, refPath
|
||||||
|
}
|
||||||
|
|
||||||
func defaultSchemaLoader(
|
func defaultSchemaLoader(
|
||||||
root interface{},
|
root interface{},
|
||||||
expandOptions *ExpandOptions,
|
expandOptions *ExpandOptions,
|
||||||
cache ResolutionCache,
|
cache ResolutionCache,
|
||||||
context *resolverContext) (*schemaLoader, error) {
|
context *resolverContext) *schemaLoader {
|
||||||
|
|
||||||
if cache == nil {
|
|
||||||
cache = resCache
|
|
||||||
}
|
|
||||||
if expandOptions == nil {
|
if expandOptions == nil {
|
||||||
expandOptions = &ExpandOptions{}
|
expandOptions = &ExpandOptions{}
|
||||||
}
|
}
|
||||||
absBase, _ := absPath(expandOptions.RelativeBase)
|
|
||||||
if context == nil {
|
if context == nil {
|
||||||
context = newResolverContext(absBase)
|
context = newResolverContext(expandOptions)
|
||||||
}
|
}
|
||||||
|
|
||||||
return &schemaLoader{
|
return &schemaLoader{
|
||||||
root: root,
|
root: root,
|
||||||
options: expandOptions,
|
options: expandOptions,
|
||||||
cache: cache,
|
cache: cacheOrDefault(cache),
|
||||||
context: context,
|
context: context,
|
||||||
loadDoc: func(path string) (json.RawMessage, error) {
|
}
|
||||||
debugLog("fetching document at %q", path)
|
|
||||||
return PathLoader(path)
|
|
||||||
},
|
|
||||||
}, nil
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -82,12 +82,12 @@ func OAuth2AccessToken(authorizationURL, tokenURL string) *SecurityScheme {
type SecuritySchemeProps struct {
	Description      string            `json:"description,omitempty"`
	Type             string            `json:"type"`
	Name             string            `json:"name,omitempty"` // api key
	In               string            `json:"in,omitempty"`   // api key
	Flow             string            `json:"flow,omitempty"` // oauth2
-	AuthorizationURL string            `json:"authorizationUrl,omitempty"` // oauth2
+	AuthorizationURL string            `json:"authorizationUrl"` // oauth2
	TokenURL         string            `json:"tokenUrl,omitempty"` // oauth2
	Scopes           map[string]string `json:"scopes,omitempty"` // oauth2
}

// AddScope adds a scope to this security scheme
@@ -120,10 +120,40 @@ func (s SecurityScheme) JSONLookup(token string) (interface{}, error) {

// MarshalJSON marshal this to JSON
func (s SecurityScheme) MarshalJSON() ([]byte, error) {
-	b1, err := json.Marshal(s.SecuritySchemeProps)
+	var (
+		b1  []byte
+		err error
+	)
+
+	if s.Type == oauth2 {
+		// when oauth2, empty AuthorizationURL is added as empty string
+		b1, err = json.Marshal(s.SecuritySchemeProps)
+	} else {
+		// when not oauth2, empty AuthorizationURL should be omitted
+		b1, err = json.Marshal(struct {
+			Description      string            `json:"description,omitempty"`
+			Type             string            `json:"type"`
+			Name             string            `json:"name,omitempty"` // api key
+			In               string            `json:"in,omitempty"`   // api key
+			Flow             string            `json:"flow,omitempty"` // oauth2
+			AuthorizationURL string            `json:"authorizationUrl,omitempty"` // oauth2
+			TokenURL         string            `json:"tokenUrl,omitempty"` // oauth2
+			Scopes           map[string]string `json:"scopes,omitempty"` // oauth2
+		}{
+			Description:      s.Description,
+			Type:             s.Type,
+			Name:             s.Name,
+			In:               s.In,
+			Flow:             s.Flow,
+			AuthorizationURL: s.AuthorizationURL,
+			TokenURL:         s.TokenURL,
+			Scopes:           s.Scopes,
+		})
+	}
	if err != nil {
		return nil, err
	}

	b2, err := json.Marshal(s.VendorExtensible)
	if err != nil {
		return nil, err
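To make the intent of the `MarshalJSON` change above concrete: with the new code, only `oauth2` schemes keep an empty `authorizationUrl` in their JSON output. A small sketch against the updated package; the constructors are the ones visible in this hunk's context, and the printed shapes are what the new code implies rather than anything recorded in the commit itself:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Non-oauth2 scheme: authorizationUrl is empty and, with the updated
	// MarshalJSON above, is omitted from the output entirely.
	basic, _ := json.Marshal(spec.BasicAuth())
	fmt.Println(string(basic))

	// oauth2 scheme: authorizationUrl is kept even when empty, because the
	// struct tag no longer carries omitempty for that field.
	flow := spec.OAuth2AccessToken("", "https://example.com/token")
	oauth, _ := json.Marshal(flow)
	fmt.Println(string(oauth))
}
```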
@@ -14,7 +14,9 @@
package spec

-import "encoding/json"
+import (
+	"encoding/json"
+)

//go:generate curl -L --progress -o ./schemas/v2/schema.json http://swagger.io/v2/schema.json
//go:generate curl -L --progress -o ./schemas/jsonschema-draft-04.json http://json-schema.org/draft-04/schema
@@ -28,16 +30,6 @@ const (
	JSONSchemaURL = "http://json-schema.org/draft-04/schema#"
)

-var (
-	jsonSchema    *Schema
-	swaggerSchema *Schema
-)
-
-func init() {
-	jsonSchema = MustLoadJSONSchemaDraft04()
-	swaggerSchema = MustLoadSwagger20Schema()
-}
-
// MustLoadJSONSchemaDraft04 panics when Swagger20Schema returns an error
func MustLoadJSONSchemaDraft04() *Schema {
	d, e := JSONSchemaDraft04()
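The second hunk above drops the package-level schema variables and the eager `init()`; the exported `Must*` loaders shown in the trailing context remain the way to obtain those documents. A minimal usage sketch (reading `.ID` via the embedded `SchemaProps` is an assumption about the surrounding package, not something shown in this hunk):

```go
package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// With the eager init() gone, callers load these documents explicitly;
	// the Must* helpers above panic if the embedded schemas cannot be parsed.
	draft04 := spec.MustLoadJSONSchemaDraft04()
	swagger := spec.MustLoadSwagger20Schema()
	fmt.Println(draft04.ID, swagger.ID)
}
```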
@@ -0,0 +1,215 @@
package spec

// CommonValidations describe common JSON-schema validations
type CommonValidations struct {
	Maximum          *float64      `json:"maximum,omitempty"`
	ExclusiveMaximum bool          `json:"exclusiveMaximum,omitempty"`
	Minimum          *float64      `json:"minimum,omitempty"`
	ExclusiveMinimum bool          `json:"exclusiveMinimum,omitempty"`
	MaxLength        *int64        `json:"maxLength,omitempty"`
	MinLength        *int64        `json:"minLength,omitempty"`
	Pattern          string        `json:"pattern,omitempty"`
	MaxItems         *int64        `json:"maxItems,omitempty"`
	MinItems         *int64        `json:"minItems,omitempty"`
	UniqueItems      bool          `json:"uniqueItems,omitempty"`
	MultipleOf       *float64      `json:"multipleOf,omitempty"`
	Enum             []interface{} `json:"enum,omitempty"`
}

// SetValidations defines all validations for a simple schema.
//
// NOTE: the input is the larger set of validations available for schemas.
// For simple schemas, MinProperties and MaxProperties are ignored.
func (v *CommonValidations) SetValidations(val SchemaValidations) {
	v.Maximum = val.Maximum
	v.ExclusiveMaximum = val.ExclusiveMaximum
	v.Minimum = val.Minimum
	v.ExclusiveMinimum = val.ExclusiveMinimum
	v.MaxLength = val.MaxLength
	v.MinLength = val.MinLength
	v.Pattern = val.Pattern
	v.MaxItems = val.MaxItems
	v.MinItems = val.MinItems
	v.UniqueItems = val.UniqueItems
	v.MultipleOf = val.MultipleOf
	v.Enum = val.Enum
}

type clearedValidation struct {
	Validation string
	Value      interface{}
}

type clearedValidations []clearedValidation

func (c clearedValidations) apply(cbs []func(string, interface{})) {
	for _, cb := range cbs {
		for _, cleared := range c {
			cb(cleared.Validation, cleared.Value)
		}
	}
}

// ClearNumberValidations clears all number validations.
//
// Some callbacks may be set by the caller to capture changed values.
func (v *CommonValidations) ClearNumberValidations(cbs ...func(string, interface{})) {
	done := make(clearedValidations, 0, 5)
	defer func() {
		done.apply(cbs)
	}()

	if v.Minimum != nil {
		done = append(done, clearedValidation{Validation: "minimum", Value: v.Minimum})
		v.Minimum = nil
	}
	if v.Maximum != nil {
		done = append(done, clearedValidation{Validation: "maximum", Value: v.Maximum})
		v.Maximum = nil
	}
	if v.ExclusiveMaximum {
		done = append(done, clearedValidation{Validation: "exclusiveMaximum", Value: v.ExclusiveMaximum})
		v.ExclusiveMaximum = false
	}
	if v.ExclusiveMinimum {
		done = append(done, clearedValidation{Validation: "exclusiveMinimum", Value: v.ExclusiveMinimum})
		v.ExclusiveMinimum = false
	}
	if v.MultipleOf != nil {
		done = append(done, clearedValidation{Validation: "multipleOf", Value: v.MultipleOf})
		v.MultipleOf = nil
	}
}

// ClearStringValidations clears all string validations.
//
// Some callbacks may be set by the caller to capture changed values.
func (v *CommonValidations) ClearStringValidations(cbs ...func(string, interface{})) {
	done := make(clearedValidations, 0, 3)
	defer func() {
		done.apply(cbs)
	}()

	if v.Pattern != "" {
		done = append(done, clearedValidation{Validation: "pattern", Value: v.Pattern})
		v.Pattern = ""
	}
	if v.MinLength != nil {
		done = append(done, clearedValidation{Validation: "minLength", Value: v.MinLength})
		v.MinLength = nil
	}
	if v.MaxLength != nil {
		done = append(done, clearedValidation{Validation: "maxLength", Value: v.MaxLength})
		v.MaxLength = nil
	}
}

// ClearArrayValidations clears all array validations.
//
// Some callbacks may be set by the caller to capture changed values.
func (v *CommonValidations) ClearArrayValidations(cbs ...func(string, interface{})) {
	done := make(clearedValidations, 0, 3)
	defer func() {
		done.apply(cbs)
	}()

	if v.MaxItems != nil {
		done = append(done, clearedValidation{Validation: "maxItems", Value: v.MaxItems})
		v.MaxItems = nil
	}
	if v.MinItems != nil {
		done = append(done, clearedValidation{Validation: "minItems", Value: v.MinItems})
		v.MinItems = nil
	}
	if v.UniqueItems {
		done = append(done, clearedValidation{Validation: "uniqueItems", Value: v.UniqueItems})
		v.UniqueItems = false
	}
}

// Validations returns a clone of the validations for a simple schema.
//
// NOTE: in the context of simple schema objects, MinProperties, MaxProperties
// and PatternProperties remain unset.
func (v CommonValidations) Validations() SchemaValidations {
	return SchemaValidations{
		CommonValidations: v,
	}
}

// HasNumberValidations indicates if the validations are for numbers or integers
func (v CommonValidations) HasNumberValidations() bool {
	return v.Maximum != nil || v.Minimum != nil || v.MultipleOf != nil
}

// HasStringValidations indicates if the validations are for strings
func (v CommonValidations) HasStringValidations() bool {
	return v.MaxLength != nil || v.MinLength != nil || v.Pattern != ""
}

// HasArrayValidations indicates if the validations are for arrays
func (v CommonValidations) HasArrayValidations() bool {
	return v.MaxItems != nil || v.MinItems != nil || v.UniqueItems
}

// HasEnum indicates if the validation includes some enum constraint
func (v CommonValidations) HasEnum() bool {
	return len(v.Enum) > 0
}

// SchemaValidations describes the validation properties of a schema
//
// NOTE: at this moment, this is not embedded in SchemaProps because this would induce a breaking change
// in the exported members: all initializers using litterals would fail.
type SchemaValidations struct {
	CommonValidations

	PatternProperties SchemaProperties `json:"patternProperties,omitempty"`
	MaxProperties     *int64           `json:"maxProperties,omitempty"`
	MinProperties     *int64           `json:"minProperties,omitempty"`
}

// HasObjectValidations indicates if the validations are for objects
func (v SchemaValidations) HasObjectValidations() bool {
	return v.MaxProperties != nil || v.MinProperties != nil || v.PatternProperties != nil
}

// SetValidations for schema validations
func (v *SchemaValidations) SetValidations(val SchemaValidations) {
	v.CommonValidations.SetValidations(val)
	v.PatternProperties = val.PatternProperties
	v.MaxProperties = val.MaxProperties
	v.MinProperties = val.MinProperties
}

// Validations for a schema
func (v SchemaValidations) Validations() SchemaValidations {
	val := v.CommonValidations.Validations()
	val.PatternProperties = v.PatternProperties
	val.MinProperties = v.MinProperties
	val.MaxProperties = v.MaxProperties
	return val
}

// ClearObjectValidations returns a clone of the validations with all object validations cleared.
//
// Some callbacks may be set by the caller to capture changed values.
func (v *SchemaValidations) ClearObjectValidations(cbs ...func(string, interface{})) {
	done := make(clearedValidations, 0, 3)
	defer func() {
		done.apply(cbs)
	}()

	if v.MaxProperties != nil {
		done = append(done, clearedValidation{Validation: "maxProperties", Value: v.MaxProperties})
		v.MaxProperties = nil
	}
	if v.MinProperties != nil {
		done = append(done, clearedValidation{Validation: "minProperties", Value: v.MinProperties})
		v.MinProperties = nil
	}
	if v.PatternProperties != nil {
		done = append(done, clearedValidation{Validation: "patternProperties", Value: v.PatternProperties})
		v.PatternProperties = nil
	}
}
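The new `CommonValidations`/`SchemaValidations` API above is the largest addition in this bump. A short sketch of how the clearing helpers and their callbacks behave; the expected results in the comments are inferred from the code above, not from upstream documentation:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	max := 10.0
	v := spec.CommonValidations{
		Maximum: &max,
		Pattern: "^[a-z]+$",
	}

	// Each callback receives the name and stored value of a validation as it
	// is cleared; numeric values arrive as the pointers held by the struct.
	v.ClearNumberValidations(func(name string, value interface{}) {
		if p, ok := value.(*float64); ok {
			fmt.Printf("cleared %s (was %v)\n", name, *p) // cleared maximum (was 10)
		}
	})

	fmt.Println(v.HasNumberValidations()) // false: maximum was cleared
	fmt.Println(v.HasStringValidations()) // true: pattern is untouched
}
```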
@ -0,0 +1,202 @@
|
||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
@@ -0,0 +1,2 @@
go-shlex is a simple lexer for go that supports shell-style quoting,
commenting, and escaping.
@@ -0,0 +1,3 @@
module github.com/google/shlex

go 1.13
@ -0,0 +1,416 @@
|
||||||
|
/*
|
||||||
|
Copyright 2012 Google Inc. All Rights Reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package shlex implements a simple lexer which splits input in to tokens using
|
||||||
|
shell-style rules for quoting and commenting.
|
||||||
|
|
||||||
|
The basic use case uses the default ASCII lexer to split a string into sub-strings:
|
||||||
|
|
||||||
|
shlex.Split("one \"two three\" four") -> []string{"one", "two three", "four"}
|
||||||
|
|
||||||
|
To process a stream of strings:
|
||||||
|
|
||||||
|
l := NewLexer(os.Stdin)
|
||||||
|
for ; token, err := l.Next(); err != nil {
|
||||||
|
// process token
|
||||||
|
}
|
||||||
|
|
||||||
|
To access the raw token stream (which includes tokens for comments):
|
||||||
|
|
||||||
|
t := NewTokenizer(os.Stdin)
|
||||||
|
for ; token, err := t.Next(); err != nil {
|
||||||
|
// process token
|
||||||
|
}
|
||||||
|
|
||||||
|
*/
|
||||||
|
package shlex
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TokenType is a top-level token classification: A word, space, comment, unknown.
|
||||||
|
type TokenType int
|
||||||
|
|
||||||
|
// runeTokenClass is the type of a UTF-8 character classification: A quote, space, escape.
|
||||||
|
type runeTokenClass int
|
||||||
|
|
||||||
|
// the internal state used by the lexer state machine
|
||||||
|
type lexerState int
|
||||||
|
|
||||||
|
// Token is a (type, value) pair representing a lexographical token.
|
||||||
|
type Token struct {
|
||||||
|
tokenType TokenType
|
||||||
|
value string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Equal reports whether tokens a, and b, are equal.
|
||||||
|
// Two tokens are equal if both their types and values are equal. A nil token can
|
||||||
|
// never be equal to another token.
|
||||||
|
func (a *Token) Equal(b *Token) bool {
|
||||||
|
if a == nil || b == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if a.tokenType != b.tokenType {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return a.value == b.value
|
||||||
|
}
|
||||||
|
|
||||||
|
// Named classes of UTF-8 runes
|
||||||
|
const (
|
||||||
|
spaceRunes = " \t\r\n"
|
||||||
|
escapingQuoteRunes = `"`
|
||||||
|
nonEscapingQuoteRunes = "'"
|
||||||
|
escapeRunes = `\`
|
||||||
|
commentRunes = "#"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Classes of rune token
|
||||||
|
const (
|
||||||
|
unknownRuneClass runeTokenClass = iota
|
||||||
|
spaceRuneClass
|
||||||
|
escapingQuoteRuneClass
|
||||||
|
nonEscapingQuoteRuneClass
|
||||||
|
escapeRuneClass
|
||||||
|
commentRuneClass
|
||||||
|
eofRuneClass
|
||||||
|
)
|
||||||
|
|
||||||
|
// Classes of lexographic token
|
||||||
|
const (
|
||||||
|
UnknownToken TokenType = iota
|
||||||
|
WordToken
|
||||||
|
SpaceToken
|
||||||
|
CommentToken
|
||||||
|
)
|
||||||
|
|
||||||
|
// Lexer state machine states
|
||||||
|
const (
|
||||||
|
startState lexerState = iota // no runes have been seen
|
||||||
|
inWordState // processing regular runes in a word
|
||||||
|
escapingState // we have just consumed an escape rune; the next rune is literal
|
||||||
|
escapingQuotedState // we have just consumed an escape rune within a quoted string
|
||||||
|
quotingEscapingState // we are within a quoted string that supports escaping ("...")
|
||||||
|
quotingState // we are within a string that does not support escaping ('...')
|
||||||
|
commentState // we are within a comment (everything following an unquoted or unescaped #
|
||||||
|
)
|
||||||
|
|
||||||
|
// tokenClassifier is used for classifying rune characters.
|
||||||
|
type tokenClassifier map[rune]runeTokenClass
|
||||||
|
|
||||||
|
func (typeMap tokenClassifier) addRuneClass(runes string, tokenType runeTokenClass) {
|
||||||
|
for _, runeChar := range runes {
|
||||||
|
typeMap[runeChar] = tokenType
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newDefaultClassifier creates a new classifier for ASCII characters.
|
||||||
|
func newDefaultClassifier() tokenClassifier {
|
||||||
|
t := tokenClassifier{}
|
||||||
|
t.addRuneClass(spaceRunes, spaceRuneClass)
|
||||||
|
t.addRuneClass(escapingQuoteRunes, escapingQuoteRuneClass)
|
||||||
|
t.addRuneClass(nonEscapingQuoteRunes, nonEscapingQuoteRuneClass)
|
||||||
|
t.addRuneClass(escapeRunes, escapeRuneClass)
|
||||||
|
t.addRuneClass(commentRunes, commentRuneClass)
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClassifyRune classifiees a rune
|
||||||
|
func (t tokenClassifier) ClassifyRune(runeVal rune) runeTokenClass {
|
||||||
|
return t[runeVal]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Lexer turns an input stream into a sequence of tokens. Whitespace and comments are skipped.
|
||||||
|
type Lexer Tokenizer
|
||||||
|
|
||||||
|
// NewLexer creates a new lexer from an input stream.
|
||||||
|
func NewLexer(r io.Reader) *Lexer {
|
||||||
|
|
||||||
|
return (*Lexer)(NewTokenizer(r))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns the next word, or an error. If there are no more words,
|
||||||
|
// the error will be io.EOF.
|
||||||
|
func (l *Lexer) Next() (string, error) {
|
||||||
|
for {
|
||||||
|
token, err := (*Tokenizer)(l).Next()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
switch token.tokenType {
|
||||||
|
case WordToken:
|
||||||
|
return token.value, nil
|
||||||
|
case CommentToken:
|
||||||
|
// skip comments
|
||||||
|
default:
|
||||||
|
return "", fmt.Errorf("Unknown token type: %v", token.tokenType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tokenizer turns an input stream into a sequence of typed tokens
|
||||||
|
type Tokenizer struct {
|
||||||
|
input bufio.Reader
|
||||||
|
classifier tokenClassifier
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTokenizer creates a new tokenizer from an input stream.
|
||||||
|
func NewTokenizer(r io.Reader) *Tokenizer {
|
||||||
|
input := bufio.NewReader(r)
|
||||||
|
classifier := newDefaultClassifier()
|
||||||
|
return &Tokenizer{
|
||||||
|
input: *input,
|
||||||
|
classifier: classifier}
|
||||||
|
}
|
||||||
|
|
||||||
|
// scanStream scans the stream for the next token using the internal state machine.
|
||||||
|
// It will panic if it encounters a rune which it does not know how to handle.
|
||||||
|
func (t *Tokenizer) scanStream() (*Token, error) {
|
||||||
|
state := startState
|
||||||
|
var tokenType TokenType
|
||||||
|
var value []rune
|
||||||
|
var nextRune rune
|
||||||
|
var nextRuneType runeTokenClass
|
||||||
|
var err error
|
||||||
|
|
||||||
|
for {
|
||||||
|
nextRune, _, err = t.input.ReadRune()
|
||||||
|
nextRuneType = t.classifier.ClassifyRune(nextRune)
|
||||||
|
|
||||||
|
if err == io.EOF {
|
||||||
|
nextRuneType = eofRuneClass
|
||||||
|
err = nil
|
||||||
|
} else if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch state {
|
||||||
|
case startState: // no runes read yet
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
return nil, io.EOF
|
||||||
|
}
|
||||||
|
case spaceRuneClass:
|
||||||
|
{
|
||||||
|
}
|
||||||
|
case escapingQuoteRuneClass:
|
||||||
|
{
|
||||||
|
tokenType = WordToken
|
||||||
|
state = quotingEscapingState
|
||||||
|
}
|
||||||
|
case nonEscapingQuoteRuneClass:
|
||||||
|
{
|
||||||
|
tokenType = WordToken
|
||||||
|
state = quotingState
|
||||||
|
}
|
||||||
|
case escapeRuneClass:
|
||||||
|
{
|
||||||
|
tokenType = WordToken
|
||||||
|
state = escapingState
|
||||||
|
}
|
||||||
|
case commentRuneClass:
|
||||||
|
{
|
||||||
|
tokenType = CommentToken
|
||||||
|
state = commentState
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
tokenType = WordToken
|
||||||
|
value = append(value, nextRune)
|
||||||
|
state = inWordState
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case inWordState: // in a regular word
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
case spaceRuneClass:
|
||||||
|
{
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
case escapingQuoteRuneClass:
|
||||||
|
{
|
||||||
|
state = quotingEscapingState
|
||||||
|
}
|
||||||
|
case nonEscapingQuoteRuneClass:
|
||||||
|
{
|
||||||
|
state = quotingState
|
||||||
|
}
|
||||||
|
case escapeRuneClass:
|
||||||
|
{
|
||||||
|
state = escapingState
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case escapingState: // the rune after an escape character
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
err = fmt.Errorf("EOF found after escape character")
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
state = inWordState
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case escapingQuotedState: // the next rune after an escape character, in double quotes
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
err = fmt.Errorf("EOF found after escape character")
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
state = quotingEscapingState
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case quotingEscapingState: // in escaping double quotes
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
err = fmt.Errorf("EOF found when expecting closing quote")
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
case escapingQuoteRuneClass:
|
||||||
|
{
|
||||||
|
state = inWordState
|
||||||
|
}
|
||||||
|
case escapeRuneClass:
|
||||||
|
{
|
||||||
|
state = escapingQuotedState
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case quotingState: // in non-escaping single quotes
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
err = fmt.Errorf("EOF found when expecting closing quote")
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
case nonEscapingQuoteRuneClass:
|
||||||
|
{
|
||||||
|
state = inWordState
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case commentState: // in a comment
|
||||||
|
{
|
||||||
|
switch nextRuneType {
|
||||||
|
case eofRuneClass:
|
||||||
|
{
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
}
|
||||||
|
case spaceRuneClass:
|
||||||
|
{
|
||||||
|
if nextRune == '\n' {
|
||||||
|
state = startState
|
||||||
|
token := &Token{
|
||||||
|
tokenType: tokenType,
|
||||||
|
value: string(value)}
|
||||||
|
return token, err
|
||||||
|
} else {
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
value = append(value, nextRune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
{
|
||||||
|
return nil, fmt.Errorf("Unexpected state: %v", state)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns the next token in the stream.
|
||||||
|
func (t *Tokenizer) Next() (*Token, error) {
|
||||||
|
return t.scanStream()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split partitions a string into a slice of strings.
|
||||||
|
func Split(s string) ([]string, error) {
|
||||||
|
l := NewLexer(strings.NewReader(s))
|
||||||
|
subStrings := make([]string, 0)
|
||||||
|
for {
|
||||||
|
word, err := l.Next()
|
||||||
|
if err != nil {
|
||||||
|
if err == io.EOF {
|
||||||
|
return subStrings, nil
|
||||||
|
}
|
||||||
|
return subStrings, err
|
||||||
|
}
|
||||||
|
subStrings = append(subStrings, word)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
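The package comment in the vendored shlex source above describes both entry points, though its `for ; token, err := l.Next(); err != nil` loops are not valid Go as written. A corrected, runnable sketch of the same two usage patterns (import path as published by google/shlex):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/google/shlex"
)

func main() {
	// Whole-string convenience helper; comments after an unquoted # are dropped.
	words, err := shlex.Split(`one "two three" four # trailing comment`)
	if err != nil {
		panic(err)
	}
	fmt.Println(words) // [one two three four]

	// Streaming form: Next returns io.EOF once the input is exhausted.
	l := shlex.NewLexer(strings.NewReader("alpha 'beta gamma'"))
	for {
		token, err := l.Next()
		if err != nil {
			break // io.EOF or a real lexing error
		}
		fmt.Println(token)
	}
}
```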
@@ -0,0 +1,6 @@
language: go
go:
- 1.14.x
- master
script:
- go test -v ./...
@ -0,0 +1,21 @@
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) [2015] [go-gitignore]
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@@ -0,0 +1,95 @@
# go-gitignore [](https://travis-ci.org/monochromegane/go-gitignore)

A fast gitignore matching library for Go.

This library use simple tree index for matching, so keep fast if gitignore file has many pattern.

## Usage

```go
gitignore, _ := gitignore.NewGitIgnore("/path/to/gitignore")

path := "/path/to/file"
isDir := false
gitignore.Match(path, isDir)
```

### Specify base directory

go-gitignore treat `path` as a base directory.
If you want to specify other base (e.g. current directory and Global gitignore), you can like the following.

```go
gitignore, _ := gitignore.NewGitIgnore("/home/you/.gitignore", ".")
```

### From io.Reader

go-gitignore can initialize from io.Reader.

```go
gitignore, _ := gitignore.NewGitIgnoreFromReader(base, reader)
```

## Simple tree index

go-gitignore parse gitignore file, and generate a simple tree index for matching like the following.

```
.
├── accept
│   ├── absolute
│   │   └── depth
│   │       ├── initial
│   │       └── other
│   └── relative
│       └── depth
│           ├── initial
│           └── other
└── ignore
    ├── absolute
    │   └── depth
    │       ├── initial
    │       └── other
    └── relative
        └── depth
            ├── initial
            └── other
```

## Features

- Support absolute path (/path/to/ignore)
- Support relative path (path/to/ignore)
- Support accept pattern (!path/to/accept)
- Support directory pattern (path/to/directory/)
- Support glob pattern (path/to/\*.txt)

*note: glob pattern*

go-gitignore use [filepath.Match](https://golang.org/pkg/path/filepath/#Match) for matching meta char pattern, so not support recursive pattern (path/`**`/file).

## Installation

```sh
$ go get github.com/monochromegane/go-gitignore
```

## Contribution

1. Fork it
2. Create a feature branch
3. Commit your changes
4. Rebase your local changes against the master branch
5. Run test suite with the `go test ./...` command and confirm that it passes
6. Run `gofmt -s`
7. Create new Pull Request

## License

[MIT](https://github.com/monochromegane/go-gitignore/blob/master/LICENSE)

## Author

[monochromegane](https://github.com/monochromegane)
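The README above shows the `io.Reader` constructor only as a one-liner. A slightly fuller, runnable sketch of that path; the expected results in the comments follow from the matching code vendored below rather than from the README itself:

```go
package main

import (
	"fmt"
	"strings"

	gitignore "github.com/monochromegane/go-gitignore"
)

func main() {
	// Build a matcher from an in-memory .gitignore; "." is the base directory
	// that matched paths are made relative to.
	rules := "*.log\n!keep.log\nbuild/\n"
	matcher := gitignore.NewGitIgnoreFromReader(".", strings.NewReader(rules))

	fmt.Println(matcher.Match("debug.log", false)) // true: matches *.log
	fmt.Println(matcher.Match("keep.log", false))  // false: negated by !keep.log
	fmt.Println(matcher.Match("build", true))      // true: directory pattern build/
}
```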
@ -0,0 +1,79 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
const (
|
||||||
|
asc = iota
|
||||||
|
desc
|
||||||
|
)
|
||||||
|
|
||||||
|
type depthPatternHolder struct {
|
||||||
|
patterns depthPatterns
|
||||||
|
order int
|
||||||
|
}
|
||||||
|
|
||||||
|
func newDepthPatternHolder(order int) depthPatternHolder {
|
||||||
|
return depthPatternHolder{
|
||||||
|
patterns: depthPatterns{m: map[int]initialPatternHolder{}},
|
||||||
|
order: order,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *depthPatternHolder) add(pattern string) {
|
||||||
|
count := strings.Count(strings.Trim(pattern, "/"), "/")
|
||||||
|
h.patterns.set(count+1, pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h depthPatternHolder) match(path string, isDir bool) bool {
|
||||||
|
if h.patterns.size() == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for depth := 1; ; depth++ {
|
||||||
|
var part string
|
||||||
|
var isLast, isDirCurrent bool
|
||||||
|
if h.order == asc {
|
||||||
|
part, isLast = cutN(path, depth)
|
||||||
|
if isLast {
|
||||||
|
isDirCurrent = isDir
|
||||||
|
} else {
|
||||||
|
isDirCurrent = false
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
part, isLast = cutLastN(path, depth)
|
||||||
|
isDirCurrent = isDir
|
||||||
|
}
|
||||||
|
if patterns, ok := h.patterns.get(depth); ok {
|
||||||
|
if patterns.match(part, isDirCurrent) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isLast {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
type depthPatterns struct {
|
||||||
|
m map[int]initialPatternHolder
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *depthPatterns) set(depth int, pattern string) {
|
||||||
|
if ps, ok := p.m[depth]; ok {
|
||||||
|
ps.add(pattern)
|
||||||
|
} else {
|
||||||
|
holder := newInitialPatternHolder()
|
||||||
|
holder.add(pattern)
|
||||||
|
p.m[depth] = holder
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p depthPatterns) get(depth int) (initialPatternHolder, bool) {
|
||||||
|
patterns, ok := p.m[depth]
|
||||||
|
return patterns, ok
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p depthPatterns) size() int {
|
||||||
|
return len(p.m)
|
||||||
|
}
|
||||||
31
vendor/github.com/monochromegane/go-gitignore/full_scan_patterns.go
generated
vendored
Normal file
31
vendor/github.com/monochromegane/go-gitignore/full_scan_patterns.go
generated
vendored
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// Only benchmark use
|
||||||
|
type fullScanPatterns struct {
|
||||||
|
absolute patterns
|
||||||
|
relative patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
func newFullScanPatterns() *fullScanPatterns {
|
||||||
|
return &fullScanPatterns{
|
||||||
|
absolute: patterns{},
|
||||||
|
relative: patterns{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps *fullScanPatterns) add(pattern string) {
|
||||||
|
if strings.HasPrefix(pattern, "/") {
|
||||||
|
ps.absolute.add(newPattern(pattern))
|
||||||
|
} else {
|
||||||
|
ps.relative.add(newPattern(pattern))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps fullScanPatterns) match(path string, isDir bool) bool {
|
||||||
|
if ps.absolute.match(path, isDir) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return ps.relative.match(path, isDir)
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,80 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type IgnoreMatcher interface {
|
||||||
|
Match(path string, isDir bool) bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type DummyIgnoreMatcher bool
|
||||||
|
|
||||||
|
func (d DummyIgnoreMatcher) Match(path string, isDir bool) bool {
|
||||||
|
return bool(d)
|
||||||
|
}
|
||||||
|
|
||||||
|
type gitIgnore struct {
|
||||||
|
ignorePatterns scanStrategy
|
||||||
|
acceptPatterns scanStrategy
|
||||||
|
path string
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewGitIgnore(gitignore string, base ...string) (IgnoreMatcher, error) {
|
||||||
|
var path string
|
||||||
|
if len(base) > 0 {
|
||||||
|
path = base[0]
|
||||||
|
} else {
|
||||||
|
path = filepath.Dir(gitignore)
|
||||||
|
}
|
||||||
|
|
||||||
|
file, err := os.Open(gitignore)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
return NewGitIgnoreFromReader(path, file), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewGitIgnoreFromReader(path string, r io.Reader) IgnoreMatcher {
|
||||||
|
g := gitIgnore{
|
||||||
|
ignorePatterns: newIndexScanPatterns(),
|
||||||
|
acceptPatterns: newIndexScanPatterns(),
|
||||||
|
path: path,
|
||||||
|
}
|
||||||
|
scanner := bufio.NewScanner(r)
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := strings.Trim(scanner.Text(), " ")
|
||||||
|
if len(line) == 0 || strings.HasPrefix(line, "#") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(line, `\#`) {
|
||||||
|
line = strings.TrimPrefix(line, `\`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(line, "!") {
|
||||||
|
g.acceptPatterns.add(strings.TrimPrefix(line, "!"))
|
||||||
|
} else {
|
||||||
|
g.ignorePatterns.add(line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return g
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g gitIgnore) Match(path string, isDir bool) bool {
|
||||||
|
relativePath, err := filepath.Rel(g.path, path)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
relativePath = filepath.ToSlash(relativePath)
|
||||||
|
|
||||||
|
if g.acceptPatterns.match(relativePath, isDir) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return g.ignorePatterns.match(relativePath, isDir)
|
||||||
|
}
|
||||||
35
vendor/github.com/monochromegane/go-gitignore/index_scan_patterns.go
generated
vendored
Normal file
35
vendor/github.com/monochromegane/go-gitignore/index_scan_patterns.go
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
type indexScanPatterns struct {
|
||||||
|
absolute depthPatternHolder
|
||||||
|
relative depthPatternHolder
|
||||||
|
}
|
||||||
|
|
||||||
|
func newIndexScanPatterns() *indexScanPatterns {
|
||||||
|
return &indexScanPatterns{
|
||||||
|
absolute: newDepthPatternHolder(asc),
|
||||||
|
relative: newDepthPatternHolder(desc),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps *indexScanPatterns) add(pattern string) {
|
||||||
|
if strings.HasPrefix(pattern, "/") {
|
||||||
|
ps.absolute.add(pattern)
|
||||||
|
} else {
|
||||||
|
ps.relative.add(pattern)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps indexScanPatterns) match(path string, isDir bool) bool {
|
||||||
|
if ps.absolute.match(path, isDir) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return ps.relative.match(path, isDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
type scanStrategy interface {
|
||||||
|
add(pattern string)
|
||||||
|
match(path string, isDir bool) bool
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,62 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
const initials = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ."
|
||||||
|
|
||||||
|
type initialPatternHolder struct {
|
||||||
|
patterns initialPatterns
|
||||||
|
otherPatterns *patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
func newInitialPatternHolder() initialPatternHolder {
|
||||||
|
return initialPatternHolder{
|
||||||
|
patterns: initialPatterns{m: map[byte]*patterns{}},
|
||||||
|
otherPatterns: &patterns{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *initialPatternHolder) add(pattern string) {
|
||||||
|
trimedPattern := strings.TrimPrefix(pattern, "/")
|
||||||
|
if strings.IndexAny(trimedPattern[0:1], initials) != -1 {
|
||||||
|
h.patterns.set(trimedPattern[0], newPatternForEqualizedPath(pattern))
|
||||||
|
} else {
|
||||||
|
h.otherPatterns.add(newPatternForEqualizedPath(pattern))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h initialPatternHolder) match(path string, isDir bool) bool {
|
||||||
|
if h.patterns.size() == 0 && h.otherPatterns.size() == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if patterns, ok := h.patterns.get(path[0]); ok {
|
||||||
|
if patterns.match(path, isDir) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return h.otherPatterns.match(path, isDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
type initialPatterns struct {
|
||||||
|
m map[byte]*patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *initialPatterns) set(initial byte, pattern pattern) {
|
||||||
|
if ps, ok := p.m[initial]; ok {
|
||||||
|
ps.add(pattern)
|
||||||
|
} else {
|
||||||
|
patterns := &patterns{}
|
||||||
|
patterns.add(pattern)
|
||||||
|
p.m[initial] = patterns
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p initialPatterns) get(initial byte) (*patterns, bool) {
|
||||||
|
patterns, ok := p.m[initial]
|
||||||
|
return patterns, ok
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p initialPatterns) size() int {
|
||||||
|
return len(p.m)
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,24 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import "path/filepath"
|
||||||
|
|
||||||
|
type pathMatcher interface {
|
||||||
|
match(path string) bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type simpleMatcher struct {
|
||||||
|
path string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m simpleMatcher) match(path string) bool {
|
||||||
|
return m.path == path
|
||||||
|
}
|
||||||
|
|
||||||
|
type filepathMatcher struct {
|
||||||
|
path string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m filepathMatcher) match(path string) bool {
|
||||||
|
match, _ := filepath.Match(m.path, path)
|
||||||
|
return match
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,69 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var Separator = string(filepath.Separator)
|
||||||
|
|
||||||
|
type pattern struct {
|
||||||
|
hasRootPrefix bool
|
||||||
|
hasDirSuffix bool
|
||||||
|
pathDepth int
|
||||||
|
matcher pathMatcher
|
||||||
|
onlyEqualizedPath bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newPattern(path string) pattern {
|
||||||
|
hasRootPrefix := path[0] == '/'
|
||||||
|
hasDirSuffix := path[len(path)-1] == '/'
|
||||||
|
|
||||||
|
var pathDepth int
|
||||||
|
if !hasRootPrefix {
|
||||||
|
pathDepth = strings.Count(path, "/")
|
||||||
|
}
|
||||||
|
|
||||||
|
var matcher pathMatcher
|
||||||
|
matchingPath := strings.Trim(path, "/")
|
||||||
|
if hasMeta(path) {
|
||||||
|
matcher = filepathMatcher{path: matchingPath}
|
||||||
|
} else {
|
||||||
|
matcher = simpleMatcher{path: matchingPath}
|
||||||
|
}
|
||||||
|
|
||||||
|
return pattern{
|
||||||
|
hasRootPrefix: hasRootPrefix,
|
||||||
|
hasDirSuffix: hasDirSuffix,
|
||||||
|
pathDepth: pathDepth,
|
||||||
|
matcher: matcher,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newPatternForEqualizedPath(path string) pattern {
|
||||||
|
pattern := newPattern(path)
|
||||||
|
pattern.onlyEqualizedPath = true
|
||||||
|
return pattern
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p pattern) match(path string, isDir bool) bool {
|
||||||
|
|
||||||
|
if p.hasDirSuffix && !isDir {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
var targetPath string
|
||||||
|
if p.hasRootPrefix || p.onlyEqualizedPath {
|
||||||
|
// absolute pattern or only equalized path mode
|
||||||
|
targetPath = path
|
||||||
|
} else {
|
||||||
|
// relative pattern
|
||||||
|
targetPath = p.equalizeDepth(path)
|
||||||
|
}
|
||||||
|
return p.matcher.match(targetPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p pattern) equalizeDepth(path string) string {
|
||||||
|
equalizedPath, _ := cutLastN(path, p.pathDepth+1)
|
||||||
|
return equalizedPath
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,22 @@
|
||||||
|
package gitignore
|
||||||
|
|
||||||
|
type patterns struct {
|
||||||
|
patterns []pattern
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps *patterns) add(pattern pattern) {
|
||||||
|
ps.patterns = append(ps.patterns, pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps *patterns) size() int {
|
||||||
|
return len(ps.patterns)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ps patterns) match(path string, isDir bool) bool {
|
||||||
|
for _, p := range ps.patterns {
|
||||||
|
if match := p.match(path, isDir); match {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
@@ -0,0 +1,45 @@
package gitignore

import (
	"os"
	"strings"
)

func cutN(path string, n int) (string, bool) {
	isLast := true

	var i, count int
	for i < len(path)-1 {
		if os.IsPathSeparator(path[i]) {
			count++
			if count >= n {
				isLast = false
				break
			}
		}
		i++
	}
	return path[:i+1], isLast
}

func cutLastN(path string, n int) (string, bool) {
	isLast := true
	i := len(path) - 1

	var count int
	for i >= 0 {
		if os.IsPathSeparator(path[i]) {
			count++
			if count >= n {
				isLast = false
				break
			}
		}
		i--
	}
	return path[i+1:], isLast
}

func hasMeta(path string) bool {
	return strings.IndexAny(path, "*?[") >= 0
}

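cutN returns the prefix of the path up to and including its n-th separator, and cutLastN returns the suffix after the n-th separator counted from the end; the returned bool is true when the path contains fewer than n separators, i.e. the whole string comes back. A hypothetical in-package sketch:

```
package gitignore

import "testing"

// Hypothetical in-package sketch of the cut helper semantics.
func TestCutHelpers(t *testing.T) {
	if head, whole := cutN("a/b/c/d", 2); head != "a/b/" || whole {
		t.Errorf("cutN: got %q, %v", head, whole)
	}
	if tail, whole := cutLastN("a/b/c/d", 2); tail != "c/d" || whole {
		t.Errorf("cutLastN: got %q, %v", tail, whole)
	}
	if tail, whole := cutLastN("single", 1); tail != "single" || !whole {
		t.Errorf("cutLastN with no separator: got %q, %v", tail, whole)
	}
}
```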
@@ -0,0 +1,27 @@
Copyright (c) 2013, Patrick Mezard
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

    Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
    Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
    The names of its contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -0,0 +1,772 @@
// Package difflib is a partial port of Python difflib module.
//
// It provides tools to compare sequences of strings and generate textual diffs.
//
// The following class and functions have been ported:
//
// - SequenceMatcher
//
// - unified_diff
//
// - context_diff
//
// Getting unified diffs was the main goal of the port. Keep in mind this code
// is mostly suitable to output text differences in a human friendly way, there
// are no guarantees generated diffs are consumable by patch(1).
package difflib

import (
	"bufio"
	"bytes"
	"fmt"
	"io"
	"strings"
)

func min(a, b int) int {
	if a < b {
		return a
	}
	return b
}

func max(a, b int) int {
	if a > b {
		return a
	}
	return b
}

func calculateRatio(matches, length int) float64 {
	if length > 0 {
		return 2.0 * float64(matches) / float64(length)
	}
	return 1.0
}

type Match struct {
	A    int
	B    int
	Size int
}

type OpCode struct {
	Tag byte
	I1  int
	I2  int
	J1  int
	J2  int
}

// SequenceMatcher compares sequence of strings. The basic
// algorithm predates, and is a little fancier than, an algorithm
// published in the late 1980's by Ratcliff and Obershelp under the
// hyperbolic name "gestalt pattern matching". The basic idea is to find
// the longest contiguous matching subsequence that contains no "junk"
// elements (R-O doesn't address junk). The same idea is then applied
// recursively to the pieces of the sequences to the left and to the right
// of the matching subsequence. This does not yield minimal edit
// sequences, but does tend to yield matches that "look right" to people.
//
// SequenceMatcher tries to compute a "human-friendly diff" between two
// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the
// longest *contiguous* & junk-free matching subsequence. That's what
// catches peoples' eyes. The Windows(tm) windiff has another interesting
// notion, pairing up elements that appear uniquely in each sequence.
// That, and the method here, appear to yield more intuitive difference
// reports than does diff. This method appears to be the least vulnerable
// to synching up on blocks of "junk lines", though (like blank lines in
// ordinary text files, or maybe "<P>" lines in HTML files). That may be
// because this is the only method of the 3 that has a *concept* of
// "junk" <wink>.
//
// Timing: Basic R-O is cubic time worst case and quadratic time expected
// case. SequenceMatcher is quadratic time for the worst case and has
// expected-case behavior dependent in a complicated way on how many
// elements the sequences have in common; best case time is linear.
type SequenceMatcher struct {
	a              []string
	b              []string
	b2j            map[string][]int
	IsJunk         func(string) bool
	autoJunk       bool
	bJunk          map[string]struct{}
	matchingBlocks []Match
	fullBCount     map[string]int
	bPopular       map[string]struct{}
	opCodes        []OpCode
}

func NewMatcher(a, b []string) *SequenceMatcher {
	m := SequenceMatcher{autoJunk: true}
	m.SetSeqs(a, b)
	return &m
}

func NewMatcherWithJunk(a, b []string, autoJunk bool,
	isJunk func(string) bool) *SequenceMatcher {

	m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk}
	m.SetSeqs(a, b)
	return &m
}

// Set two sequences to be compared.
func (m *SequenceMatcher) SetSeqs(a, b []string) {
	m.SetSeq1(a)
	m.SetSeq2(b)
}

// Set the first sequence to be compared. The second sequence to be compared is
// not changed.
//
// SequenceMatcher computes and caches detailed information about the second
// sequence, so if you want to compare one sequence S against many sequences,
// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other
// sequences.
//
// See also SetSeqs() and SetSeq2().
func (m *SequenceMatcher) SetSeq1(a []string) {
	if &a == &m.a {
		return
	}
	m.a = a
	m.matchingBlocks = nil
	m.opCodes = nil
}

// Set the second sequence to be compared. The first sequence to be compared is
// not changed.
func (m *SequenceMatcher) SetSeq2(b []string) {
	if &b == &m.b {
		return
	}
	m.b = b
	m.matchingBlocks = nil
	m.opCodes = nil
	m.fullBCount = nil
	m.chainB()
}

func (m *SequenceMatcher) chainB() {
	// Populate line -> index mapping
	b2j := map[string][]int{}
	for i, s := range m.b {
		indices := b2j[s]
		indices = append(indices, i)
		b2j[s] = indices
	}

	// Purge junk elements
	m.bJunk = map[string]struct{}{}
	if m.IsJunk != nil {
		junk := m.bJunk
		for s, _ := range b2j {
			if m.IsJunk(s) {
				junk[s] = struct{}{}
			}
		}
		for s, _ := range junk {
			delete(b2j, s)
		}
	}

	// Purge remaining popular elements
	popular := map[string]struct{}{}
	n := len(m.b)
	if m.autoJunk && n >= 200 {
		ntest := n/100 + 1
		for s, indices := range b2j {
			if len(indices) > ntest {
				popular[s] = struct{}{}
			}
		}
		for s, _ := range popular {
			delete(b2j, s)
		}
	}
	m.bPopular = popular
	m.b2j = b2j
}

func (m *SequenceMatcher) isBJunk(s string) bool {
	_, ok := m.bJunk[s]
	return ok
}

// Find longest matching block in a[alo:ahi] and b[blo:bhi].
//
// If IsJunk is not defined:
//
// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
//     alo <= i <= i+k <= ahi
//     blo <= j <= j+k <= bhi
// and for all (i',j',k') meeting those conditions,
//     k >= k'
//     i <= i'
//     and if i == i', j <= j'
//
// In other words, of all maximal matching blocks, return one that
// starts earliest in a, and of all those maximal matching blocks that
// start earliest in a, return the one that starts earliest in b.
//
// If IsJunk is defined, first the longest matching block is
// determined as above, but with the additional restriction that no
// junk element appears in the block. Then that block is extended as
// far as possible by matching (only) junk elements on both sides. So
// the resulting block never matches on junk except as identical junk
// happens to be adjacent to an "interesting" match.
//
// If no blocks match, return (alo, blo, 0).
func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match {
	// CAUTION: stripping common prefix or suffix would be incorrect.
	// E.g.,
	//    ab
	//    acab
	// Longest matching block is "ab", but if common prefix is
	// stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
	// strip, so ends up claiming that ab is changed to acab by
	// inserting "ca" in the middle. That's minimal but unintuitive:
	// "it's obvious" that someone inserted "ac" at the front.
	// Windiff ends up at the same place as diff, but by pairing up
	// the unique 'b's and then matching the first two 'a's.
	besti, bestj, bestsize := alo, blo, 0

	// find longest junk-free match
	// during an iteration of the loop, j2len[j] = length of longest
	// junk-free match ending with a[i-1] and b[j]
	j2len := map[int]int{}
	for i := alo; i != ahi; i++ {
		// look at all instances of a[i] in b; note that because
		// b2j has no junk keys, the loop is skipped if a[i] is junk
		newj2len := map[int]int{}
		for _, j := range m.b2j[m.a[i]] {
			// a[i] matches b[j]
			if j < blo {
				continue
			}
			if j >= bhi {
				break
			}
			k := j2len[j-1] + 1
			newj2len[j] = k
			if k > bestsize {
				besti, bestj, bestsize = i-k+1, j-k+1, k
			}
		}
		j2len = newj2len
	}

	// Extend the best by non-junk elements on each end. In particular,
	// "popular" non-junk elements aren't in b2j, which greatly speeds
	// the inner loop above, but also means "the best" match so far
	// doesn't contain any junk *or* popular non-junk elements.
	for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) &&
		m.a[besti-1] == m.b[bestj-1] {
		besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
	}
	for besti+bestsize < ahi && bestj+bestsize < bhi &&
		!m.isBJunk(m.b[bestj+bestsize]) &&
		m.a[besti+bestsize] == m.b[bestj+bestsize] {
		bestsize += 1
	}

	// Now that we have a wholly interesting match (albeit possibly
	// empty!), we may as well suck up the matching junk on each
	// side of it too. Can't think of a good reason not to, and it
	// saves post-processing the (possibly considerable) expense of
	// figuring out what to do with it. In the case of an empty
	// interesting match, this is clearly the right thing to do,
	// because no other kind of match is possible in the regions.
	for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) &&
		m.a[besti-1] == m.b[bestj-1] {
		besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
	}
	for besti+bestsize < ahi && bestj+bestsize < bhi &&
		m.isBJunk(m.b[bestj+bestsize]) &&
		m.a[besti+bestsize] == m.b[bestj+bestsize] {
		bestsize += 1
	}

	return Match{A: besti, B: bestj, Size: bestsize}
}

// Return list of triples describing matching subsequences.
//
// Each triple is of the form (i, j, n), and means that
// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in
// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are
// adjacent triples in the list, and the second is not the last triple in the
// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe
// adjacent equal blocks.
//
// The last triple is a dummy, (len(a), len(b), 0), and is the only
// triple with n==0.
func (m *SequenceMatcher) GetMatchingBlocks() []Match {
	if m.matchingBlocks != nil {
		return m.matchingBlocks
	}

	var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match
	matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match {
		match := m.findLongestMatch(alo, ahi, blo, bhi)
		i, j, k := match.A, match.B, match.Size
		if match.Size > 0 {
			if alo < i && blo < j {
				matched = matchBlocks(alo, i, blo, j, matched)
			}
			matched = append(matched, match)
			if i+k < ahi && j+k < bhi {
				matched = matchBlocks(i+k, ahi, j+k, bhi, matched)
			}
		}
		return matched
	}
	matched := matchBlocks(0, len(m.a), 0, len(m.b), nil)

	// It's possible that we have adjacent equal blocks in the
	// matching_blocks list now.
	nonAdjacent := []Match{}
	i1, j1, k1 := 0, 0, 0
	for _, b := range matched {
		// Is this block adjacent to i1, j1, k1?
		i2, j2, k2 := b.A, b.B, b.Size
		if i1+k1 == i2 && j1+k1 == j2 {
			// Yes, so collapse them -- this just increases the length of
			// the first block by the length of the second, and the first
			// block so lengthened remains the block to compare against.
			k1 += k2
		} else {
			// Not adjacent. Remember the first block (k1==0 means it's
			// the dummy we started with), and make the second block the
			// new block to compare against.
			if k1 > 0 {
				nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
			}
			i1, j1, k1 = i2, j2, k2
		}
	}
	if k1 > 0 {
		nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
	}

	nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0})
	m.matchingBlocks = nonAdjacent
	return m.matchingBlocks
}

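To see the matching blocks (and the terminating sentinel) for two short sequences, a small standalone sketch, assuming the usual import path for this vendored package:

```
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

// Prints every matching block, including the (len(a), len(b), 0) sentinel.
func main() {
	m := difflib.NewMatcher(
		[]string{"a", "b", "c", "d"},
		[]string{"b", "c", "d", "e"},
	)
	for _, blk := range m.GetMatchingBlocks() {
		fmt.Printf("a[%d:%d] == b[%d:%d] (size %d)\n",
			blk.A, blk.A+blk.Size, blk.B, blk.B+blk.Size, blk.Size)
	}
}
```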
// Return list of 5-tuples describing how to turn a into b.
//
// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple
// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
// tuple preceding it, and likewise for j1 == the previous j2.
//
// The tags are characters, with these meanings:
//
// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2]
//
// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case.
//
// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case.
//
// 'e' (equal): a[i1:i2] == b[j1:j2]
func (m *SequenceMatcher) GetOpCodes() []OpCode {
	if m.opCodes != nil {
		return m.opCodes
	}
	i, j := 0, 0
	matching := m.GetMatchingBlocks()
	opCodes := make([]OpCode, 0, len(matching))
	for _, m := range matching {
		// invariant: we've pumped out correct diffs to change
		// a[:i] into b[:j], and the next matching block is
		// a[ai:ai+size] == b[bj:bj+size]. So we need to pump
		// out a diff to change a[i:ai] into b[j:bj], pump out
		// the matching block, and move (i,j) beyond the match
		ai, bj, size := m.A, m.B, m.Size
		tag := byte(0)
		if i < ai && j < bj {
			tag = 'r'
		} else if i < ai {
			tag = 'd'
		} else if j < bj {
			tag = 'i'
		}
		if tag > 0 {
			opCodes = append(opCodes, OpCode{tag, i, ai, j, bj})
		}
		i, j = ai+size, bj+size
		// the list of matching blocks is terminated by a
		// sentinel with size 0
		if size > 0 {
			opCodes = append(opCodes, OpCode{'e', ai, i, bj, j})
		}
	}
	m.opCodes = opCodes
	return m.opCodes
}

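The opcodes form the edit script between the two sequences; a minimal standalone sketch that prints each tag and the ranges it covers (same assumed import path as above):

```
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	m := difflib.NewMatcher(
		difflib.SplitLines("one\ntwo\nthree\n"),
		difflib.SplitLines("one\nthree\nfour\n"),
	)
	// Tags: 'r' replace, 'd' delete, 'i' insert, 'e' equal.
	for _, op := range m.GetOpCodes() {
		fmt.Printf("%c a[%d:%d] b[%d:%d]\n", op.Tag, op.I1, op.I2, op.J1, op.J2)
	}
}
```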
// Isolate change clusters by eliminating ranges with no changes.
//
// Return a generator of groups with up to n lines of context.
// Each group is in the same format as returned by GetOpCodes().
func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode {
	if n < 0 {
		n = 3
	}
	codes := m.GetOpCodes()
	if len(codes) == 0 {
		codes = []OpCode{OpCode{'e', 0, 1, 0, 1}}
	}
	// Fixup leading and trailing groups if they show no changes.
	if codes[0].Tag == 'e' {
		c := codes[0]
		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
		codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2}
	}
	if codes[len(codes)-1].Tag == 'e' {
		c := codes[len(codes)-1]
		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
		codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)}
	}
	nn := n + n
	groups := [][]OpCode{}
	group := []OpCode{}
	for _, c := range codes {
		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
		// End the current group and start a new one whenever
		// there is a large range with no changes.
		if c.Tag == 'e' && i2-i1 > nn {
			group = append(group, OpCode{c.Tag, i1, min(i2, i1+n),
				j1, min(j2, j1+n)})
			groups = append(groups, group)
			group = []OpCode{}
			i1, j1 = max(i1, i2-n), max(j1, j2-n)
		}
		group = append(group, OpCode{c.Tag, i1, i2, j1, j2})
	}
	if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') {
		groups = append(groups, group)
	}
	return groups
}

// Return a measure of the sequences' similarity (float in [0,1]).
//
// Where T is the total number of elements in both sequences, and
// M is the number of matches, this is 2.0*M / T.
// Note that this is 1 if the sequences are identical, and 0 if
// they have nothing in common.
//
// .Ratio() is expensive to compute if you haven't already computed
// .GetMatchingBlocks() or .GetOpCodes(), in which case you may
// want to try .QuickRatio() or .RealQuickRatio() first to get an
// upper bound.
func (m *SequenceMatcher) Ratio() float64 {
	matches := 0
	for _, m := range m.GetMatchingBlocks() {
		matches += m.Size
	}
	return calculateRatio(matches, len(m.a)+len(m.b))
}

// Return an upper bound on ratio() relatively quickly.
//
// This isn't defined beyond that it is an upper bound on .Ratio(), and
// is faster to compute.
func (m *SequenceMatcher) QuickRatio() float64 {
	// viewing a and b as multisets, set matches to the cardinality
	// of their intersection; this counts the number of matches
	// without regard to order, so is clearly an upper bound
	if m.fullBCount == nil {
		m.fullBCount = map[string]int{}
		for _, s := range m.b {
			m.fullBCount[s] = m.fullBCount[s] + 1
		}
	}

	// avail[x] is the number of times x appears in 'b' less the
	// number of times we've seen it in 'a' so far ... kinda
	avail := map[string]int{}
	matches := 0
	for _, s := range m.a {
		n, ok := avail[s]
		if !ok {
			n = m.fullBCount[s]
		}
		avail[s] = n - 1
		if n > 0 {
			matches += 1
		}
	}
	return calculateRatio(matches, len(m.a)+len(m.b))
}

// Return an upper bound on ratio() very quickly.
//
// This isn't defined beyond that it is an upper bound on .Ratio(), and
// is faster to compute than either .Ratio() or .QuickRatio().
func (m *SequenceMatcher) RealQuickRatio() float64 {
	la, lb := len(m.a), len(m.b)
	return calculateRatio(min(la, lb), la+lb)
}

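Ratio is exact (2*M/T over the matching blocks), while QuickRatio and RealQuickRatio give successively cheaper upper bounds. A short standalone sketch (same assumed import path):

```
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	m := difflib.NewMatcher([]string{"a", "b", "c"}, []string{"a", "x", "c"})
	fmt.Println(m.Ratio())          // exact similarity, 2*M/T
	fmt.Println(m.QuickRatio())     // upper bound from the multiset intersection
	fmt.Println(m.RealQuickRatio()) // upper bound from the lengths alone
}
```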
// Convert range to the "ed" format
func formatRangeUnified(start, stop int) string {
	// Per the diff spec at http://www.unix.org/single_unix_specification/
	beginning := start + 1 // lines start numbering with one
	length := stop - start
	if length == 1 {
		return fmt.Sprintf("%d", beginning)
	}
	if length == 0 {
		beginning -= 1 // empty ranges begin at line just before the range
	}
	return fmt.Sprintf("%d,%d", beginning, length)
}

// Unified diff parameters
type UnifiedDiff struct {
	A        []string // First sequence lines
	FromFile string   // First file name
	FromDate string   // First file time
	B        []string // Second sequence lines
	ToFile   string   // Second file name
	ToDate   string   // Second file time
	Eol      string   // Headers end of line, defaults to LF
	Context  int      // Number of context lines
}

// Compare two sequences of lines; generate the delta as a unified diff.
//
// Unified diffs are a compact way of showing line changes and a few
// lines of context. The number of context lines is set by 'n' which
// defaults to three.
//
// By default, the diff control lines (those with ---, +++, or @@) are
// created with a trailing newline. This is helpful so that inputs
// created from file.readlines() result in diffs that are suitable for
// file.writelines() since both the inputs and outputs have trailing
// newlines.
//
// For inputs that do not have trailing newlines, set the lineterm
// argument to "" so that the output will be uniformly newline free.
//
// The unidiff format normally has a header for filenames and modification
// times. Any or all of these may be specified using strings for
// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
// The modification times are normally expressed in the ISO 8601 format.
func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error {
	buf := bufio.NewWriter(writer)
	defer buf.Flush()
	wf := func(format string, args ...interface{}) error {
		_, err := buf.WriteString(fmt.Sprintf(format, args...))
		return err
	}
	ws := func(s string) error {
		_, err := buf.WriteString(s)
		return err
	}

	if len(diff.Eol) == 0 {
		diff.Eol = "\n"
	}

	started := false
	m := NewMatcher(diff.A, diff.B)
	for _, g := range m.GetGroupedOpCodes(diff.Context) {
		if !started {
			started = true
			fromDate := ""
			if len(diff.FromDate) > 0 {
				fromDate = "\t" + diff.FromDate
			}
			toDate := ""
			if len(diff.ToDate) > 0 {
				toDate = "\t" + diff.ToDate
			}
			if diff.FromFile != "" || diff.ToFile != "" {
				err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol)
				if err != nil {
					return err
				}
				err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol)
				if err != nil {
					return err
				}
			}
		}
		first, last := g[0], g[len(g)-1]
		range1 := formatRangeUnified(first.I1, last.I2)
		range2 := formatRangeUnified(first.J1, last.J2)
		if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil {
			return err
		}
		for _, c := range g {
			i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
			if c.Tag == 'e' {
				for _, line := range diff.A[i1:i2] {
					if err := ws(" " + line); err != nil {
						return err
					}
				}
				continue
			}
			if c.Tag == 'r' || c.Tag == 'd' {
				for _, line := range diff.A[i1:i2] {
					if err := ws("-" + line); err != nil {
						return err
					}
				}
			}
			if c.Tag == 'r' || c.Tag == 'i' {
				for _, line := range diff.B[j1:j2] {
					if err := ws("+" + line); err != nil {
						return err
					}
				}
			}
		}
	}
	return nil
}

// Like WriteUnifiedDiff but returns the diff as a string.
func GetUnifiedDiffString(diff UnifiedDiff) (string, error) {
	w := &bytes.Buffer{}
	err := WriteUnifiedDiff(w, diff)
	return string(w.Bytes()), err
}

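Typical use of the unified-diff entry points, with SplitLines preparing newline-terminated input, assuming the conventional import path github.com/pmezard/go-difflib/difflib for this vendored package:

```
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	diff := difflib.UnifiedDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\nthree\nfour\n"),
		FromFile: "a.txt",
		ToFile:   "b.txt",
		Context:  3,
	}
	text, err := difflib.GetUnifiedDiffString(diff)
	if err != nil {
		panic(err)
	}
	fmt.Print(text)
}
```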
// Convert range to the "ed" format.
func formatRangeContext(start, stop int) string {
	// Per the diff spec at http://www.unix.org/single_unix_specification/
	beginning := start + 1 // lines start numbering with one
	length := stop - start
	if length == 0 {
		beginning -= 1 // empty ranges begin at line just before the range
	}
	if length <= 1 {
		return fmt.Sprintf("%d", beginning)
	}
	return fmt.Sprintf("%d,%d", beginning, beginning+length-1)
}

type ContextDiff UnifiedDiff

// Compare two sequences of lines; generate the delta as a context diff.
//
// Context diffs are a compact way of showing line changes and a few
// lines of context. The number of context lines is set by diff.Context
// which defaults to three.
//
// By default, the diff control lines (those with *** or ---) are
// created with a trailing newline.
//
// For inputs that do not have trailing newlines, set the diff.Eol
// argument to "" so that the output will be uniformly newline free.
//
// The context diff format normally has a header for filenames and
// modification times. Any or all of these may be specified using
// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate.
// The modification times are normally expressed in the ISO 8601 format.
// If not specified, the strings default to blanks.
func WriteContextDiff(writer io.Writer, diff ContextDiff) error {
	buf := bufio.NewWriter(writer)
	defer buf.Flush()
	var diffErr error
	wf := func(format string, args ...interface{}) {
		_, err := buf.WriteString(fmt.Sprintf(format, args...))
		if diffErr == nil && err != nil {
			diffErr = err
		}
	}
	ws := func(s string) {
		_, err := buf.WriteString(s)
		if diffErr == nil && err != nil {
			diffErr = err
		}
	}

	if len(diff.Eol) == 0 {
		diff.Eol = "\n"
	}

	prefix := map[byte]string{
		'i': "+ ",
		'd': "- ",
		'r': "! ",
		'e': "  ",
	}

	started := false
	m := NewMatcher(diff.A, diff.B)
	for _, g := range m.GetGroupedOpCodes(diff.Context) {
		if !started {
			started = true
			fromDate := ""
			if len(diff.FromDate) > 0 {
				fromDate = "\t" + diff.FromDate
			}
			toDate := ""
			if len(diff.ToDate) > 0 {
				toDate = "\t" + diff.ToDate
			}
			if diff.FromFile != "" || diff.ToFile != "" {
				wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol)
				wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol)
			}
		}

		first, last := g[0], g[len(g)-1]
		ws("***************" + diff.Eol)

		range1 := formatRangeContext(first.I1, last.I2)
		wf("*** %s ****%s", range1, diff.Eol)
		for _, c := range g {
			if c.Tag == 'r' || c.Tag == 'd' {
				for _, cc := range g {
					if cc.Tag == 'i' {
						continue
					}
					for _, line := range diff.A[cc.I1:cc.I2] {
						ws(prefix[cc.Tag] + line)
					}
				}
				break
			}
		}

		range2 := formatRangeContext(first.J1, last.J2)
		wf("--- %s ----%s", range2, diff.Eol)
		for _, c := range g {
			if c.Tag == 'r' || c.Tag == 'i' {
				for _, cc := range g {
					if cc.Tag == 'd' {
						continue
					}
					for _, line := range diff.B[cc.J1:cc.J2] {
						ws(prefix[cc.Tag] + line)
					}
				}
				break
			}
		}
	}
	return diffErr
}

// Like WriteContextDiff but returns the diff as a string.
func GetContextDiffString(diff ContextDiff) (string, error) {
	w := &bytes.Buffer{}
	err := WriteContextDiff(w, diff)
	return string(w.Bytes()), err
}

// Split a string on "\n" while preserving them. The output can be used
// as input for UnifiedDiff and ContextDiff structures.
func SplitLines(s string) []string {
	lines := strings.SplitAfter(s, "\n")
	lines[len(lines)-1] += "\n"
	return lines
}

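The context-diff variant reuses the same field set through the ContextDiff type; a direct construction sketch (same assumed import path):

```
package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	diff := difflib.ContextDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\nthree\nfour\n"),
		FromFile: "a.txt",
		ToFile:   "b.txt",
		Context:  3,
	}
	text, err := difflib.GetContextDiffString(diff)
	if err != nil {
		panic(err)
	}
	fmt.Print(text)
}
```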
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,394 @@
package assert

import (
	"fmt"
	"reflect"
)

type CompareType int

const (
	compareLess CompareType = iota - 1
	compareEqual
	compareGreater
)

var (
	intType   = reflect.TypeOf(int(1))
	int8Type  = reflect.TypeOf(int8(1))
	int16Type = reflect.TypeOf(int16(1))
	int32Type = reflect.TypeOf(int32(1))
	int64Type = reflect.TypeOf(int64(1))

	uintType   = reflect.TypeOf(uint(1))
	uint8Type  = reflect.TypeOf(uint8(1))
	uint16Type = reflect.TypeOf(uint16(1))
	uint32Type = reflect.TypeOf(uint32(1))
	uint64Type = reflect.TypeOf(uint64(1))

	float32Type = reflect.TypeOf(float32(1))
	float64Type = reflect.TypeOf(float64(1))

	stringType = reflect.TypeOf("")
)

func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
	obj1Value := reflect.ValueOf(obj1)
	obj2Value := reflect.ValueOf(obj2)

	// throughout this switch we try and avoid calling .Convert() if possible,
	// as this has a pretty big performance impact
	switch kind {
	case reflect.Int:
		{
			intobj1, ok := obj1.(int)
			if !ok {
				intobj1 = obj1Value.Convert(intType).Interface().(int)
			}
			intobj2, ok := obj2.(int)
			if !ok {
				intobj2 = obj2Value.Convert(intType).Interface().(int)
			}
			if intobj1 > intobj2 {
				return compareGreater, true
			}
			if intobj1 == intobj2 {
				return compareEqual, true
			}
			if intobj1 < intobj2 {
				return compareLess, true
			}
		}
	case reflect.Int8:
		{
			int8obj1, ok := obj1.(int8)
			if !ok {
				int8obj1 = obj1Value.Convert(int8Type).Interface().(int8)
			}
			int8obj2, ok := obj2.(int8)
			if !ok {
				int8obj2 = obj2Value.Convert(int8Type).Interface().(int8)
			}
			if int8obj1 > int8obj2 {
				return compareGreater, true
			}
			if int8obj1 == int8obj2 {
				return compareEqual, true
			}
			if int8obj1 < int8obj2 {
				return compareLess, true
			}
		}
	case reflect.Int16:
		{
			int16obj1, ok := obj1.(int16)
			if !ok {
				int16obj1 = obj1Value.Convert(int16Type).Interface().(int16)
			}
			int16obj2, ok := obj2.(int16)
			if !ok {
				int16obj2 = obj2Value.Convert(int16Type).Interface().(int16)
			}
			if int16obj1 > int16obj2 {
				return compareGreater, true
			}
			if int16obj1 == int16obj2 {
				return compareEqual, true
			}
			if int16obj1 < int16obj2 {
				return compareLess, true
			}
		}
	case reflect.Int32:
		{
			int32obj1, ok := obj1.(int32)
			if !ok {
				int32obj1 = obj1Value.Convert(int32Type).Interface().(int32)
			}
			int32obj2, ok := obj2.(int32)
			if !ok {
				int32obj2 = obj2Value.Convert(int32Type).Interface().(int32)
			}
			if int32obj1 > int32obj2 {
				return compareGreater, true
			}
			if int32obj1 == int32obj2 {
				return compareEqual, true
			}
			if int32obj1 < int32obj2 {
				return compareLess, true
			}
		}
	case reflect.Int64:
		{
			int64obj1, ok := obj1.(int64)
			if !ok {
				int64obj1 = obj1Value.Convert(int64Type).Interface().(int64)
			}
			int64obj2, ok := obj2.(int64)
			if !ok {
				int64obj2 = obj2Value.Convert(int64Type).Interface().(int64)
			}
			if int64obj1 > int64obj2 {
				return compareGreater, true
			}
			if int64obj1 == int64obj2 {
				return compareEqual, true
			}
			if int64obj1 < int64obj2 {
				return compareLess, true
			}
		}
	case reflect.Uint:
		{
			uintobj1, ok := obj1.(uint)
			if !ok {
				uintobj1 = obj1Value.Convert(uintType).Interface().(uint)
			}
			uintobj2, ok := obj2.(uint)
			if !ok {
				uintobj2 = obj2Value.Convert(uintType).Interface().(uint)
			}
			if uintobj1 > uintobj2 {
				return compareGreater, true
			}
			if uintobj1 == uintobj2 {
				return compareEqual, true
			}
			if uintobj1 < uintobj2 {
				return compareLess, true
			}
		}
	case reflect.Uint8:
		{
			uint8obj1, ok := obj1.(uint8)
			if !ok {
				uint8obj1 = obj1Value.Convert(uint8Type).Interface().(uint8)
			}
			uint8obj2, ok := obj2.(uint8)
			if !ok {
				uint8obj2 = obj2Value.Convert(uint8Type).Interface().(uint8)
			}
			if uint8obj1 > uint8obj2 {
				return compareGreater, true
			}
			if uint8obj1 == uint8obj2 {
				return compareEqual, true
			}
			if uint8obj1 < uint8obj2 {
				return compareLess, true
			}
		}
	case reflect.Uint16:
		{
			uint16obj1, ok := obj1.(uint16)
			if !ok {
				uint16obj1 = obj1Value.Convert(uint16Type).Interface().(uint16)
			}
			uint16obj2, ok := obj2.(uint16)
			if !ok {
				uint16obj2 = obj2Value.Convert(uint16Type).Interface().(uint16)
			}
			if uint16obj1 > uint16obj2 {
				return compareGreater, true
			}
			if uint16obj1 == uint16obj2 {
				return compareEqual, true
			}
			if uint16obj1 < uint16obj2 {
				return compareLess, true
			}
		}
	case reflect.Uint32:
		{
			uint32obj1, ok := obj1.(uint32)
			if !ok {
				uint32obj1 = obj1Value.Convert(uint32Type).Interface().(uint32)
			}
			uint32obj2, ok := obj2.(uint32)
			if !ok {
				uint32obj2 = obj2Value.Convert(uint32Type).Interface().(uint32)
			}
			if uint32obj1 > uint32obj2 {
				return compareGreater, true
			}
			if uint32obj1 == uint32obj2 {
				return compareEqual, true
			}
			if uint32obj1 < uint32obj2 {
				return compareLess, true
			}
		}
	case reflect.Uint64:
		{
			uint64obj1, ok := obj1.(uint64)
			if !ok {
				uint64obj1 = obj1Value.Convert(uint64Type).Interface().(uint64)
			}
			uint64obj2, ok := obj2.(uint64)
			if !ok {
				uint64obj2 = obj2Value.Convert(uint64Type).Interface().(uint64)
			}
			if uint64obj1 > uint64obj2 {
				return compareGreater, true
			}
			if uint64obj1 == uint64obj2 {
				return compareEqual, true
			}
			if uint64obj1 < uint64obj2 {
				return compareLess, true
			}
		}
	case reflect.Float32:
		{
			float32obj1, ok := obj1.(float32)
			if !ok {
				float32obj1 = obj1Value.Convert(float32Type).Interface().(float32)
			}
			float32obj2, ok := obj2.(float32)
			if !ok {
				float32obj2 = obj2Value.Convert(float32Type).Interface().(float32)
			}
			if float32obj1 > float32obj2 {
				return compareGreater, true
			}
			if float32obj1 == float32obj2 {
				return compareEqual, true
			}
			if float32obj1 < float32obj2 {
				return compareLess, true
			}
		}
	case reflect.Float64:
		{
			float64obj1, ok := obj1.(float64)
			if !ok {
				float64obj1 = obj1Value.Convert(float64Type).Interface().(float64)
			}
			float64obj2, ok := obj2.(float64)
			if !ok {
				float64obj2 = obj2Value.Convert(float64Type).Interface().(float64)
			}
			if float64obj1 > float64obj2 {
				return compareGreater, true
			}
			if float64obj1 == float64obj2 {
				return compareEqual, true
			}
			if float64obj1 < float64obj2 {
				return compareLess, true
			}
		}
	case reflect.String:
		{
			stringobj1, ok := obj1.(string)
			if !ok {
				stringobj1 = obj1Value.Convert(stringType).Interface().(string)
			}
			stringobj2, ok := obj2.(string)
			if !ok {
				stringobj2 = obj2Value.Convert(stringType).Interface().(string)
			}
			if stringobj1 > stringobj2 {
				return compareGreater, true
			}
			if stringobj1 == stringobj2 {
				return compareEqual, true
			}
			if stringobj1 < stringobj2 {
				return compareLess, true
			}
		}
	}

	return compareEqual, false
}

// Greater asserts that the first element is greater than the second
//
// assert.Greater(t, 2, 1)
// assert.Greater(t, float64(2), float64(1))
// assert.Greater(t, "b", "a")
func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool {
	return compareTwoValues(t, e1, e2, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs)
}

// GreaterOrEqual asserts that the first element is greater than or equal to the second
//
// assert.GreaterOrEqual(t, 2, 1)
// assert.GreaterOrEqual(t, 2, 2)
// assert.GreaterOrEqual(t, "b", "a")
// assert.GreaterOrEqual(t, "b", "b")
func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool {
	return compareTwoValues(t, e1, e2, []CompareType{compareGreater, compareEqual}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs)
}

// Less asserts that the first element is less than the second
//
// assert.Less(t, 1, 2)
// assert.Less(t, float64(1), float64(2))
// assert.Less(t, "a", "b")
func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool {
	return compareTwoValues(t, e1, e2, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs)
}

// LessOrEqual asserts that the first element is less than or equal to the second
//
// assert.LessOrEqual(t, 1, 2)
// assert.LessOrEqual(t, 2, 2)
// assert.LessOrEqual(t, "a", "b")
// assert.LessOrEqual(t, "b", "b")
func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool {
	return compareTwoValues(t, e1, e2, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs)
}

// Positive asserts that the specified element is positive
//
// assert.Positive(t, 1)
// assert.Positive(t, 1.23)
func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool {
	zero := reflect.Zero(reflect.TypeOf(e))
	return compareTwoValues(t, e, zero.Interface(), []CompareType{compareGreater}, "\"%v\" is not positive", msgAndArgs)
}

// Negative asserts that the specified element is negative
//
// assert.Negative(t, -1)
// assert.Negative(t, -1.23)
func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool {
	zero := reflect.Zero(reflect.TypeOf(e))
	return compareTwoValues(t, e, zero.Interface(), []CompareType{compareLess}, "\"%v\" is not negative", msgAndArgs)
}

func compareTwoValues(t TestingT, e1 interface{}, e2 interface{}, allowedComparesResults []CompareType, failMessage string, msgAndArgs ...interface{}) bool {
	if h, ok := t.(tHelper); ok {
		h.Helper()
	}

	e1Kind := reflect.ValueOf(e1).Kind()
	e2Kind := reflect.ValueOf(e2).Kind()
	if e1Kind != e2Kind {
		return Fail(t, "Elements should be the same type", msgAndArgs...)
	}

	compareResult, isComparable := compare(e1, e2, e1Kind)
	if !isComparable {
		return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...)
	}

	if !containsValue(allowedComparesResults, compareResult) {
		return Fail(t, fmt.Sprintf(failMessage, e1, e2), msgAndArgs...)
	}

	return true
}

func containsValue(values []CompareType, value CompareType) bool {
	for _, v := range values {
		if v == value {
			return true
		}
	}

	return false
}
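A minimal standalone test sketch using the exported comparison helpers above (assuming the usual import path github.com/stretchr/testify/assert):

```
package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// Hypothetical test exercising the comparison helpers defined above.
func TestComparisons(t *testing.T) {
	assert.Greater(t, 2, 1)
	assert.GreaterOrEqual(t, 2, 2)
	assert.Less(t, "a", "b")
	assert.LessOrEqual(t, 1.0, 1.5)
	assert.Positive(t, 42)
	assert.Negative(t, -3)

	// Mixed kinds fail: both values must have the same reflect.Kind,
	// otherwise the assertion reports "Elements should be the same type".
}
```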
Some files were not shown because too many files have changed in this diff.