upgrade to latest dependencies (#914)

bumping google.golang.org/genproto/googleapis/rpc 8af14fe...796eee8:
  > 796eee8 chore(all): update all (# 1163)
  > c02fea0 chore: fix kokoro with safedir for git (# 1164)
  > 5fefd90 chore(all): update all (# 1162)
  > af27646 chore(all): update all (# 1161)
  > 9d4c2d2 chore(all): update all (# 1160)
bumping google.golang.org/genproto/googleapis/api 8af14fe...796eee8:
  > 796eee8 chore(all): update all (# 1163)
  > c02fea0 chore: fix kokoro with safedir for git (# 1164)
  > 5fefd90 chore(all): update all (# 1162)
  > af27646 chore(all): update all (# 1161)
  > 9d4c2d2 chore(all): update all (# 1160)
bumping k8s.io/client-go ece8c00...4b5b7fa:
  > 4b5b7fa Update dependencies to v0.31.4 tag
  > 5e3e8ea informers: add comment that Start does not block
  > f71a5cc Call non-blocking informerFactory.Start synchronously to avoid races
  > 4536e5a Merge pull request # 124012 from Jefftree/le-controller
  > 93c6a5b Merge pull request # 126353 from liggitt/fix-vendor
  > 825f52e Change PingTime to be persistent
  > 6a9911a revendor dependencies
  > f45c451 fix ordering issue in candidates
  > fe54892 Merge pull request # 126243 from SergeyKanzhelev/devicePluginFailures
  > 18dd587 feedback: leasecandidate clients
  > 79fd7ab generated files
  > 1f27757 Review feedback
  > ac9204c Merge pull request # 126145 from carlory/kep-3751-api
  > 2099375 CLE controller and client changes
  > dcfcc90 Merge pull request # 126125 from mprahl/stop-idempotent
  > 9dea255 Promote VolumeAttributesClass to beta
  > 8a2bbd0 Coordinated Leader Election Alpha API
  > bad8f77 Merge pull request # 126091 from seans3/ws-err-extra-info
  > 001900e Allow calling Stop multiple times on RetryWatcher
  > 3aff10e Adds extra error information from response to bad handshake error when possible
  > a9affb4 Merge pull request # 125488 from pohly/dra-1.31
  > a7db3ad DRA: new API for 1.31
  > e0bc24e DRA: remove "sharable" from claim allocation result
  > a7f430b DRA: remove immediate allocation
  > 91ff2f6 DRA: bump API v1alpha2 -> v1alpha3
  > 82af755 Merge pull request # 126231 from seans3/websocket-https-proxy-fix
  > 6e31289 moving for easier cherry-pick
  > 8108d41 Falls back to SPDY for gorilla/websocket https proxy error
  > 5130bd9 Merge pull request # 126207 from thockin/ingress-backend-port-atomic
  > 80707e5 Merge pull request # 125660 from saschagrunert/oci-volumesource-api
  > cd892da Make ServiceBackendPort an atomic struct
  > 5742b23 Merge pull request # 124859 from morlay/master
  > 485ae13 Add ImageVolumeSource API
  > 6dde7fe Merge pull request # 126073 from a7i/fake-apply-scale-subresource
  > 9ab93c0 Remove json:",omitempty" where json:",inline" specified.
  > 37121f3 Merge pull request # 125470 from everpeace/kep-3619-SupplementalGroupsPolicy-e2e
  > eab51c4 fix: fake clientset ApplyScale subresource from 'status' to 'scale'
  > 1ea671a KEP-3619: API: add NodeFeatures.SupplementalGroupsPolicy in NodeStatus
  > 59ef8e1 Merge pull request # 126034 from sohankunkerkar/add-usernamespaces
  > 21b1828 api: add user namespaces field to NodeRuntimeHandlerFeatures
  > 0724a99 Merge pull request # 125305 from p0lyn0mial/upstream-dynamic-client-uses-watch-list
  > 34751e1 Merge pull request # 126018 from aroradaman/bump-k8s-utils
  > 732dd28 dynamic client: add support for API streaming
  > b6a1b42 Merge pull request # 126057 from thockin/make-pod-ip-host-ip-required
  > 37c2760 bump k8s.io/utils
  > fa8c68e make PodIP.IP and HostIP.IP required.
  > 53b8be7 Merge pull request # 125802 from mmorel-35/testifylint/len+empty
  > 7f36d81 Merge pull request # 125944 from timofurrer/fix/error-msg-type
  > acc5917 fix: enable empty and len rules from testifylint on pkg package
  > 55c0e2c Fix typo in type name of watch decode error
  > 354ed1b Merge pull request # 125922 from dims/update_otel_27
  > 127e4c3 Merge pull request # 125887 from fxierh/fakediscovery-fix
  > e57f162 update OpenTelemetry dependencies and grpc
  > c45bc43 Return the error returned by Invokes so the FakeDiscovery client is able to simulate any error with reactors.
  > ab86e03 Merge pull request # 125021 from aojea/servicecidrbeta
  > 4dec662 Merge pull request # 125116 from pohly/dra-one-of-source
  > a146a0f make update
  > ae071bc Merge pull request # 125759 from dims/bump-prometheus/common-v0.55.0
  > fdffb52 DRA: remove "source" indirection from v1 Pod API
  > 4b5beb6 Merge pull request # 124917 from vinayakankugoyal/kep4633
  > 66473c1 Bump `prometheus/common` to v0.55.0
  > ef4b2f6 Merge pull request # 125766 from dims/update-moby/spdystream-to-v0.4.0
  > 1582c4c KEP-4633: Allow health-only anonymous auth mode.
  > 96c49b6 Merge pull request # 125600 from thockin/plus_default_deprecated_volumes
  > a2665af Update moby/spdystream to v0.4.0
  > b043b56 Merge pull request # 125745 from BenTheElder/ping-ping
  > af26305 Use +default for now deprecated ScaleIO volume
  > 11d6807 bump github.com/moby/spdystream to v0.3.0
  > f9b8f88 Use +default for now deprecated AzureDisk volume
  > 88829a4 Merge pull request # 125731 from dashpole/revert_otel
  > 90902b5 Use +default for now deprecated ISCSI volume
  > 0fbd594 Revert "update OpenTelemetry dependencies"
  > 64e74f9 Use +default for now deprecated RBD volume
  > 4e7651c Merge pull request # 125706 from jpbetz/fix-fixture-test
  > 8ffa531 Merge pull request # 125537 from seans3/rc-fallback-log
  > dd94093 Remove test dependency on swagger.json to fix client-go repo
  > 6090471 Merge pull request # 125669 from benluddy/cbor-bump-v2.7.0
  > 0b66457 Adds logging during remote command executor fallback
  > 9177465 Bump github.com/fxamacker/cbor/v2 to v2.7.0.
  > 430e755 Merge pull request # 125575 from dashpole/update_otel_27
  > 2923011 Merge pull request # 125560 from jpbetz/apply-gen-fake
  > fd8492c update OpenTelemetry dependencies
  > c4145a9 Generate code
  > 2c86652 Add field tracker support to client fake fixtures
  > 96f66e9 Support options for all client fake actions
  > b9309ac Merge pull request # 125531 from pohly/klog-update
  > db174bf dependencies: klog v2.130.1
  > e1202c7 Merge pull request # 121439 from skitt/generic-client-go
  > b31bc29 Run codegen
  > c834bcc Generify client-go
  > 6a88f2d Run codegen
  > 233a065 Run codegen
  > d2f5fba Merge pull request # 125528 from seans3/port-forward-beta
  > 6e44692 Graduate PortForwardWebsockets to Beta
  > cea1539 Merge pull request # 124509 from p0lyn0mial/upstream-watch-list-code-gen
  > e4e31fd Merge pull request # 125472 from karlkfi/karl-watch-comments
  > cc198ea make update
  > 5f43af4 Merge pull request # 125391 from karlkfi/karl-reflector-watch-stop
  > 48d8fc7 Add details to watch interface method comments
  > 39cc8de Merge pull request # 125435 from p0lyn0mial/upstream-rm-reflector-data-consistency-detector-test
  > f29a36d Refactor Reflector ListAndWatch
  > fb1003a Merge pull request # 125440 from p0lyn0mial/upstream-client-go-watchlist-can-use-watchlist-for-list-rq
  > 5347b09 client-go/reflector: remove reflector_data_consistency_detector_test.go
  > 9a760ef client-go/util/watchlist: intro CanUseWatchListForListRequest
  > 03443e7 Merge pull request # 125432 from p0lyn0mial/upstream-watch-list-data-consistency-detector
  > b681e77 client-go/reflector: use consistencydetector.IsDataConsistencyDetectionForWatchListEnabled
  > 14559c0 client-go/consistencydetector: introduce CheckWatchListFromCacheDataConsistencyIfRequested
  > 9116846 Merge pull request # 125408 from benluddy/bump-cbor-v2.7.0
  > 4d57b6a Bump fxamacker/cbor/v2 to v2.7.0-beta.
  > c415c76 Merge pull request # 125304 from p0lyn0mial/upstream-dynamic-client-uses-consistency-detector
  > 8fc5a64 Merge pull request # 125383 from p0lyn0mial/upstream-unstructured-testchecklistconsistency
  > 5d84e91 client-go/dynamic: use CheckListFromCacheDataConsistencyIfRequested
  > 503a090 client-go/util/consistencydetector: extend TestCheckListFromCacheDataConsistencyIfRequestedInternalHappyPath to work with unstructured list
  > 2d885a2 client-go/util/consistencydetector: refactor TestCheckListFromCacheDataConsistencyIfRequestedInternalHappyPath to work with unstructured data
  > e822473 Merge pull request # 125379 from p0lyn0mial/upstream-unstructured-testdataconsistencychecker
  > 94cabc8 Merge pull request # 125302 from karlkfi/karl-informer-watcher-test
  > 20d0a8e client-go/consistencydetector: extend TestDataConsistencyChecker to test unstructured data
  > 1fc4f8a Merge pull request # 124963 from p0lyn0mial/upstream-data-consistency-checker-for-list-requests
  > 7c5e903 Update TestNewInformerWatcher for WatchListClient
  > 8262774 client-go/consistencydetector: refactor TestDataConsistencyChecker to work with unstructured data
  > 1c075dc Merge pull request # 124642 from wojtek-t/resilient_watchcache_initialization
  > 9f33e96 make update
  > 098e7c5 Merge pull request # 125356 from p0lyn0mial/upstream-improve-testdrivechecklistfromcache-test
  > b444e6c Implement ResilientWatchCacheInitialization
  > bbe85a4 fix TestDriveCheckListFromCacheDataConsistencyIfRequested
  > b03e5b8 Merge pull request # 125335 from p0lyn0mial/upstream-consistency-decector-handles-legacy-case
  > c7d7068 client-go/consistencydetector: handles the watch cache legacy case
  > 7c6e307 Merge pull request # 125299 from karlkfi/karl-reflector-fix-2
  > 8c9cb88 Improve Reflector unit tests
  > 52e5651 Merge pull request # 122832 from benluddy/cbor-fuzz-native-to-unstructured-via
  > d81b8f6 Merge pull request # 123974 from p0lyn0mial/upstream-client-go-features-testing
  > 3aa51ce Update indirect dependencies with ./hack/update-vendor.sh.
  > 7adab2f KEP-3619: Fine-grained SupplementalGroups control (# 117842)
  > e89e40c client-go/rest: add TestWatchListWhenFeatureGateDisabled unit test
  > e8b5471 Merge pull request # 123339 from skitt/canonical-json-patch
  > e8eae94 client-go/features/testing: intro SetFeatureGatesDuringTest
  > 9c87eb0 Merge pull request # 125190 from pohly/record-event-panic
  > 16552d4 Use canonical json-patch v4 import
  > b4e1cfc client-go record: avoid panic when watch creation failed
  > ffda346 Update kubectl kustomize to kyaml/v0.17.1, cmd/config/v0.14.1, api/v0.17.2, kustomize/v5.4.2
  > 5d65011 Merge pull request # 125159 from p0lyn0mial/upstream-add-data-consistency-checker-for-list-requests
  > 3f130bd Merge pull request # 125166 from p0lyn0mial/upstream-improve-check-data-consistency
  > b7ea49a client-go/util/consistencydetector: add CheckListFromCacheDataConsistencyIfRequested
  > 5f741a5 Merge pull request # 125052 from p0lyn0mial/upstream-client-go-env-var-set
  > 785fca4 client-go/util/consistencydetector: improve validation of list parameters (RV, ListOptions)
  > b92b563 Merge pull request # 124635 from pohly/event-broadcaster-shutdown-fix
  > 98df4f7 client-go/features: add Set method to envvar impl
  > f53839a Merge pull request # 125045 from pohly/ginkgo-gomega-update
  > ebbf7d7 client-go/tools/record: fix and test Broadcaster shutdown + logging
  > c559583 Merge pull request # 125146 from p0lyn0mial/upstream-client-go-consistency-detector-move-to-new-package
  > 2f2b800 dependencies: ginkgo v2.19.0, gomega v1.33.1
  > 538b780 move checkWatchListDataConsistencyIfRequested back to client-go/tools/cache
  > e6e45e1 client-go/util/consistencydetector: make the detector public
  > 92f0985 client-go/util/consistencydetector: update after moving to the new package
  > e428fc2 move client-go/tools/cache/reflector_data_consistency_detector to client-go/util/consistencydetector
  > fd6b8e6 Merge pull request # 125144 from p0lyn0mial/upstream-client-go-consistency-detector-refactor-units
  > 00036b7 client-go/tools/cache/reflector_data_consistency_detector: refactor unit tests
  > 0123e78 Merge pull request # 124446 from p0lyn0mial/watch-list-consistency-detector-more-generic
  > 6bdde77 client-go/consistency-detector: change the signature of checkWatchListConsistencyIfRequested
  > c739619 Merge pull request # 122791 from p0lyn0mial/upstream-cleanup-watch-list-env-var
  > 4a34196 replace ENABLE_CLIENT_GO_WATCH_LIST_ALPHA with WatchListClient gate
  > 86f83bc Merge pull request # 124614 from p0lyn0mial/upstream-reflector-warn-no-bookmark-event
  > 0280901 client-go/reflector: warns when the bookmark event for initial events hasn't been received
  > 62f9597 Merge pull request # 124588 from jiuker/patch-2
  > 8a8d073 Merge pull request # 124757 from dims/update-to-latest-golang.org/x/oauth2-v0.20.0
  > f9eba8e fix: Hang when canceling leader election information
  > 988ddc2 Update to latest golang.org/x/oauth2 v0.20.0
  > 4ebe42d Merge pull request # 124600 from alvaroaleman/typed-wq
  > d3682da Merge pull request # 124519 from dims/drop-all-the-providery-things-take-2
  > 9aa3aae Use the generic/typed workqueue throughout
  > 35cab32 fix(api): make LocalObjectReference.Name and HostAlias.IP required (# 124553)
  > 64ff14b address comments during review
  > 049f231 Merge pull request # 124562 from sbueringer/pr-bump-sigs-yaml
  > af62623 Remove gcp in-tree cloud provider and credential provider
  > ea434df Bump sigs.k8s.io/yaml to v1.4.0
  > 6b47d7d Merge pull request # 124344 from wojtek-t/fix_transformer
  > 178bcf2 Merge pull request # 124469 from serathius/etcd-3.5.13
  > 2fe0574 Fix race in informer transformers
  > f3332a0 Merge pull request # 124263 from alvaroaleman/typed
  > e4f9b83 Upgrade etcd libraries to v3.5.13
  > 9433226 Merge pull request # 124075 from pohly/dra-api-comments
  > d67dddf Workqueue: Add generic versions that are properly typed
  > 4a5f039 Merge pull request # 122657 from p0lyn0mial/upstream-client-go-rest-client-watch-list
  > a457c5e DRA api: ResourceHandle.DriverName is required
  > 47f5582 Merge pull request # 124328 from jiahuif-forks/deps/cel-go
  > a1be94a client-go/rest: introduce watchlist
  > 66b378a Merge pull request # 124340 from benluddy/dynamic-client-golden-request-test
  > d071c08 generated: ./hack/update-vendor.sh
  > 02e371e Merge pull request # 121574 from skitt/generic-lister-gen
  > 4cd6b75 Generate HTTP request fixtures for dynamic client tests.
  > 34d7809 generated: ./hack/pin-dependency.sh github.com/google/cel-go v0.20.1
  > 5b2b83a Merge pull request # 124346 from jwcesign/master
  > 841e997 Improve the lister function documentation
  > b17c363 Add test to detect unintentional changes in dynamic client requests.
  > 5db05eb upgrade: upgrade dependencies github.com/prometheus/common to the newest version
  > 9d701d3 Regenerate all listers
  > b1c1c03 Merge pull request # 122892 from danwinship/codegen-nil
  > 05ff4bb Generify lister-gen
  > 506bc53 Merge pull request # 113257 from claudiubelu/path-filepath-update-staging
  > 3d4f98d Regenerate fake clients
  > 3407442 Merge pull request # 124245 from wojtek-t/informer_options
  > 79c893d Replaces path.Operation with filepath.Operation (staging)
  > 9990b0b Merge pull request # 123969 from liangyuanpeng/cleanup_rand
  > b9e952f Allow for configuring MinWatchTimeout in Reflector and Informer.
  > a82df61 Merge pull request # 123937 from p0lyn0mial/upstream-use-initial-events-annotation-key-const
  > fa185a2 cleanup: delete rand.Seed(time.Now().UnixNano()) and using global number generator.
  > 7da3197 Refactor informer constructors
  > 79491af Merge pull request # 123347 from zhouhaibing089/abstract-queue
  > 45e17fe client-go/cache/reflector: use metav1.InitialEventsAnnotationKey
  > aa7909e Merge pull request # 124174 from dims/update-x/net-for-CVE-2023-45288
  > 9c3db86 workqueue: make queue as configurable
  > a2cc490 Update x/net for CVE-2023-45288
  > 1518fca sync: update go.mod
  > eea636f Merge pull request # 123932 from pohly/dra-api-resource-model-rename
bumping golang.org/x/tools 4d2b19f...1743d1a:
  > 1743d1a go.mod: update golang.org/x dependencies
  > 43ba465 internal/analysis/modernize: minmax: reject IfStmt with Init
  > 29d66ee gopls: update toolchain to go1.23.4
  > ac39815 go/packages: add GOROOT env to avoid TestTarget failure in OpenBSD
  > 39e1a8c godoc: fix missing (Added in Go) "x.xx" for function with type parameters
  > 98a190b gopls/internal/analysis/unusedfunc: analyzer for unused funcs/methods
  > 192ac77 internal/golang: CodeAction: don't return empty source.doc titles
  > 5fe60fd internal/settings: drop "annotations" setting
  > c61a2b0 gopls/internal/golang: make GCDetails a code action, not a code lens
  > fc95c03 internal/modindex: fix index directory for tests
  > b93274b gopls/internal/cache: remove overzealous bug.Report
  > 960f0f4 x/tools: remove dead code
  > b0dc9b1 gopls/internal/filecache: Get: mitigate failure due to ENOSPC
  > 71b5dfe gopls: update telemetry dependency for crashmonitor fix
  > e3cedbd gopls/internal/server: fix nil panic in findMatchingDiagnostics
  > 649f485 all: make function and struct comments match the names
  > 26d1af2 gopls/internal/cache/parsego: fix OOB panic in fixAST
  > bf516ba gopls/internal/util/moremaps: use Sorted throughout gopls
  > 4f820db gopls/internal/golang: definition support break, goto and continue statements
  > f202b36 gopls/internal/analysis/modernize: string formatting
  > d6cc3cd go/analysis/passes/structtag: ignore findings for "encoding/json/v2"
  > 666fab2 go/analysis: document AllObjectFacts return value
  > 818ab3b gopls/doc/features: add missing `
  > a6adab9 gopls/internal/analysis/modernize: check FileVersions throughout
  > c437d40 gopls/internal/analysis/modernize: for...{m[k]=v} -> maps.Collect et al
  > a039694 internal/typesinternal: determine package name using qualifier
  > a304b37 internal/typesinternal: support union types in func TypeExpr
  > 412b5e9 go/types/typeutil: make Hasher stateless
  > 6d4eee4 go/callgraph: fix trivial mistake in benchmark
  > e56f71a gopls/internal/cmd: update codelens.hlp, change -run to -exec
  > 732f823 gopls/internal/analysis/modernize: b.N -> b.Loop()
  > 20cd540 gopls/internal/golang: check for nil ReceiverNamed
  > 02033b2 gopls/internal/analysis/modernize: simplify append(base, s...)
  > aa94d89 gopls/internal/analysis/modernize: replace interface{} with any
  > 93cc684 gopls/internal/analysis/modernize: sort.Slice -> slices.Sort
  > 3cad859 gopls/internal/analysis/modernize: quick fixes to modernize Go code
  > 8194029 gopls/internal/golang: hover support return statements
  > 851152f go/ast/inspector: add explicit parent pointer
  > b75baa0 internal/astutil/cursor: Cursor API for inspector
  > ebeac1b gopls/internal/util/astutil: fix tiny doc typo
  > 8e66a04 gopls/internal/util/astutil: NodeContains: workaround for ast.File
  > ea69910 gopls/internal/golang: fix DocLink markup
  > cb7945a gopls/internal/cache: move symbols to the file cache
  > dcc725c gopls/internal/cache/testfuncs: fix crash with *error argument
  > 74dea82 gopls/internal/golang/completion: fix crash in instance conversion
  > 8843590 gopls/internal/codeaction: replace all occurrences of expression (refactor.extract.variable.all)
  > fcb4185 gopls/internal/golang: fix bug browsing assembly for (T).f methods
  > 28cd718 cmd/stringer: place test package files in a test module
  > d3a1606 gopls/internal/golang: definition support return statements
  > bf42cd7 go/packages: add Target field and NeedTarget LoadMode bit
  > c73f4c8 internal/stdlib: re-generate manifest
  > a255cbe internal/modindex: handle deprecated symbols
  > e3a4b6b internal/gcimporter: update comment
  > 8127761 gopls/internal/settings: drop experimental hoverKind=Structured
  > 7bfb27f gopls/go.mod: update dependencies following the v0.17.0 release
  > c803483 internal/gcimporter: synchronize with internal/exportdata
  > c1ff179 gopls/internal/golang: Implementations: support generics
  > dd4bf11 internal/gcimporter: set correct Package.Path for main packages
  > aca81ce internal/gcimporter: copy GOROOT/internal/exportdata functions
  > d405635 internal/refactor/inline/analyzer: use "//go:fix inline" annotation
  > 47df42f internal/gcimporter: synchronizing FindPkg implementations
  > 82b6f75 internal/typesinternal: rollback of Zero{Value,Expr} for gopls
  > ab5f969 internal/gcimporter: moving FindPkg
  > 5e47a3d gopls/internal/cache: move Snapshot.Symbols to symbols.go
  > 3689f0b internal/gcimporter: reuse archive.ReadHeader implementation
  > 2cc4218 internal/gcimporter: moving byPath code location
  > 6ebf95a internal/gcimporter: remove indexed support from Import
  > 7790f2e gopls/internal/cache/methodsets: support generics
  > c2df0ef internal/gcimporter: remove end-of-section marker from size
  > ecb0abc internal/typeparams: delete GenericAssignableTo
  > 98c17f1 gopls/internal/golang: eliminate CollectScopes
  > bf32f4d gopls/doc/release: tweak wording following feedback
  > b7532e6 gopls/doc: update relnotes for v0.17.0
  > 44670c7 go.mod: update golang.org/x dependencies
  > 7eebab3 gopls/internal/golang: support extract variable at top level
  > e334696 gopls/internal/golang: ignore effects for change signature refactoring
  > 3901733 internal/refactor/inline: substitute groups of dependent arguments
  > 61b2408 gopls/internal/golang: add "Extract constant" counterpart
  > c01eead internal/gcimporter: require binary export data header
  > 9a04136 gopls/internal/golang/stubmethods: refine crash into bug report
  > 01e0b05 internal/refactor/inline: fix condition for splice assignment strategy
  > 557f540 gopls/internal/golang: don't offer to move variadic parameters
  > 399ee16 internal/gcimporter: update FindExportData to return a non-negative size
  > 25b0003 internal/refactor/inline: more precise redundant conversion detection
  > eb46939 gopls/internal/test/marker: add reproducers for moveparam bug bash bugs
  > 4296223 gopls/internal/analysis/yield: add comment about dataflow
  > 7a4f3b0 internal/gcimporter: require archive files
  > 2f73c61 gopls/internal/golang: avoid crash in lookupDocLinkSymbol
  > ef3d603 gopls/internal/golang/completion: fix crash with extra results
  > 8ffeaba gopls/internal/settings: enable 'waitgroup' analyzer
  > 4317959 go/analysis/passes/waitgroup: report WaitGroup.Add in goroutine
  > 72fdfa6 gopls/internal/golang: Disable test generation for generic functions
  > b80f1ed gopls/internal/analysis/yield: peephole-optimize phi(false, x)
  > e7bd227 gopls/internal/golang: fix folding range for function calls
  > e71702b internal/versions: remove constraint.GoVersion wrapper
  > c99edec gopls/internal/golang/completion: add alias support for literals
  > bfe3046 go/packages: add a unit test for golang/go# 70394
  > df87831 gopls/internal/undeclaredname: add missing colon when appropriate
  > 53efd30 gopls/internal/golang: simplify package name collection in add test
  > 68e4702 go/analysis/passes/printf: add missing call to Func.Origin
  > 30a3bd9 gopls/internal/golang: refactor.extract.variable: allow all exprs
  > 0edd1ab gopls/internal/cache/methodsets: refine crash for missing object path
  > 07a58bc gopls/internal/golang: refine crash golang/go# 70553
  > c622026 completions/inference: infer polymorphic types in completions
  > dcfb0b6 gopls/internal/golang: change signature via renaming 'func'
  > bfcbc1b internal/refactor/inline: avoid unnecessary desugaring in selectors
  > b93a72a internal/refactor/inline: fix spurious caller mutation
  > 41f04a0 internal/refactor/inline: fix comment movement due to added imports
  > 0841661 internal/refactor/inline: avoid unnecessary interface conversions
  > 68b67b4 gopls/internal/golang: support parameter movement refactorings
  > 51e54e8 gopls/doc/features: enable and document source.addTest code action
  > 458067f gopls/internal/golang: improve test package name selection for new file
  > 68caf84 gopls/internal/golang: don't lose ... when split/joining variadics
  > 1e0d4ee go/analysis/checker: disable Example on wasm
  > 8b6e84b gopls/internal/crash: don't crash in xrefs on out of bound nodes
  > 936a401 gopls/internal/golang: preserve copyright and build constraint
  > 1ffc3a1 gopls/internal/test/marker: add defloc, to bind positions by definition
  > 442d6be gopls/internal/test/marker: document named parameters
  > ae39b13 go/analysis/checker: a go/packages-based driver library
  > c3a6283 go/packages: undeprecate Load* style flags
  > 9dff42e gopls/internal/golang/extract: preserve comments in extracted block
  > 8c3ba8c internal/refactor: undo variadic elimination during substitution
  > 3b0b264 internal/refactor: handle qualified names in inlined assignments
  > 9311800 gopls/internal/test/marker: ignore diags in fixedbugs/issue59944.txt
  > c1aa79d gopls/internal/golang: fix gopls hover doc link
  > e751756 internal/analysisinternal: unify zero value function to typesinternal
  > a287481 internal/imports: test Source for go mod cache
  > 9387a39 gopls/doc/contributing.md: update expectations
  > e08fcf7 gopls/internal/analysis/undeclaredname: merge into CodeAction
  > 0c01408 internal/refactor/inline: avoid binding decl for name used by other args
  > 63e03c3 gopls/internal/test/marker: generalize codeaction with named args
  > acc2a74 go/analysis/passes/copylock: enable unfortunate tests
  > 39cb6f0 internal/facts: use alias type parameters and arguments during imports
  > 9b9871d go/analysis/passes/copylock: test for noCopy for sync Map, Mutex, Once
  > a54bd37 gopls/internal/golang: don't try to inline dynamic calls
  > 52eb446 internal/imports: adjust TestStdlibSelfImports pkg
  > b8ff201 gopls/internal/cache: refine bug reports for inconsistent dep view
  > e59fd36 go/ssa: use ZeroString unconditionally
  > 60bc93d gopls/internal/cache: fix handling of cgo standalone files
  > ed19fc7 gopls/internal/test: synchronize notifications during commands
  > 254baba gopls/internal/cache: failure to extract diagnostic fixes is an error
  > 56ec111 gopls/internal/server: remove spurious Async in legacy RunGoVulncheck
  > b1c39aa gopls/internal/cache: use a named bool type for allowNetwork
  > c043599 gopls/internal/protocol: add DocumentURI.DirPath
  > 29f4edb gopls/internal/cache: simplify usage of snapshot.GoCommandInvocation
  > 3c20e3f gopls/internal/analysis/yield: analyzer for iterator (yield) mistakes
  > 221e94d gopls/internal/cache: id command-line-arguments packages using GoFiles
  > 84e9c33 gopls/internal/golang: more idiomatic result naming in extract
  > 8bb5da3 gopls/internal/golang: special handling for input context.Context
  > b4332e0 gopls/internal/golang, go/ssa: remove unnamed input parameter
  > a8d0fa5 go/packages: call testenv.NeedsGoPackages for TestDirAndForTest
  > 47a5f7d gopls/internal/golang: fix bad slice append in function extraction
  > 5b5d57c gopls/codeaction: fix panic when removing unused parameters with syntax errors.
  > 025b812 gopls/internal/server: don't call window/showDocument if unsupported
  > 288b9cb gopls/internal/golang: add missing imports in foo_test.go
  > 87ac91f gopls/internal/server: revert the gopls.run_govulncheck command
  > c531f1b gopls/internal/golang: avoid crash in hover on field of non-struct
  > a37eeb4 README: mention the git repo
  > 8dd84a4 go/ssa/interp: assign phi nodes in parallel
  > 12610a1 internal/modindex: better behavior in edge cases
  > 06a498a go/ssa/interp: simplify output capturing
  > b2321e7 gopls/internal/golang: refine crash report golang/go# 69362
  > c875677 gopls/internal/golang: support add test for receiver w/o constructor
  > e26dff9 gopls/internal/golang: semtok: use type information consistently
  > fd8d028 go/packages: remove some todos
  > 69c27ad gopls/internal/protocol: upgrade to latest LSP
  > 25699c3 internal/server: make MaybePromptForTelemetry no-op
  > 35d7f28 go/packages: add Dir and ForTest fields to Package
bumping k8s.io/code-generator 890b3a1...5a98ceb:
  > 5a98ceb Update dependencies to v0.31.4 tag
  > f74ea3c informers: add comment that Start does not block
  > 80c52b8 Call non-blocking informerFactory.Start synchronously to avoid races
  > e64302e Merge pull request # 126353 from liggitt/fix-vendor
  > d9fab2a revendor dependencies
  > ec3cc88 Merge pull request # 125571 from liggitt/filter-auth-02-sar
  > b4e74fb generate
  > 41a074a Merge pull request # 126073 from a7i/fake-apply-scale-subresource
  > e788e07 Merge pull request # 126018 from aroradaman/bump-k8s-utils
  > 100ab85 fix: fake clientset ApplyScale subresource from 'status' to 'scale'
  > baa7672 bump k8s.io/utils
  > 3f99c9a Merge pull request # 125922 from dims/update_otel_27
  > bb79741 update OpenTelemetry dependencies and grpc
  > 633962a [code-generator] feat: add func for generating register code (# 124946)
  > ab86cd6 Merge pull request # 125759 from dims/bump-prometheus/common-v0.55.0
  > e57d261 Merge pull request # 125162 from sttts/sttts-code-generator-core-group
  > 59cecdb Bump `prometheus/common` to v0.55.0
  > 3c253da Merge pull request # 125766 from dims/update-moby/spdystream-to-v0.4.0
  > be30ddb make codegen
  > e589542 Update moby/spdystream to v0.4.0
  > 07cedd3 code-generator/examples: fix json tags
  > 7a55f28 Merge pull request # 125745 from BenTheElder/ping-ping
  > aad7f9e code-generator/client-gen: add example with single package api/v1
  > 1f8ce20 bump github.com/moby/spdystream to v0.3.0
  > 3d2e34f code-generator/client-gen: decouple core group from package name 'api'
  > 13818de Merge pull request # 125731 from dashpole/revert_otel
  > 5b6873d code-generator/client-gen: add example with core group
  > 1d5e06b Revert "update OpenTelemetry dependencies"
  > 50b2150 Merge pull request # 125669 from benluddy/cbor-bump-v2.7.0
  > bea469c Bump github.com/fxamacker/cbor/v2 to v2.7.0.
  > cdfa7b0 Merge pull request # 125575 from dashpole/update_otel_27
  > 1bfc2ba Merge pull request # 125560 from jpbetz/apply-gen-fake
  > 98d7e2c update OpenTelemetry dependencies
  > e421d05 Resolve rebase
  > 0c9ff01 Generate code
  > 43d54fb Generate type fake clients that may opt in to the use of managed fields
  > 763e7a8 Merge pull request # 125673 from jpbetz/patch-6
  > f536deb Add jpbetz to code-generator OWNERS
  > 569cc2f Merge pull request # 125531 from pohly/klog-update
  > 76a3494 dependencies: klog v2.130.1
  > 2a71dee Merge pull request # 121439 from skitt/generic-client-go
  > 198ba6b Merge pull request # 125051 from marquiz/devel/codegen-fix-grep
  > 896cb08 Run codegen
  > 030791b Merge pull request # 125499 from mbobrovskyi/fix/openapi-gen-dependency
  > 4de699f kube_codegen: smarter grepping of codegen tags
  > 688a73a Generify client-go
  > 28fff67 Fix openapi-gen dependency.
  > 0f7e7aa Run codegen
  > 0b3f2ef Merge pull request # 124509 from p0lyn0mial/upstream-watch-list-code-gen
  > 0c42800 Add a non-subresource genclient:method example
  > 0c188fc ./hack/update-internal-modules.sh
  > bd8446c Run codegen
  > 20a1a2d make update
  > b5459b0 Fix "an declarative" typo
  > f0862c8 client-gen: intro watchList
  > d95c93e Run codegen
  > 3a49e5b Merge pull request # 125408 from benluddy/bump-cbor-v2.7.0
  > 87db423 Add a Name getter in named ApplyConfigurations
  > 32e53a8 Bump fxamacker/cbor/v2 to v2.7.0-beta.
  > 8d055c7 Merge pull request # 124963 from p0lyn0mial/upstream-data-consistency-checker-for-list-requests
  > 9b16c26 make update
  > 8b8ecc8 client-gen: rework list and use CheckListFromCacheDataConsistencyIfRequested
  > 2ec7af0 Merge pull request # 122832 from benluddy/cbor-fuzz-native-to-unstructured-via
  > ca05e03 Update indirect dependencies with ./hack/update-vendor.sh.
  > 620b113 Merge pull request # 124898 from alexzielenski/apiserver/prerelease-ga-tags
  > 9017335 Merge pull request # 123339 from skitt/canonical-json-patch
  > dc6562f prevent deprecated & removed from being generated for GA types
  > 6fcb6f0 Use canonical json-patch v4 import
  > 5f538ea Update kubectl kustomize to kyaml/v0.17.1, cmd/config/v0.14.1, api/v0.17.2, kustomize/v5.4.2
  > 2da86b0 Merge pull request # 125045 from pohly/ginkgo-gomega-update
  > a77a314 dependencies: ginkgo v2.19.0, gomega v1.33.1
  > e9fc761 Merge pull request # 124913 from enj/enj/i/go_work_sum_ignore
  > c9a8b3f Ignore go.work.sum for fake workspaces
  > 1a2807d Merge pull request # 124757 from dims/update-to-latest-golang.org/x/oauth2-v0.20.0
  > 68d188f Update to latest golang.org/x/oauth2 v0.20.0
  > 12b975c Merge pull request # 124281 from fulviodenza/master
  > de98b55 Merge pull request # 124562 from sbueringer/pr-bump-sigs-yaml
  > 8dcbed8 Allow request parameter and proto file path
  > 7977adc Bump sigs.k8s.io/yaml to v1.4.0
  > ff061d6 Merge pull request # 124469 from serathius/etcd-3.5.13
  > 3f5a084 Upgrade etcd libraries to v3.5.13
  > 679c60f Merge pull request # 124328 from jiahuif-forks/deps/cel-go
  > a397227 Merge pull request # 121574 from skitt/generic-lister-gen
  > 164ad5e generated: ./hack/update-vendor.sh
  > 5ae098a Merge pull request # 124346 from jwcesign/master
  > c2b93df Regenerate all listers
  > 677f552 generated: ./hack/pin-dependency.sh github.com/google/cel-go v0.20.1
  > ced7fde upgrade: upgrade dependencies github.com/prometheus/common to the newest version
  > a25c144 Generify lister-gen
  > 2a5cd15 Merge pull request # 122892 from danwinship/codegen-nil
  > a068c84 Add a lister-gen expansion example
  > 8d4f629 Regenerate fake clients
  > 5ade975 Fix codegen to use correct nil-vs-empty semantics in fake clients
  > 4c3e41e Merge pull request # 124193 from soltysh/fix_applyconfig-gen
  > d3d5e9c Merge pull request # 123988 from skitt/use-cases-titler
  > 111256a Update applyconfig-gen for packages where group and dir differ in name
  > 74348a6 Merge pull request # 123893 from soltysh/openapi_schema_applygen
  > ffb0802 code-generator: use cases.Title instead of strings.Title
  > 585ff30 Merge pull request # 116781 from muff1nman/protobuf-fully-qualified-types
  > fec23c2 kube_codegen: expose applyconfig-openapi-schema flag for client generation
  > 6705865 Merge pull request # 124174 from dims/update-x/net-for-CVE-2023-45288
  > cccf4c3 generate fully qualified type references
  > 5a5fa0c Update x/net for CVE-2023-45288
  > e33b396 sync: update go.mod
  > 75dbfb9 Merge pull request # 123735 from thockin/master
bumping golang.org/x/net 334afa0...8da7ed1:
  > 8da7ed1 go.mod: update golang.org/x dependencies
  > 2124140 all: make function and struct comments match the names
  > e9d95ba http2: do not surface errors from a conn's idle timer expiring
  > c2be992 quic: remember which remote connection IDs have been retired
  > dfc720d go.mod: update golang.org/x dependencies
  > 8e66b04 html: use strings.EqualFold instead of lowering ourselves
  > b935f7b html: avoid endless loop on error token
  > 9af49ef route: remove unused sizeof* consts
  > 6705db9 quic: clean up crypto streams when dropping packet protection keys
  > 4ef7588 quic: handle ACK frame in packet which drops number space
  > 552d8ac Revert "route: change from syscall to x/sys/unix"
  > 13a7c01 Revert "route: remove unused sizeof* consts on freebsd"
  > 285e1cf go.mod: update golang.org/x dependencies
  > d0a1049 route: remove unused sizeof* consts on freebsd
  > 6e41410 http2: fix benchmarks using common frame read/write functions
  > 4be1253 route: change from syscall to x/sys/unix
  > bc37675 http2: limit number of PINGs bundled with RST_STREAMs
  > e9cd716 route: fix parse of zero-length sockaddrs in RIBs
  > 9a51899 http2: add SETTINGS_ENABLE_CONNECT_PROTOCOL support
bumping knative.dev/pkg 4c90159...0c2a238:
  > 0c2a238 Bump golang.org/x/tools from 0.28.0 to 0.29.0 (# 3134)
  > accfe36 Satisfy linter (# 3132)
  > b4ff2c1 run dependabot workflow only on dependabot branches (# 3133)
  > 83cd52e Bump golang.org/x/net from 0.31.0 to 0.34.0 (# 3130)
  > 4d8f747 Bump google.golang.org/grpc from 1.68.0 to 1.69.2 (# 3131)
  > 459f60e Bump google.golang.org/protobuf from 1.35.2 to 1.36.2 (# 3129)
  > 14369d0 Bump golang.org/x/tools from 0.27.0 to 0.28.0 (# 3119)
  > 28f90c2 upgrade to latest dependencies (# 3127)
  > 23c15c0 Update k8s min and deps to 1.30  (# 3124)
  > 4ba3f1b Update community files (# 3128)
bumping google.golang.org/grpc acba4d3...b615b35:
  > b615b35 Change version to 1.69.2 (# 7947)
  > 6b36a3e experimental/stats: re-add type aliases for migration (# 7929) (# 7941)
  > 4535c6d Change version to 1.69.2-dev (# 7928)
  > b6e7c72 examples/features/csm_observability: Make CSM Observability example server listen on an IPV4 address (# 7933) (# 7934)
  > 9355fbc Change version to 1.69.1 (# 7927)
  > 927a1e1 Change version to 1.69.1-dev (# 7902)
  > 97d633a Change version to 1.69.0 (# 7901)
  > 317271b pickfirst: Register a health listener when used as a leaf policy (# 7832)
  > 5565631 balancer/pickfirst: replace grpc.Dial with grpc.NewClient in tests (# 7879)
  > 634497b test: Split import paths for generated message and service code (# 7891)
  > 78aa51b pickfirst: Stop test servers without closing listeners (# 7872)
  > 00272e8 dns: Support link local IPv6 addresses (# 7889)
  > 17d08f7 scripts/gen-deps: filter out grpc modules (# 7890)
  > ab189b0 examples/features/csm_observability: Add xDS Credentials (# 7875)
  > 3ce87dd credentials/google: Add cloud-platform scope for ADC (# 7887)
  > 3c0586a stats/opentelemetry: Cleanup OpenTelemetry API's before stabilization (# 7874)
  > 4c07bca stream: add jitter to retry backoff in accordance with gRFC A6 (# 7869)
  > 967ba46 balancer/pickfirst: Add pick first metrics (# 7839)
  > bb7ae0a Change logger to avoid Printf when disabled (# 7471)
  > dcba136 test/xds: remove redundant server when using stubserver in tests (# 7846)
  > 8b70aeb stats/opentelemetry: introduce tracing propagator and carrier (# 7677)
  > 13d5a16 balancer/weightedroundrobin: Switch Weighted Round Robin to use pick first instead of SubConns (# 7826)
  > 93f1cc1 credentials/alts: avoid SRV and TXT lookups for handshaker service (# 7861)
  > 44a5eb9 xdsclient: fix new watcher to get both old good update and nack error (if exist) from the cache (# 7851)
  > 87f0254 xdsclient: fix new watcher hang when registering for removed resource (# 7853)
  > c63aeef transport: add send operations to ClientStream and ServerStream (# 7808)
  > 7d53957 pickfirst: Ensure pickfirst_test.go runs against both new and old policies
  > 0775031 cleanup: remove a TODO that has been taken care of (# 7855)
  > db700b7 credentials: remove the context timeout to fix token request failure with non-GCE ADC (# 7845)
  > 3244606 balancer: fix SubConn embedding requirement to not recommend a nil panic hazard (# 7840)
  > 1e7fde9 test: Add unit test for channel state waiting for first resolver update (# 7768)
  > 36d5ca0 stats: deprecate trace and tags methods and remove all usages from internal code (# 7837)
  > ee3fb29 cleanup: use SliceBuffer directly where no pool is available (# 7827)
  > d7f27c4 xds: rename helper to remove mention of OutgoingCtx (# 7854)
  > fdc28bf xdsclient: remove unexported method from ResourceData interface (# 7835)
  > 3a1e3e2 xdsclient: rename the interface for the transport (# 7842)
  > 66385b2 clusterimpl: propagate state update from child when drop/request config remains unchanged (# 7844)
  > 89737ae orca: switching to stubserver in tests instead of testservice implementation (# 7727)
  > a365199 examples: fix debugging example after Dial->NewClient migration (# 7833)
  > 8c518f7 xds: switching to stubserver in tests instead of testservice implementation (# 7726)
  > b01130a xds/internal/xdsclient: fix resource type documentation to only mention handling xds responses (# 7834)
  > 274830d balancer: Add a SubConn.RegisterHealthListener API and default implementation (# 7780)
  > 0553bc3 xdsclient: don't change any global state in NewForTesting (# 7822)
  > 3db86e2 deps: Remove go patch version from go.mod (# 7831)
  > e2b98f9 pickfirst: Implement Happy Eyeballs (# 7725)
  > 60c70a4 mem: implement `ReadAll()` for more efficient `io.Reader` consumption (# 7653)
  > d2c1aae xds: Plumb EDS endpoints through xDS Balancer Tree (# 7816)
  > c2a2d20 docs: update documentation for `ClientStream.SendMsg()` returning `nil` unconditionally when `ClientStreams=false` (# 7790)
  > 0d0e530 grpc: export MethodHandler # 7794 (# 7796)
  > a3a8657 clusterimpl: update picker synchronously on config update (# 7652)
  > 74738cf grpc: Remove health check func dial option used for testing (# 7820)
  > 5b40f07 xdsclient: fix flaky test TestServeAndCloseDoNotRace (# 7814)
  > b3393d9 xdsclient: support fallback within an authority (# 7701)
  > 18d218d pickfirst: Interleave IPv6 and IPv4 addresses for happy eyeballs (# 7742)
  > e9ac44c cleanup: replace grpc.Dial with grpc.NewClient in grpclb test (# 7789)
  > 0ec8fd8 xdsclient/ads: reset the pending bit of ADS stream flow control at the end of the onDone method (# 7806)
  > 43ee172 balancer: Enforce embedding the SubConn interface in implementations (# 7758)
  > 2de6df9 xds/resolver: fix flaky test TestResolverRemovedWithRPCs with a workaround (# 7804)
  > 2a18bfc transport: refactor to split ClientStream from ServerStream from common Stream functionality (# 7802)
  > 70e8931 transport: remove useless trampoline function (# 7801)
  > ef0f617 xdsclient: start using the newly added transport and channel functionalities (# 7773)
  > d66fc3a balancer/endpointsharding: Call ExitIdle() on child if child reports IDLE (# 7782)
  > 2e3f547 ringhash: fix a couple of flakes in e2e style tests (# 7784)
  > 52d7f6a multiple: switch to math/rand/v2 (# 7711)
  > 6fd86d3 Disable buffer_pooling tests (# 7762)
  > 091d20b server: Only call FromIncomingContext with stats handlers (# 7781)
  > 192ee33 multiple: add verbosity checks to logs that use pretty.JSON (# 7785)
  > e7435d6 balancer/endpointsharding: Ignore empty endpoints (# 7674)
  > 4084b14 stats/opentelemetry: Remove OpenTelemetry module and add RLS Metrics e2e tests (# 7759)
  > ada6787 cleanup: switching to stubserver in tests instead of testservice implementation (# 7708)
  > cb32937 credentials: Support ALTSPerRPCCreds in DefaultCredentialsOptions (# 7775)
  > a0cbb52 github: add Go 1.23 testing and make staticcheck work locally with go1.23 (# 7751)
  > 67b9ebf xdsclient: make sending requests more deterministic (# 7774)
  > 94e1b29 vet: add dependency checks (# 7766)
  > a82315c testutils: change ListenerWrapper to push the most recently accepted connection (# 7772)
  > e0a730c clusterresolver: fix a comment in a test (# 7776)
  > f8e5d8f mem: use slice capacity instead of length, to determine whether to pool buffers or directly allocate them (# 7702)
  > c4c8b11 xds/resolver: add a way to specify the xDS client to use for testing purposes (# 7771)
  > 8212cf0 xdsclient: implementation of the xdsChannel (# 7757)
  > 4bb0170 status: Fix status incompatibility introduced by # 6919 and move non-regeneratable proto code into /testdata (# 7724)
  > 80937a9 credentials: Apply defaults to TLS configs provided through GetConfigForClient (# 7754)
  > c538c31 vet: Don't use GOROOT to set PATH if GOROOT is unset (# 7761)
  > 14e2a20 resolver/google-c2p: introduce SetUniverseDomain API (# 7719)
  > 98959d9 deps: update dependencies for all modules (# 7755)
  > 56df169 resolver: update ReportError() docstring (# 7732)
  > 830135e xdsclient: new Transport interface and ADS stream implementation (# 7721)
  > d2ded4b xdsclient: new Transport interface and LRS stream implementation (# 7717)
  > ec10e73 transport: refactor `trInFlow.onData` to eliminate redundant logic (# 7734)
  > 6cd00c9 clientconn: remove redundant check (# 7700)
  > 569c8eb vet: Use go1.22 instead of go1.21 for tidy and staticcheck (# 7747)
  > 4544b8a Change version to 1.69.0-dev (# 7746)
bumping github.com/davecgh/go-spew 8991bc2...d8f796a:
  > d8f796a travis: test against go 1.11
bumping golang.org/x/sys e0753d4...d4ac05d:
  > d4ac05d windows: update NewLazyDLL, LoadDLL docs to point to NewLazySystemDLL
  > 680bd24 windows: remove unused errString type
  > a7f19e9 unix: add Dup3 on dragonfly
  > fe16172 unix: define IfMsghdr2, IfData64, and RtMsghdr2 on darwin
  > 0a57dbc unix: update to kernel Linux 6.12
  > 3cf1e67 unix: don't fail TestPpoll on EINTR
  > d2cea70 windows: add functions to get named pipe process IDs
  > a13946c windows: regenerate zsyscall_windows.go
bumping golang.org/x/term b725e36...40b02d6:
  > 40b02d6 go.mod: update golang.org/x dependencies
  > 442846a go.mod: update golang.org/x dependencies
bumping k8s.io/api 83bdab1...e45474d:
  > e45474d Update dependencies to v0.31.4 tag
  > 46f6230 Merge pull request # 126761 from thockin/automated-cherry-pick-of-# 126749-upstream-release-1.31
  > 1857695 fix v1a3 ResourceSliceList metadata field name
  > 382a091 update codegen and openapi
  > 1073c1e regen clients
  > 6f8e3bd Review feedback
  > 437d97a Coordinated Leader Election Alpha API
  > 63e21d3 Merge pull request # 126243 from SergeyKanzhelev/devicePluginFailures
  > ca07d5a generated files
  > 7192863 add AllocatedResourcesStatus field to ContainerStatus
  > a789efa Merge pull request # 126281 from saschagrunert/oci-volume-docs
  > f04ea0b Merge pull request # 126145 from carlory/kep-3751-api
  > 6f2fee6 ImageVolumeSource: mention that fsGroupChangePolicy has no effect
  > 7d5eb68 Merge pull request # 126108 from gnufied/changes-volume-recovery
  > 447f08f Promote VolumeAttributesClass to beta
  > 871340c Merge pull request # 120611 from pohly/dra-resource-quotas
  > 8962de2 Rename ResizeFailed conditions to ResizeInfeasible
  > 3421a80 Merge pull request # 126067 from tenzen-y/implement-job-success-policy-e2e
  > 19081f3 DRA quota: add ResourceClaim v1.ResourceQuota limits
  > a48b61c Add new values for reporting expansion errors via conditions
  > b689d90 Merge pull request # 125374 from pwschuurman/kep-3335-stable
  > a815b26 Graduate the JobSuccessPolicy to beta
  > 9516298 Merge pull request # 125488 from pohly/dra-1.31
  > dc483d9 Remove StatefulSetStartOrdinal feature gate to target stable in 1.31
  > ff2bf05 Job: Add the CompletionsReached reason to the SuccessCriteriaMet condition
  > e0a906d DRA: new API for 1.31
  > 0030d2c DRA: remove "sharable" from claim allocation result
  > 35de2c0 DRA: remove immediate allocation
  > 8cd8793 DRA: bump API v1alpha2 -> v1alpha3
  > 7d5e5ea Merge pull request # 125571 from liggitt/filter-auth-02-sar
  > 0c2b9ec generate
  > 80604de add subjectaccessreview field and label selectors
  > 5c3f1a6 Merge pull request # 126207 from thockin/ingress-backend-port-atomic
  > 441003d Merge pull request # 125782 from aborrero/master
  > 1b3cb9c Make ServiceBackendPort an atomic struct
  > 261e6aa Merge pull request # 125660 from saschagrunert/oci-volumesource-api
  > b18808d procMount: fix default value documentation
  > fda4e0a Merge pull request # 124859 from morlay/master
  > e42b090 Add ImageVolumeSource API
  > 7ff5716 Merge pull request # 125940 from thockin/master
  > 4dc0f37 Remove json:",omitempty" where json:",inline" specified.
  > b1818c5 Merge pull request # 125470 from everpeace/kep-3619-SupplementalGroupsPolicy-e2e
  > 36ebe43 Clarify errors in ProjectedVolume validation
  > f00cb17 Merge pull request # 124568 from xyz-li/fix_apiserver_output
  > 8be67e6 KEP-3619: API: add NodeFeatures.SupplementalGroupsPolicy in NodeStatus
  > 68522b3 api: fix ValidatingAdmissionPolicyList json tag
  > 4fec474 Merge pull request # 126034 from sohankunkerkar/add-usernamespaces
  > 0787ff7 api: add user namespaces field to NodeRuntimeHandlerFeatures
  > fc8a03c Merge pull request # 126018 from aroradaman/bump-k8s-utils
  > 8ea3a95 Merge pull request # 126057 from thockin/make-pod-ip-host-ip-required
  > bf5bdd3 bump k8s.io/utils
  > 2ae1cf1 Merge pull request # 125442 from mimowo/job-pod-failure-policy-stable
  > 2ef3cc9 make PodIP.IP and HostIP.IP required.
  > e0af89b JobPodFailurePolicy to GA
  > 9c8c6d2 Merge pull request # 126046 from mimowo/fix-job-pod-failure-api
  > 4badb33 Merge pull request # 124969 from RomanBednar/pv-phase-transition-time-ga
  > 61b6c0a Use omitempty for optional fields in Job Pod Failure Policy
  > 2cf4612 Merge pull request # 125922 from dims/update_otel_27
  > ef6990b graduate PersistentVolumeLastPhaseTransitionTime to GA in 1.31
  > 9a93d6b update OpenTelemetry dependencies and grpc
  > e7b4471 Merge pull request # 125021 from aojea/servicecidrbeta
  > f41eb5f Merge pull request # 125116 from pohly/dra-one-of-source
  > db34f59 make update
  > 48abdb0 Merge pull request # 122047 from aojea/treeless
  > f74974e DRA: remove "source" indirection from v1 Pod API
  > 5ad5007 promote ServiceCIDR and IPAddress to beta
  > 70c0174 Merge pull request # 125759 from dims/bump-prometheus/common-v0.55.0
  > 19c5afd generated
  > 236105a Merge pull request # 125162 from sttts/sttts-code-generator-core-group
  > ad14b89 Bump `prometheus/common` to v0.55.0
  > ea15fac document the API with the details about the allocation CIDR ranges
  > 78bd910 Merge pull request # 125766 from dims/update-moby/spdystream-to-v0.4.0
  > 6ff324a code-generator/client-gen: decouple core group from package name 'api'
  > 77d4ad8 Merge pull request # 125600 from thockin/plus_default_deprecated_volumes
  > ba9d110 Update moby/spdystream to v0.4.0
  > 89b2da1 Merge pull request # 125745 from BenTheElder/ping-ping
  > e191233 Use +default for now deprecated ScaleIO volume
  > 26803c0 bump github.com/moby/spdystream to v0.3.0
  > cdeb356 Use +default for now deprecated AzureDisk volume
  > e025325 Merge pull request # 125731 from dashpole/revert_otel
  > 165690c Use +default for now deprecated ISCSI volume
  > 2788e05 Revert "update OpenTelemetry dependencies"
  > 53f4e42 Use +default for now deprecated RBD volume
  > 149781f Merge pull request # 125669 from benluddy/cbor-bump-v2.7.0
  > e297a0e Bump github.com/fxamacker/cbor/v2 to v2.7.0.
  > 8d69060 Merge pull request # 125575 from dashpole/update_otel_27
  > 2d0cd50 update OpenTelemetry dependencies
  > e09016f Merge pull request # 125576 from alvaroaleman/fix
  > 9f6f03f Merge pull request # 125531 from pohly/klog-update
  > 69864e9 Corev1.Node: Link to node doc and not PV doc in status.capacity
  > a2ff581 dependencies: klog v2.130.1
  > 857a946 Merge pull request # 125540 from pohly/revert-binding-deprecation
  > ee77356 api: revert deprecation annotation for v1 Binding
  > 590e504 Merge pull request # 125408 from benluddy/bump-cbor-v2.7.0
  > b5dd4c4 Bump fxamacker/cbor/v2 to v2.7.0-beta.
  > c0840f2 Merge pull request # 125189 from mimowo/improve-ready-comment
  > c114cd7 Merge pull request # 122832 from benluddy/cbor-fuzz-native-to-unstructured-via
  > 7f878b9 Improve the Job API comment for ready field
  > d93eaf6 KEP-3619: Fine-grained SupplementalGroups control (# 117842)
  > 6e174b7 Update indirect dependencies with ./hack/update-vendor.sh.
  > 983e15a Merge pull request # 124898 from alexzielenski/apiserver/prerelease-ga-tags
  > 4261982 Merge pull request # 123339 from skitt/canonical-json-patch
  > 740a638 add prerelease lifecycle tags to GA types
  > 0b590ac Use canonical json-patch v4 import
  > 7cdaa4e Update kubectl kustomize to kyaml/v0.17.1, cmd/config/v0.14.1, api/v0.17.2, kustomize/v5.4.2
  > 7480c46 Merge pull request # 125045 from pohly/ginkgo-gomega-update
  > 9a6330a dependencies: ginkgo v2.19.0, gomega v1.33.1
  > 664bdd5 Merge pull request # 124793 from mimowo/fix-managed-by-comment
  > b1dbea2 Merge pull request # 123638 from sanposhiho/matchlabelkey-beta
  > 2a86f24 Fix the comment for the Job managedBy field
  > 8a7117f graduate MatchLabelKeysInPodAffinity to Beta
  > 7ccc245 Merge pull request # 123845 from HirazawaUi/promote-DisableNodeKubeProxyVersion-to-beta
  > aa3ed7c Merge pull request # 124590 from xrstf/fix-optional-comment
  > e6d86a2 promote DisableNodeKubeProxyVersion feature gate to beta
  > 92d2eac fix(api): make LocalObjectReference.Name and HostAlias.IP required (# 124553)
  > 2144e85 codegen
  > 7c9431b Merge pull request # 124572 from aojea/alpha_tag
  > 8db881a alpha->beta
  > c4ac111 Merge pull request # 124562 from sbueringer/pr-bump-sigs-yaml
  > fd5eb88 tag service.spec.TrafficDistribution field as alpha
  > 9bd0523 fix stray +optional comment on persistentVolumeClaimRetentionPolicy
  > 8cf3f40 Bump sigs.k8s.io/yaml to v1.4.0
  > 2676848 Merge pull request # 124469 from serathius/etcd-3.5.13
  > b75136d Merge pull request # 124075 from pohly/dra-api-comments
  > 8a34349 Upgrade etcd libraries to v3.5.13
  > 039b247 Merge pull request # 124328 from jiahuif-forks/deps/cel-go
  > 83fe34a DRA: sync internal API doc comments
  > fbc502f generated: ./hack/update-vendor.sh
  > 1e01004 DRA api: ResourceHandle.DriverName is required
  > 63e83d0 generated: ./hack/pin-dependency.sh github.com/google/cel-go v0.20.1
  > 2df4487 DRA api: explicitly reserve finalizer for Kubernetes
  > 5975d5e Merge pull request # 124365 from SataQiu/v1.30.0-api-testdata
  > 98d0c7a Merge pull request # 124103 from zhanluxianshen/fix-typos-in-rbac-api
  > 14e4e0c Remove v1.28.0 API testdata
  > 76b6c7c Merge pull request # 124062 from alculquicondor/nodename-api-comment
  > 04510aa Fix typos in rbac api.
  > b27f498 Add v1.30.0 API testdata
  > a819b1d Merge pull request # 116781 from muff1nman/protobuf-fully-qualified-types
  > 58c5883 Update API comment for nodeName to match system behavior
  > 5e7d566 Merge pull request # 124174 from dims/update-x/net-for-CVE-2023-45288
  > 33eb147 generate fully qualified type references
  > 63d4a04 Update x/net for CVE-2023-45288
  > 5147c1a sync: update go.mod
  > d1659eb Merge pull request # 123932 from pohly/dra-api-resource-model-rename
bumping golang.org/x/sync 151027e...913fb63:
  > 913fb63 singleflight: fix typo in singleflight_test.go
bumping knative.dev/hack e92a16a...f8be0cc:
  > f8be0cc Update community files (# 407)
bumping google.golang.org/protobuf c72053a...12c6ebd:
  > 12c6ebd all: release v1.36.2
  > 8878926 internal/impl: fix WhichOneof() to work with synthetic oneofs
  > c0c814f all: start v1.36.1-devel
  > 7fc5ff4 all: release v1.36.1
  > 575aebf internal/impl: revert IsSynthetic() check to fix panic
  > ce4fa19 internal/errors: delete compatibility code for Go before 1.13
  > 607da3e all: start v1.36.0-devel
  > 3b78ca8 all: release v1.36.0
  > 4cf3399 src/google/protobuf: document UnmarshalJSON / API level behavior
  > 8edf940 reflect/protoreflect: use [] syntax to reference method
  > 5376513 proto: add reference to size semantics with lazy decoding to comment
  > 560503e compiler/protogen: allow overriding API level from --go_opt
  > b64efdb cmd/protoc-gen-go: generate _protoopaque variant for hybrid
  > 9eda3d5 all: regenerate.bash for Opaque API
  > eb7b468 all: Release the Opaque API
  > 5f5de33 types/descriptorpb: regenerate using latest protobuf v29.1 release
  > bdcc7ad internal/impl: skip synthetic oneofs in messageInfo
  > 30f628e all: start v1.35.2-devel
bumping k8s.io/apimachinery 37988e5...a8f449e:
  > a8f449e Falls back to SPDY for gorilla/websocket https proxy error
  > 62791ec Merge pull request # 125571 from liggitt/filter-auth-02-sar
  > cc2ba35 add field and label selectors to authorization attributes
  > ce76a8f generate
  > 35052c5 add subjectaccessreview field and label selectors
  > ab06869 Merge pull request # 126105 from benluddy/cbor-framer
  > 429f4e4 Implement runtime.Framer for CBOR Sequences.
  > d7e1c53 Merge pull request # 126018 from aroradaman/bump-k8s-utils
  > 07cb122 Merge pull request # 125748 from benluddy/cbor-custom-marshalers
  > dd17456 bump k8s.io/utils
  > c485170 Error on custom (un)marshalers without a CBOR implementation.
  > 4524748 Merge pull request # 125790 from benluddy/cbor-fieldsv1
  > 6b362fa Merge pull request # 125676 from benluddy/cbor-bufferpool
  > 3da83fe Support either JSON or CBOR in FieldsV1.
  > 56f28d1 Merge pull request # 125629 from benluddy/cbor-rawextension
  > 5976b00 Don't pool large CBOR encode buffers.
  > e55063d Automatically transcode RawExtension between unstructured protocols.
  > e126c65 Merge pull request # 125743 from benluddy/extract-roundtrip-to-unstructured
  > c1f2403 Merge pull request # 125922 from dims/update_otel_27
  > 04bc058 Extract RoundtripToUnstructured to apimachinery apitesting library.
  > f813d28 Merge pull request # 125835 from benluddy/roundtrip-error-fmt-strings
  > 9ac6d17 update OpenTelemetry dependencies and grpc
  > 160885f Fix fmt verbs for strings in roundtrip test errors.
  > ef4453d fix: enable bool-compare rule from testifylint linter (# 125135)
  > adf72dd Merge pull request # 125759 from dims/bump-prometheus/common-v0.55.0
  > 1dfa5d9 Merge pull request # 125766 from dims/update-moby/spdystream-to-v0.4.0
  > 6deaf26 Bump `prometheus/common` to v0.55.0
  > a0fb8b1 Merge pull request # 125646 from HirazawaUi/apply-null
  > a2e9f2d Update moby/spdystream to v0.4.0
  > 65a3763 Merge pull request # 125712 from benluddy/remove-cbor-test-skips
  > 0daad00 add comment for mergeMap
  > 276559d Merge pull request # 125745 from BenTheElder/ping-ping
  > 9731e93 Remove temporary mechanism for skipping CBOR tests.
  > 11ede0a bump  github.com/moby/spdystream to v0.3.0
  > 93912e7 Merge pull request # 125422 from benluddy/cbor-disable-binarymarshaler
  > 01e80c9 Merge pull request # 125731 from dashpole/revert_otel
  > 5626c83 Disable recognition of Binary(Unm|M)arshaler in CBOR serializer.
  > 5f5d4bf Revert "update OpenTelemetry dependencies"
  > 9501ff9 Merge pull request # 125419 from benluddy/cbor-byteslice-base64
  > c225984 Merge pull request # 122891 from siyuanfoundation/api-comp-ver1
  > 541e437 Enable JSON-compatible base64 encoding of []byte for CBOR.
  > a05248b Merge pull request # 125421 from benluddy/cbor-simple-values
  > f7e861d Add version mapping in ComponentGlobalsRegistry.
  > 123bf3a Merge pull request # 125669 from benluddy/cbor-bump-v2.7.0
  > 8e6f2d4 Reject CBOR simple values other than true, false, and null.
  > cc42a15 apiserver: Add API emulation versioning.
  > e94cfc0 Merge pull request # 125418 from benluddy/cbor-byte-array-to-array
  > e837731 Bump github.com/fxamacker/cbor/v2 to v2.7.0.
  > e3238d4 Merge pull request # 125575 from dashpole/update_otel_27
  > 042376f Encode byte array to CBOR as array of integer, not byte string.
  > 0e02b52 Merge pull request # 125420 from benluddy/cbor-bignum-bigint
  > 347a004 update OpenTelemetry dependencies
  > 97723bf Reject math/big.Int on encode and bignum tags on decode for CBOR.
  > eb26334 Merge pull request # 125423 from benluddy/cbor-nan-inf
  > 04fe518 Merge pull request # 125531 from pohly/klog-update
  > cf52dcb Reject NaN or infinite floating-point values in the CBOR serializer.
  > aab19db dependencies: klog v2.130.1
  > af4f0d8 Merge pull request # 125424 from benluddy/cbor-timetag-rfc3339
  > 30b7bf1 Merge pull request # 125472 from karlkfi/karl-watch-comments
  > cfa284d Decode CBOR time tags to interface{} as RFC 3339 timestamps.
  > bcc2184 Add details to watch interface method comments
  > 1a6a62a Merge pull request # 125408 from benluddy/bump-cbor-v2.7.0
  > 71aae7d Allow decoding RFC 3339 CBOR strings to time.Time.
  > a2d2122 Bump fxamacker/cbor/v2 to v2.7.0-beta.
  > 703232e Merge pull request # 125299 from karlkfi/karl-reflector-fix-2
  > 8ac23fa Improve Reflector unit tests
  > 733a95e Merge pull request # 122832 from benluddy/cbor-fuzz-native-to-unstructured-via
  > b4069ae Implement cbor.Marshaler and cbor.Unmarshaler for resource.Quantity.
  > 6253e16 Implement cbor.Marshaler and cbor.Unmarshaler for metav1.MicroTime.
  > b19cb35 Implement cbor.Marshaler and cbor.Unmarshaler for metav1.Time.
  > bd5fa0b Implement cbor.Marshaler and cbor.Unmarshaler for IntOrString.
  > 63ab494 Merge pull request # 123339 from skitt/canonical-json-patch
  > 845ea7e Use canonical json-patch v4 import
  > a14e568 Update kubectl kustomize to kyaml/v0.17.1, cmd/config/v0.14.1, api/v0.17.2, kustomize/v5.4.2
  > d8a3da3 Merge pull request # 125045 from pohly/ginkgo-gomega-update
  > 5c8637d Merge pull request # 124775 from benluddy/cbor-unstructuredlist
  > 48701d2 dependencies: ginkgo v2.19.0, gomega v1.33.1
  > 491adfd Merge pull request # 125068 from benluddy/cbor-decode-to-any
  > 7f39f09 Decode CBOR to UnstructuredList as UnstructuredJSONScheme does.
  > 60bddd4 Add unit tests for decoding CBOR into interface{} type
  > 1da46c3 Merge pull request # 124799 from benluddy/cbor-self-described-cbor-tag-decode-test
  > 27aebe7 Add CBOR decoder unit test that accepts tag 55799.
  > 53e91cb Merge pull request # 123620 from benluddy/json-frame-reader-underlying-array-reuse
  > c9c3e94 Merge pull request # 121496 from benluddy/metav1-labelselector-fuzz
  > e79aa4b Remove shared ref to underlying array of JSONFrameReader's Read arg.
  > d5c9711 Merge pull request # 124562 from sbueringer/pr-bump-sigs-yaml
  > 8a0a7f6 Deduplicate set expression values in metav1.LabelSelector fuzzer.
  > a043c3f Merge pull request # 122026 from skitt/sets-go1.21
  > e2d25ee Bump sigs.k8s.io/yaml to v1.4.0
  > 03f2f33 Merge pull request # 124469 from serathius/etcd-3.5.13
  > b64363e Use Go 1.21 Ordered and clear for sets
  > 281251e Upgrade etcd libraries to v3.5.13
  > bb88221 Merge pull request # 124328 from jiahuif-forks/deps/cel-go
  > bfd47a1 Merge pull request # 123626 from benluddy/cbor-test-encode-no-duplicate-map-key
  > 952d3cb generated: ./hack/update-vendor.sh
  > 0ee3e61 Merge pull request # 124324 from benluddy/cbor-decode-tests-grouped-by-cbor-type
  > 948272c Add tests for CBOR encoder handling of duplicate field names/tags.
  > 125cb5f generated: ./hack/pin-dependency.sh github.com/google/cel-go v0.20.1
  > ea31e51 Merge pull request # 124116 from HiranmoyChowdhury/hiranmoy
  > d39475b Group CBOR decode tests by the kind of their inputs.
  > 8c36da9 Merge pull request # 121970 from pohly/log-apimachinery-runtime
  > 8d006f4 deep copy issue in getting controller is solved
  > 7778646 Merge pull request # 116781 from muff1nman/protobuf-fully-qualified-types
  > 126f5ce apimachinery runtime: support contextual logging
  > e696ec5 Merge pull request # 124174 from dims/update-x/net-for-CVE-2023-45288
  > ad648b1 generate fully qualified type references
  > 2dd8802 Update x/net for CVE-2023-45288
  > d82afe1 Merge pull request # 123801 from HirazawaUi/followup-allow-special-characters
bumping golang.org/x/text efd25da...d42948e:
  > d42948e go.mod: update golang.org/x dependencies

Signed-off-by: Knative Automation <automation@knative.team>
Knative Automation authored on 2025-01-13 14:52:00 -05:00, committed by GitHub
parent 634555c68c, commit dee364dbb0
1778 changed files with 101510 additions and 38502 deletions

go.mod

@ -5,13 +5,13 @@ go 1.22.7
require (
github.com/google/go-cmp v0.6.0
go.uber.org/zap v1.27.0
k8s.io/api v0.30.3
k8s.io/apimachinery v0.30.3
k8s.io/client-go v0.30.3
k8s.io/code-generator v0.30.3
k8s.io/api v0.31.4
k8s.io/apimachinery v0.31.4
k8s.io/client-go v0.31.4
k8s.io/code-generator v0.31.4
k8s.io/kube-openapi v0.0.0-20240808142205-8e686545bdb8
knative.dev/hack v0.0.0-20241227080210-e92a16ae0893
knative.dev/pkg v0.0.0-20241223131119-4c901591eb4a
knative.dev/hack v0.0.0-20250109131303-f8be0ccdff36
knative.dev/pkg v0.0.0-20250113161000-0c2a238a16ed
)
require (
@ -21,10 +21,10 @@ require (
github.com/blendle/zapdriver v1.3.1 // indirect
github.com/census-instrumentation/opencensus-proto v0.4.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/emicklei/go-restful/v3 v3.12.1 // indirect
github.com/evanphx/json-patch v5.9.0+incompatible // indirect
github.com/evanphx/json-patch/v5 v5.9.0 // indirect
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
github.com/go-kit/log v0.2.1 // indirect
github.com/go-logfmt/logfmt v0.5.1 // indirect
github.com/go-logr/logr v1.4.2 // indirect
@ -54,23 +54,25 @@ require (
github.com/prometheus/procfs v0.15.1 // indirect
github.com/prometheus/statsd_exporter v0.22.7 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/x448/float16 v0.8.4 // indirect
go.opencensus.io v0.24.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/mod v0.22.0 // indirect
golang.org/x/net v0.31.0 // indirect
golang.org/x/net v0.34.0 // indirect
golang.org/x/oauth2 v0.23.0 // indirect
golang.org/x/sync v0.9.0 // indirect
golang.org/x/sys v0.27.0 // indirect
golang.org/x/term v0.26.0 // indirect
golang.org/x/text v0.20.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/sys v0.29.0 // indirect
golang.org/x/term v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
golang.org/x/time v0.6.0 // indirect
golang.org/x/tools v0.27.0 // indirect
golang.org/x/tools v0.29.0 // indirect
gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect
google.golang.org/api v0.183.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect
google.golang.org/grpc v1.68.0 // indirect
google.golang.org/protobuf v1.35.2 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20241015192408-796eee8c2d53 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20241015192408-796eee8c2d53 // indirect
google.golang.org/grpc v1.69.2 // indirect
google.golang.org/protobuf v1.36.2 // indirect
gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect

go.sum

@ -63,8 +63,9 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emicklei/go-restful/v3 v3.12.1 h1:PJMDIM/ak7btuL8Ex0iYET9hxM3CI2sjZtzpL63nKAU=
github.com/emicklei/go-restful/v3 v3.12.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@ -75,6 +76,8 @@ github.com/evanphx/json-patch v5.9.0+incompatible h1:fBXyNpNMuTTDdquAq/uisOr2lSh
github.com/evanphx/json-patch v5.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch/v5 v5.9.0 h1:kcBlZQbplgElYIlo/n1hJbls2z/1awpXxpRi0/FOJfg=
github.com/evanphx/json-patch/v5 v5.9.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ=
github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@ -93,6 +96,8 @@ github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KE
github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
@ -100,7 +105,6 @@ github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDsl
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
@ -235,8 +239,9 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
@ -293,6 +298,8 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stvp/go-udp-testing v0.0.0-20201019212854-469649b16807/go.mod h1:7jxmlfBCDBXRzr0eAQJ48XC1hBu1np4CS5+cHEYfwpc=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@ -305,6 +312,16 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY=
go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE=
go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE=
go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY=
go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk=
go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0=
go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc=
go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8=
go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys=
go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
@ -388,8 +405,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo=
golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -410,8 +427,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -453,12 +470,12 @@ golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220708085239-5a0f0661e09d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU=
golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E=
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -466,8 +483,8 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -516,8 +533,8 @@ golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o=
golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q=
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -580,10 +597,10 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 h1:hjSy6tcFQZ171igDaN5QHOw2n6vx40juYbC/x67CEhc=
google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
google.golang.org/genproto/googleapis/api v0.0.0-20241015192408-796eee8c2d53 h1:fVoAXEKA4+yufmbdVYv+SE73+cPZbbbe8paLsHfkK+U=
google.golang.org/genproto/googleapis/api v0.0.0-20241015192408-796eee8c2d53/go.mod h1:riSXTwQ4+nqmPGtobMFyW5FqVAmIs0St6VPp4Ug7CE4=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241015192408-796eee8c2d53 h1:X58yt85/IXCx0Y3ZwN6sEIKZzQtDEYaBWrDvErdXrRE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20241015192408-796eee8c2d53/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -597,8 +614,8 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.68.0 h1:aHQeeJbo8zAkAa3pRzrVjZlbz6uSfeOXlJNQM0RAbz0=
google.golang.org/grpc v1.68.0/go.mod h1:fmSPC5AsjSBCK54MyHRx48kpOti1/jRfOlwEWywNjWA=
google.golang.org/grpc v1.69.2 h1:U3S9QEtbXC0bYNvRtcoklF3xGtLViumSYxWykJS+7AU=
google.golang.org/grpc v1.69.2/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@ -613,8 +630,8 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io=
google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU=
google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@ -622,6 +639,8 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/evanphx/json-patch.v4 v4.12.0 h1:n6jtcsulIzXPJaxegRbvFNNrZDjbij7ny3gmSPG+6V4=
gopkg.in/evanphx/json-patch.v4 v4.12.0/go.mod h1:p8EYWUEYMpynmqDbY58zCKCFZw8pRWMG4EsWvDvM72M=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@ -643,14 +662,14 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
k8s.io/api v0.30.3 h1:ImHwK9DCsPA9uoU3rVh4QHAHHK5dTSv1nxJUapx8hoQ=
k8s.io/api v0.30.3/go.mod h1:GPc8jlzoe5JG3pb0KJCSLX5oAFIW3/qNJITlDj8BH04=
k8s.io/apimachinery v0.30.3 h1:q1laaWCmrszyQuSQCfNB8cFgCuDAoPszKY4ucAjDwHc=
k8s.io/apimachinery v0.30.3/go.mod h1:iexa2somDaxdnj7bha06bhb43Zpa6eWH8N8dbqVjTUc=
k8s.io/client-go v0.30.3 h1:bHrJu3xQZNXIi8/MoxYtZBBWQQXwy16zqJwloXXfD3k=
k8s.io/client-go v0.30.3/go.mod h1:8d4pf8vYu665/kUbsxWAQ/JDBNWqfFeZnvFiVdmx89U=
k8s.io/code-generator v0.30.3 h1:bmtnLJKagDS5f5uOEpLyJiDfIMKXGMKgOLBdde+w0Mc=
k8s.io/code-generator v0.30.3/go.mod h1:PFgBiv+miFV7TZYp+RXgROkhA+sWYZ+mtpbMLofMke8=
k8s.io/api v0.31.4 h1:I2QNzitPVsPeLQvexMEsj945QumYraqv9m74isPDKhM=
k8s.io/api v0.31.4/go.mod h1:d+7vgXLvmcdT1BCo79VEgJxHHryww3V5np2OYTr6jdw=
k8s.io/apimachinery v0.31.4 h1:8xjE2C4CzhYVm9DGf60yohpNUh5AEBnPxCryPBECmlM=
k8s.io/apimachinery v0.31.4/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo=
k8s.io/client-go v0.31.4 h1:t4QEXt4jgHIkKKlx06+W3+1JOwAFU/2OPiOo7H92eRQ=
k8s.io/client-go v0.31.4/go.mod h1:kvuMro4sFYIa8sulL5Gi5GFqUPvfH2O/dXuKstbaaeg=
k8s.io/code-generator v0.31.4 h1:Vu+8fKz+239rKiVDHFVHgjQ162cg5iUQPtTyQbwXeQw=
k8s.io/code-generator v0.31.4/go.mod h1:yMDt13Kn7m4MMZ4LxB1KBzdZjEyxzdT4b4qXq+lnI90=
k8s.io/gengo v0.0.0-20240404160639-a0386bf69313 h1:wBIDZID8ju9pwOiLlV22YYKjFGtiNSWgHf5CnKLRUuM=
k8s.io/gengo v0.0.0-20240404160639-a0386bf69313/go.mod h1:FiNAH4ZV3gBg2Kwh89tzAEV2be7d5xI0vBa/VySYy3E=
k8s.io/gengo/v2 v2.0.0-20240228010128-51d4e06bde70 h1:NGrVE502P0s0/1hudf8zjgwki1X/TByhmAoILTarmzo=
@ -662,10 +681,10 @@ k8s.io/kube-openapi v0.0.0-20240808142205-8e686545bdb8 h1:1Wof1cGQgA5pqgo8MxKPtf
k8s.io/kube-openapi v0.0.0-20240808142205-8e686545bdb8/go.mod h1:Os6V6dZwLNii3vxFpxcNaTmH8LJJBkOTg1N0tOA0fvA=
k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 h1:pUdcCO1Lk/tbT5ztQWOBi5HBgbBP1J8+AsQnQCKsi8A=
k8s.io/utils v0.0.0-20240711033017-18e509b52bc8/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
knative.dev/hack v0.0.0-20241227080210-e92a16ae0893 h1:zy7LwNJ2S7obPMHVAtxQgZPXxBTZzoxHbtb6uhxOl7Q=
knative.dev/hack v0.0.0-20241227080210-e92a16ae0893/go.mod h1:R0ritgYtjLDO9527h5vb5X6gfvt5LCrJ55BNbVDsWiY=
knative.dev/pkg v0.0.0-20241223131119-4c901591eb4a h1:31rLKAGHeQEkxMOc/h4XCmHOTiR/1R4NRPvJ3wg05WY=
knative.dev/pkg v0.0.0-20241223131119-4c901591eb4a/go.mod h1:C2dxK66GlycMOS0SKqv0SMAnWkxsYbG4hkH32Xg1qD0=
knative.dev/hack v0.0.0-20250109131303-f8be0ccdff36 h1:iZ6CwYLo+y82MXlK7PoG/cnFEB0tRdw8elBXj6c6ezE=
knative.dev/hack v0.0.0-20250109131303-f8be0ccdff36/go.mod h1:R0ritgYtjLDO9527h5vb5X6gfvt5LCrJ55BNbVDsWiY=
knative.dev/pkg v0.0.0-20250113161000-0c2a238a16ed h1:PUMNZc1CBUg5G4JYio3wjpCJuCJG2ZeCuLvlBTpDiZI=
knative.dev/pkg v0.0.0-20250113161000-0c2a238a16ed/go.mod h1:p7c7yCCf1YVX04FQ8YDyJaHciknu726qVWOXRX5tIBM=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=

vendor/github.com/fxamacker/cbor/v2/.gitignore (generated, vendored, new file)

@ -0,0 +1,12 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out

vendor/github.com/fxamacker/cbor/v2/.golangci.yml (generated, vendored, new file)

@ -0,0 +1,104 @@
# Do not delete linter settings. Linters like gocritic can be enabled on the command line.
linters-settings:
  depguard:
    rules:
      prevent_unmaintained_packages:
        list-mode: strict
        files:
          - $all
          - "!$test"
        allow:
          - $gostd
          - github.com/x448/float16
        deny:
          - pkg: io/ioutil
            desc: "replaced by io and os packages since Go 1.16: https://tip.golang.org/doc/go1.16#ioutil"
  dupl:
    threshold: 100
  funlen:
    lines: 100
    statements: 50
  goconst:
    ignore-tests: true
    min-len: 2
    min-occurrences: 3
  gocritic:
    enabled-tags:
      - diagnostic
      - experimental
      - opinionated
      - performance
      - style
    disabled-checks:
      - commentedOutCode
      - dupImport # https://github.com/go-critic/go-critic/issues/845
      - ifElseChain
      - octalLiteral
      - paramTypeCombine
      - whyNoLint
  gofmt:
    simplify: false
  goimports:
    local-prefixes: github.com/fxamacker/cbor
  golint:
    min-confidence: 0
  govet:
    check-shadowing: true
  lll:
    line-length: 140
  maligned:
    suggest-new: true
  misspell:
    locale: US
  staticcheck:
    checks: ["all"]
linters:
  disable-all: true
  enable:
    - asciicheck
    - bidichk
    - depguard
    - errcheck
    - exportloopref
    - goconst
    - gocritic
    - gocyclo
    - gofmt
    - goimports
    - goprintffuncname
    - gosec
    - gosimple
    - govet
    - ineffassign
    - misspell
    - nilerr
    - revive
    - staticcheck
    - stylecheck
    - typecheck
    - unconvert
    - unused
issues:
  # max-issues-per-linter default is 50. Set to 0 to disable limit.
  max-issues-per-linter: 0
  # max-same-issues default is 3. Set to 0 to disable limit.
  max-same-issues: 0
  exclude-rules:
    - path: decode.go
      text: "string ` overflows ` has (\\d+) occurrences, make it a constant"
    - path: decode.go
      text: "string ` \\(range is \\[` has (\\d+) occurrences, make it a constant"
    - path: decode.go
      text: "string `, ` has (\\d+) occurrences, make it a constant"
    - path: decode.go
      text: "string ` overflows Go's int64` has (\\d+) occurrences, make it a constant"
    - path: decode.go
      text: "string `\\]\\)` has (\\d+) occurrences, make it a constant"
    - path: valid.go
      text: "string ` for type ` has (\\d+) occurrences, make it a constant"
    - path: valid.go
      text: "string `cbor: ` has (\\d+) occurrences, make it a constant"

vendor/github.com/fxamacker/cbor/v2/CODE_OF_CONDUCT.md (generated, vendored, new file)

@ -0,0 +1,133 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or advances of
any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
faye.github@gmail.com.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of
actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the
community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations

vendor/github.com/fxamacker/cbor/v2/CONTRIBUTING.md (generated, vendored, new file)

@ -0,0 +1,41 @@
# How to contribute
You can contribute by using the library, opening issues, or opening pull requests.
## Bug reports and security vulnerabilities
Most issues are tracked publicly on [GitHub](https://github.com/fxamacker/cbor/issues).
To report security vulnerabilities, please email faye.github@gmail.com and allow time for the problem to be resolved before disclosing it to the public. For more info, see [Security Policy](https://github.com/fxamacker/cbor#security-policy).
Please do not send data that might contain personally identifiable information, even if you think you have permission. That type of support requires payment and a signed contract where I'm indemnified, held harmless, and defended by you for any data you send to me.
## Pull requests
Please [create an issue](https://github.com/fxamacker/cbor/issues/new/choose) before you begin work on a PR. The improvement may have already been considered, etc.
Pull requests have signing requirements and must not be anonymous. Exceptions are usually made for docs and CI scripts.
See the [Pull Request Template](https://github.com/fxamacker/cbor/blob/master/.github/pull_request_template.md) for details.
Pull requests have a greater chance of being approved if:
- it does not reduce speed, increase memory use, reduce security, etc. for people not using the new option or feature.
- it has > 97% code coverage.
## Describe your issue
Clearly describe the issue:
* If it's a bug, please provide: **version of this library** and **Go** (`go version`), **unmodified error message**, and describe **how to reproduce it**. Also state **what you expected to happen** instead of the error.
* If you propose a change or addition, try to give an example of how the improved code could look or how to use it.
* If you found a compilation error, please confirm you're using a supported version of Go. If you are, then provide the output of `go version` first, followed by the complete error message.
## Please don't
Please don't send data containing personally identifiable information, even if you think you have permission. That type of support requires payment and a contract where I'm indemnified, held harmless, and defended for any data you send to me.
Please don't send CBOR data larger than 1024 bytes by email. If you want to send crash-producing CBOR data > 1024 bytes by email, please get my permission before sending it to me.
## Credits
- This guide used nlohmann/json contribution guidelines for inspiration as suggested in issue #22.
- Special thanks to @lukseven for pointing out the contribution guidelines didn't mention signing requirements.

vendor/github.com/fxamacker/cbor/v2/LICENSE (generated, vendored, new file)

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2019-present Faye Amacker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

vendor/github.com/fxamacker/cbor/v2/README.md (generated, vendored, new file)

@ -0,0 +1,691 @@
# CBOR Codec in Go
<!-- [![](https://github.com/fxamacker/images/raw/master/cbor/v2.5.0/fxamacker_cbor_banner.png)](#cbor-library-in-go) -->
[fxamacker/cbor](https://github.com/fxamacker/cbor) is a library for encoding and decoding [CBOR](https://www.rfc-editor.org/info/std94) and [CBOR Sequences](https://www.rfc-editor.org/rfc/rfc8742.html).
CBOR is a [trusted alternative](https://www.rfc-editor.org/rfc/rfc8949.html#name-comparison-of-other-binary-) to JSON, MessagePack, Protocol Buffers, etc.&nbsp; CBOR is an Internet&nbsp;Standard defined by [IETF&nbsp;STD&nbsp;94 (RFC&nbsp;8949)](https://www.rfc-editor.org/info/std94) and is designed to be relevant for decades.
`fxamacker/cbor` is used in projects by Arm Ltd., Cisco, EdgeX&nbsp;Foundry, Flow Foundation, Fraunhofer&#8209;AISEC, Kubernetes, Let's&nbsp;Encrypt (ISRG), Linux&nbsp;Foundation, Microsoft, Mozilla, Oasis&nbsp;Protocol, Tailscale, Teleport, [etc](https://github.com/fxamacker/cbor#who-uses-fxamackercbor).
See [Quick&nbsp;Start](#quick-start) and [Releases](https://github.com/fxamacker/cbor/releases/). 🆕 `UnmarshalFirst` and `DiagnoseFirst` can decode CBOR Sequences. `cbor.MarshalToBuffer()` and `UserBufferEncMode` accept a user-specified buffer.
## fxamacker/cbor
[![](https://github.com/fxamacker/cbor/workflows/ci/badge.svg)](https://github.com/fxamacker/cbor/actions?query=workflow%3Aci)
[![](https://github.com/fxamacker/cbor/workflows/cover%20%E2%89%A596%25/badge.svg)](https://github.com/fxamacker/cbor/actions?query=workflow%3A%22cover+%E2%89%A596%25%22)
[![CodeQL](https://github.com/fxamacker/cbor/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/fxamacker/cbor/actions/workflows/codeql-analysis.yml)
[![](https://img.shields.io/badge/fuzzing-passing-44c010)](#fuzzing-and-code-coverage)
[![Go Report Card](https://goreportcard.com/badge/github.com/fxamacker/cbor)](https://goreportcard.com/report/github.com/fxamacker/cbor)
`fxamacker/cbor` is a CBOR codec in full conformance with [IETF STD&nbsp;94 (RFC&nbsp;8949)](https://www.rfc-editor.org/info/std94). It also supports CBOR Sequences ([RFC&nbsp;8742](https://www.rfc-editor.org/rfc/rfc8742.html)) and Extended Diagnostic Notation ([Appendix G of RFC&nbsp;8610](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G)).
Features include full support for CBOR tags, [Core Deterministic Encoding](https://www.rfc-editor.org/rfc/rfc8949.html#name-core-deterministic-encoding), duplicate map key detection, etc.
Design balances trade-offs between security, speed, concurrency, encoded data size, usability, etc.
<details><summary>Highlights</summary><p/>
__🚀&nbsp; Speed__
Encoding and decoding is fast without using Go's `unsafe` package. Slower settings are opt-in. Default limits allow very fast and memory efficient rejection of malformed CBOR data.
__🔒&nbsp; Security__
Decoder has configurable limits that defend against malicious inputs. Duplicate map key detection is supported. By contrast, `encoding/gob` is [not designed to be hardened against adversarial inputs](https://pkg.go.dev/encoding/gob#hdr-Security).
Codec passed multiple confidential security assessments in 2022. No vulnerabilities found in subset of codec in a [nonconfidential security assessment](https://github.com/veraison/go-cose/blob/v1.0.0-rc.1/reports/NCC_Microsoft-go-cose-Report_2022-05-26_v1.0.pdf) prepared by NCC&nbsp;Group for Microsoft&nbsp;Corporation.
__🗜&nbsp; Data Size__
Struct tags (`toarray`, `keyasint`, `omitempty`) automatically reduce size of encoded structs. Encoding optionally shrinks float64→32→16 when values fit.
__🧩&nbsp; Usability__
The API is mostly the same as `encoding/json`, plus interfaces that simplify concurrency for CBOR options. Encoding and decoding modes can be created at startup and reused by any goroutines.
Presets include Core Deterministic Encoding, Preferred Serialization, CTAP2 Canonical CBOR, etc.
__📆&nbsp; Extensibility__
Features include CBOR [extension points](https://www.rfc-editor.org/rfc/rfc8949.html#section-7.1) (e.g. CBOR tags) and extensive settings. API has interfaces that allow users to create custom encoding and decoding without modifying this library.
<hr/>
</details>
### Secure Decoding with Configurable Settings
`fxamacker/cbor` has configurable limits, etc. that defend against malicious CBOR data.
By contrast, `encoding/gob` is [not designed to be hardened against adversarial inputs](https://pkg.go.dev/encoding/gob#hdr-Security).
<details><summary>Example decoding with encoding/gob 💥 fatal error (out of memory)</summary><p/>
```Go
// Example of encoding/gob having "fatal error: runtime: out of memory"
// while decoding 181 bytes.
package main
import (
	"bytes"
	"encoding/gob"
	"encoding/hex"
	"fmt"
)
// Example data is from https://github.com/golang/go/issues/24446
// (shortened to 181 bytes).
const data = "4dffb503010102303001ff30000109010130010800010130010800010130" +
"01ffb80001014a01ffb60001014b01ff860001013001ff860001013001ff" +
"860001013001ff860001013001ffb80000001eff850401010e3030303030" +
"30303030303030303001ff3000010c0104000016ffb70201010830303030" +
"3030303001ff3000010c000030ffb6040405fcff00303030303030303030" +
"303030303030303030303030303030303030303030303030303030303030" +
"30"
type X struct {
	J *X
	K map[string]int
}

func main() {
	raw, _ := hex.DecodeString(data)
	decoder := gob.NewDecoder(bytes.NewReader(raw))
	var x X
	decoder.Decode(&x) // fatal error: runtime: out of memory
	fmt.Println("Decoding finished.")
}
```
<hr/>
</details>
`fxamacker/cbor` is fast at rejecting malformed CBOR data. E.g. attempts to
decode 10 bytes of malicious CBOR data to `[]byte` (with default settings):
| Codec | Speed (ns/op) | Memory | Allocs |
| :---- | ------------: | -----: | -----: |
| fxamacker/cbor 2.5.0 | 44 ± 5% | 32 B/op | 2 allocs/op |
| ugorji/go 1.2.11 | 5353261 ± 4% | 67111321 B/op | 13 allocs/op |
<details><summary>Benchmark details</summary><p/>
Latest comparison used:
- Input: `[]byte{0x9B, 0x00, 0x00, 0x42, 0xFA, 0x42, 0xFA, 0x42, 0xFA, 0x42}`
- go1.19.10, linux/amd64, i5-13600K (disabled all e-cores, DDR4 @2933)
- go test -bench=. -benchmem -count=20
#### Prior comparisons
| Codec | Speed (ns/op) | Memory | Allocs |
| :---- | ------------: | -----: | -----: |
| fxamacker/cbor 2.5.0-beta2 | 44.33 ± 2% | 32 B/op | 2 allocs/op |
| fxamacker/cbor 0.1.0 - 2.4.0 | ~44.68 ± 6% | 32 B/op | 2 allocs/op |
| ugorji/go 1.2.10 | 5524792.50 ± 3% | 67110491 B/op | 12 allocs/op |
| ugorji/go 1.1.0 - 1.2.6 | 💥 runtime: | out of memory: | cannot allocate |
- Input: `[]byte{0x9B, 0x00, 0x00, 0x42, 0xFA, 0x42, 0xFA, 0x42, 0xFA, 0x42}`
- go1.19.6, linux/amd64, i5-13600K (DDR4)
- go test -bench=. -benchmem -count=20
<hr/>
</details>
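As a rough, non-authoritative illustration of the fast-rejection claim above, the sketch below (not part of the vendored README) feeds the benchmark's 10-byte input to the default decoding mode and only checks that an error comes back; which limit or validity check trips first is an implementation detail.

```go
// Minimal sketch: the malicious 10-byte input from the benchmark above should
// be rejected quickly by the default mode instead of triggering a huge allocation.
package main

import (
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

func main() {
	malicious := []byte{0x9B, 0x00, 0x00, 0x42, 0xFA, 0x42, 0xFA, 0x42, 0xFA, 0x42}

	var v interface{}
	if err := cbor.Unmarshal(malicious, &v); err != nil {
		fmt.Println("rejected:", err) // expected path with default limits
		return
	}
	fmt.Println("unexpectedly decoded:", v)
}
```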
### Smaller Encodings with Struct Tags
Struct tags (`toarray`, `keyasint`, `omitempty`) reduce encoded size of structs.
<details><summary>Example encoding 3-level nested Go struct to 1 byte CBOR</summary><p/>
https://go.dev/play/p/YxwvfPdFQG2
```Go
// Example encoding nested struct (with omitempty tag)
// - encoding/json: 18 byte JSON
// - fxamacker/cbor: 1 byte CBOR
package main
import (
	"encoding/hex"
	"encoding/json"
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

type GrandChild struct {
	Quux int `json:",omitempty"`
}

type Child struct {
	Baz int        `json:",omitempty"`
	Qux GrandChild `json:",omitempty"`
}

type Parent struct {
	Foo Child `json:",omitempty"`
	Bar int   `json:",omitempty"`
}

func cb() {
	results, _ := cbor.Marshal(Parent{})
	fmt.Println("hex(CBOR): " + hex.EncodeToString(results))
	text, _ := cbor.Diagnose(results) // Diagnostic Notation
	fmt.Println("DN: " + text)
}

func js() {
	results, _ := json.Marshal(Parent{})
	fmt.Println("hex(JSON): " + hex.EncodeToString(results))
	text := string(results) // JSON
	fmt.Println("JSON: " + text)
}

func main() {
	cb()
	fmt.Println("-------------")
	js()
}
```
Output (DN is Diagnostic Notation):
```
hex(CBOR): a0
DN: {}
-------------
hex(JSON): 7b22466f6f223a7b22517578223a7b7d7d7d
JSON: {"Foo":{"Qux":{}}}
```
<hr/>
</details>
Example using different struct tags together:
![alt text](https://github.com/fxamacker/images/raw/master/cbor/v2.3.0/cbor_struct_tags_api.svg?sanitize=1 "CBOR API and Go Struct Tags")
The API is mostly the same as `encoding/json`, plus interfaces that simplify concurrency for CBOR options.
## Quick Start
__Install__: `go get github.com/fxamacker/cbor/v2` and `import "github.com/fxamacker/cbor/v2"`.
### Key Points
This library can encode and decode CBOR (RFC 8949) and CBOR Sequences (RFC 8742).
- __CBOR data item__ is a single piece of CBOR data and its structure may contain 0 or more nested data items.
- __CBOR sequence__ is a concatenation of 0 or more encoded CBOR data items.
Configurable limits and options can be used to balance trade-offs.
- Encoding and decoding modes are created from options (settings).
- Modes can be created at startup and reused.
- Modes are safe for concurrent use.
### Default Mode
Package level functions only use this library's default settings.
They provide the "default mode" of encoding and decoding.
```go
// API matches encoding/json for Marshal, Unmarshal, Encode, Decode, etc.
b, err = cbor.Marshal(v) // encode v to []byte b
err = cbor.Unmarshal(b, &v) // decode []byte b to v
decoder = cbor.NewDecoder(r) // create decoder with io.Reader r
err = decoder.Decode(&v) // decode a CBOR data item to v
// v2.7.0 added MarshalToBuffer() and UserBufferEncMode interface.
err = cbor.MarshalToBuffer(v, b) // encode v to b instead of using built-in buf pool.
// v2.5.0 added new functions that return remaining bytes.
// UnmarshalFirst decodes first CBOR data item and returns remaining bytes.
rest, err = cbor.UnmarshalFirst(b, &v) // decode []byte b to v
// DiagnoseFirst translates first CBOR data item to text and returns remaining bytes.
text, rest, err = cbor.DiagnoseFirst(b) // decode []byte b to Diagnostic Notation text
// NOTE: Unmarshal returns ExtraneousDataError if there are remaining bytes,
// but new funcs UnmarshalFirst and DiagnoseFirst do not.
```
__IMPORTANT__: 👉 CBOR settings allow trade-offs between speed, security, encoding size, etc.
- Different CBOR libraries may use different default settings.
- CBOR-based formats or protocols usually require specific settings.
For example, WebAuthn uses "CTAP2 Canonical CBOR" which is available as a preset.
### Presets
Presets can be used as-is or as a starting point for custom settings.
```go
// EncOptions is a struct of encoder settings.
func CoreDetEncOptions() EncOptions // RFC 8949 Core Deterministic Encoding
func PreferredUnsortedEncOptions() EncOptions // RFC 8949 Preferred Serialization
func CanonicalEncOptions() EncOptions // RFC 7049 Canonical CBOR
func CTAP2EncOptions() EncOptions // FIDO2 CTAP2 Canonical CBOR
```
Presets are used to create custom modes.
### Custom Modes
Modes are created from settings. Once created, modes have immutable settings.
💡 Create the mode at startup and reuse it. It is safe for concurrent use.
```Go
// Create encoding mode.
opts := cbor.CoreDetEncOptions() // use preset options as a starting point
opts.Time = cbor.TimeUnix // change any settings if needed
em, err := opts.EncMode() // create an immutable encoding mode
// Reuse the encoding mode. It is safe for concurrent use.
// API matches encoding/json.
b, err := em.Marshal(v) // encode v to []byte b
encoder := em.NewEncoder(w) // create encoder with io.Writer w
err := encoder.Encode(v) // encode v to io.Writer w
```
Default mode and custom modes automatically apply struct tags.
### User Specified Buffer for Encoding (v2.7.0)
The `UserBufferEncMode` interface extends the `EncMode` interface to add `MarshalToBuffer()`, which accepts a user-specified buffer instead of using the built-in buffer pool.
```Go
em, err := myEncOptions.UserBufferEncMode() // create UserBufferEncMode mode
var buf bytes.Buffer
err = em.MarshalToBuffer(v, &buf) // encode v to provided buf
```
### Struct Tags
Struct tags (`toarray`, `keyasint`, `omitempty`) reduce encoded size of structs.
<details><summary>Example encoding 3-level nested Go struct to 1 byte CBOR</summary><p/>
https://go.dev/play/p/YxwvfPdFQG2
```Go
// Example encoding nested struct (with omitempty tag)
// - encoding/json: 18 byte JSON
// - fxamacker/cbor: 1 byte CBOR
package main
import (
	"encoding/hex"
	"encoding/json"
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

type GrandChild struct {
	Quux int `json:",omitempty"`
}

type Child struct {
	Baz int        `json:",omitempty"`
	Qux GrandChild `json:",omitempty"`
}

type Parent struct {
	Foo Child `json:",omitempty"`
	Bar int   `json:",omitempty"`
}

func cb() {
	results, _ := cbor.Marshal(Parent{})
	fmt.Println("hex(CBOR): " + hex.EncodeToString(results))
	text, _ := cbor.Diagnose(results) // Diagnostic Notation
	fmt.Println("DN: " + text)
}

func js() {
	results, _ := json.Marshal(Parent{})
	fmt.Println("hex(JSON): " + hex.EncodeToString(results))
	text := string(results) // JSON
	fmt.Println("JSON: " + text)
}

func main() {
	cb()
	fmt.Println("-------------")
	js()
}
```
Output (DN is Diagnostic Notation):
```
hex(CBOR): a0
DN: {}
-------------
hex(JSON): 7b22466f6f223a7b22517578223a7b7d7d7d
JSON: {"Foo":{"Qux":{}}}
```
<hr/>
</details>
<details><summary>Example using several struct tags</summary><p/>
![alt text](https://github.com/fxamacker/images/raw/master/cbor/v2.3.0/cbor_struct_tags_api.svg?sanitize=1 "CBOR API and Go Struct Tags")
</details>
Struct tags simplify use of CBOR-based protocols that require CBOR arrays or maps with integer keys.
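To make the integer-key and array points concrete, here is a small sketch using the `keyasint` and `toarray` struct tags; the `claims` and `point` types are hypothetical examples, not types from this library or the vendored README.

```go
// Minimal sketch of the keyasint and toarray struct tags (hypothetical types).
package main

import (
	"encoding/hex"
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

// claims encodes as a CBOR map with integer keys 1 and 4 instead of field names.
type claims struct {
	Issuer  string `cbor:"1,keyasint"`
	Expires int64  `cbor:"4,keyasint"`
}

// point encodes as a fixed-length CBOR array [X, Y]; field names are dropped.
type point struct {
	_ struct{} `cbor:",toarray"`
	X int
	Y int
}

func main() {
	b1, _ := cbor.Marshal(claims{Issuer: "example", Expires: 1700000000})
	b2, _ := cbor.Marshal(point{X: 1, Y: 2})
	fmt.Println("claims:", hex.EncodeToString(b1)) // map keyed by 1 and 4
	fmt.Println("point: ", hex.EncodeToString(b2)) // two-element array
}
```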
### CBOR Tags
CBOR tags are specified in a `TagSet`.
Custom modes can be created with a `TagSet` to handle CBOR tags.
```go
em, err := opts.EncMode() // no CBOR tags
em, err := opts.EncModeWithTags(ts) // immutable CBOR tags
em, err := opts.EncModeWithSharedTags(ts) // mutable shared CBOR tags
```
`TagSet` and modes using it are safe for concurrent use. Equivalent API is available for `DecMode`.
<details><summary>Example using TagSet and TagOptions</summary><p/>
```go
// Use signedCWT struct defined in "Decoding CWT" example.
// Create TagSet (safe for concurrency).
tags := cbor.NewTagSet()
// Register tag COSE_Sign1 18 with signedCWT type.
tags.Add(
	cbor.TagOptions{EncTag: cbor.EncTagRequired, DecTag: cbor.DecTagRequired},
	reflect.TypeOf(signedCWT{}),
	18)
// Create DecMode with immutable tags.
dm, _ := cbor.DecOptions{}.DecModeWithTags(tags)
// Unmarshal to signedCWT with tag support.
var v signedCWT
if err := dm.Unmarshal(data, &v); err != nil {
	return err
}
// Create EncMode with immutable tags.
em, _ := cbor.EncOptions{}.EncModeWithTags(tags)
// Marshal signedCWT with tag number, using the tag-aware EncMode created above.
if data, err := em.Marshal(v); err != nil {
	return err
}
```
</details>
### Functions and Interfaces
<details><summary>Functions and interfaces at a glance</summary><p/>
Common functions with the same API as `encoding/json`:
- `Marshal`, `Unmarshal`
- `NewEncoder`, `(*Encoder).Encode`
- `NewDecoder`, `(*Decoder).Decode`
NOTE: `Unmarshal` will return `ExtraneousDataError` if there are remaining bytes,
because RFC 8949 treats a CBOR data item with remaining bytes as malformed.
- 💡 Use `UnmarshalFirst` to decode the first CBOR data item and return any remaining bytes.
Other useful functions:
- `Diagnose`, `DiagnoseFirst` produce human-readable [Extended Diagnostic Notation](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G) from CBOR data.
- `UnmarshalFirst` decodes the first CBOR data item and returns any remaining bytes.
- `Wellformed` returns true if the CBOR data item is well-formed.
Interfaces identical or comparable to Go `encoding` packages include:
`Marshaler`, `Unmarshaler`, `BinaryMarshaler`, and `BinaryUnmarshaler`.
The `RawMessage` type can be used to delay CBOR decoding or precompute CBOR encoding (a short sketch follows below).
</details>
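As a hedged illustration of delayed decoding with `RawMessage`, the sketch below keeps a payload encoded until its kind is known; the `envelope` and `greeting` types and their field names are made up for this example, not part of this library.

```go
// Minimal sketch: defer decoding of a payload until its type is known.
package main

import (
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

// envelope is a hypothetical wrapper; Payload stays encoded until Kind is inspected.
type envelope struct {
	Kind    string          `cbor:"kind"`
	Payload cbor.RawMessage `cbor:"payload"`
}

type greeting struct {
	Message string `cbor:"message"`
}

func main() {
	raw, _ := cbor.Marshal(greeting{Message: "hello"})
	b, _ := cbor.Marshal(envelope{Kind: "greeting", Payload: raw})

	var env envelope
	if err := cbor.Unmarshal(b, &env); err != nil {
		panic(err)
	}
	if env.Kind == "greeting" {
		var g greeting
		_ = cbor.Unmarshal(env.Payload, &g) // decode the delayed payload now
		fmt.Println(g.Message)
	}
}
```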
### Security Tips
🔒 Use Go's `io.LimitReader` to limit size when decoding very large or indefinite size data.
Default limits may need to be increased for systems handling very large data (e.g. blockchains).
`DecOptions` can be used to modify default limits for `MaxArrayElements`, `MaxMapPairs`, and `MaxNestedLevels`.
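A minimal sketch combining both tips (the reader `untrustedReader`, the `myMessage` type, and the limit values are assumptions for illustration):

```go
// Cap how many bytes a single message may occupy before decoding it.
limited := io.LimitReader(untrustedReader, 1<<20) // e.g. at most 1 MiB

// Adjust decoding limits to match what the application actually expects.
dm, err := cbor.DecOptions{
	MaxArrayElements: 65536,
	MaxMapPairs:      65536,
	MaxNestedLevels:  16,
}.DecMode()
if err != nil {
	return err
}

var msg myMessage // hypothetical application type
if err := dm.NewDecoder(limited).Decode(&msg); err != nil {
	return err
}
```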
## Status
v2.7.0 (June 23, 2024) adds features and improvements that help large projects (e.g. Kubernetes) use CBOR as an alternative to JSON and Protocol Buffers. Other improvements include speedups, improved memory use, bug fixes, new serialization options, etc. It passed fuzz tests (5+ billion executions) and is production quality.
For more details, see [release notes](https://github.com/fxamacker/cbor/releases).
### Prior Release
[v2.6.0](https://github.com/fxamacker/cbor/releases/tag/v2.6.0) (February 2024) adds important new features, optimizations, and bug fixes. It is especially useful to systems that need to convert data between CBOR and JSON. New options and optimizations improve handling of bignum, integers, maps, and strings.
v2.5.0 was released on Sunday, August 13, 2023 with new features and important bug fixes. It is fuzz tested and production quality after extended beta [v2.5.0-beta](https://github.com/fxamacker/cbor/releases/tag/v2.5.0-beta) (Dec 2022) -> [v2.5.0](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) (Aug 2023).
__IMPORTANT__: 👉 Before upgrading from v2.4 or older release, please read the notable changes highlighted in the release notes. v2.5.0 is a large release with bug fixes to error handling for extraneous data in `Unmarshal`, etc. that should be reviewed before upgrading.
See [v2.5.0 release notes](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) for list of new features, improvements, and bug fixes.
See ["Version and API Changes"](https://github.com/fxamacker/cbor#versions-and-api-changes) section for more info about version numbering, etc.
<!--
<details><summary>👉 Benchmark Comparison: v2.4.0 vs v2.5.0</summary><p/>
TODO: Update to v2.4.0 vs 2.5.0 (not beta2).
Comparison of v2.4.0 vs v2.5.0-beta2 provided by @448 (edited to fit width).
PR [#382](https://github.com/fxamacker/cbor/pull/382) returns buffer to pool in `Encode()`. It adds a bit of overhead to `Encode()` but `NewEncoder().Encode()` is a lot faster and uses less memory as shown here:
```
$ benchstat bench-v2.4.0.log bench-f9e6291.log
goos: linux
goarch: amd64
pkg: github.com/fxamacker/cbor/v2
cpu: 12th Gen Intel(R) Core(TM) i7-12700H
│ bench-v2.4.0.log │ bench-f9e6291.log │
│ sec/op │ sec/op vs base │
NewEncoderEncode/Go_bool_to_CBOR_bool-20 236.70n ± 2% 58.04n ± 1% -75.48% (p=0.000 n=10)
NewEncoderEncode/Go_uint64_to_CBOR_positive_int-20 238.00n ± 2% 63.93n ± 1% -73.14% (p=0.000 n=10)
NewEncoderEncode/Go_int64_to_CBOR_negative_int-20 238.65n ± 2% 64.88n ± 1% -72.81% (p=0.000 n=10)
NewEncoderEncode/Go_float64_to_CBOR_float-20 242.00n ± 2% 63.00n ± 1% -73.97% (p=0.000 n=10)
NewEncoderEncode/Go_[]uint8_to_CBOR_bytes-20 245.60n ± 1% 68.55n ± 1% -72.09% (p=0.000 n=10)
NewEncoderEncode/Go_string_to_CBOR_text-20 243.20n ± 3% 68.39n ± 1% -71.88% (p=0.000 n=10)
NewEncoderEncode/Go_[]int_to_CBOR_array-20 563.0n ± 2% 378.3n ± 0% -32.81% (p=0.000 n=10)
NewEncoderEncode/Go_map[string]string_to_CBOR_map-20 2.043µ ± 2% 1.906µ ± 2% -6.75% (p=0.000 n=10)
geomean 349.7n 122.7n -64.92%
│ bench-v2.4.0.log │ bench-f9e6291.log │
│ B/op │ B/op vs base │
NewEncoderEncode/Go_bool_to_CBOR_bool-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_uint64_to_CBOR_positive_int-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_int64_to_CBOR_negative_int-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_float64_to_CBOR_float-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_[]uint8_to_CBOR_bytes-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_string_to_CBOR_text-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_[]int_to_CBOR_array-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_map[string]string_to_CBOR_map-20 544.0 ± 0% 416.0 ± 0% -23.53% (p=0.000 n=10)
geomean 153.4 ? ¹ ²
¹ summaries must be >0 to compute geomean
² ratios must be >0 to compute geomean
│ bench-v2.4.0.log │ bench-f9e6291.log │
│ allocs/op │ allocs/op vs base │
NewEncoderEncode/Go_bool_to_CBOR_bool-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_uint64_to_CBOR_positive_int-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_int64_to_CBOR_negative_int-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_float64_to_CBOR_float-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_[]uint8_to_CBOR_bytes-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_string_to_CBOR_text-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_[]int_to_CBOR_array-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
NewEncoderEncode/Go_map[string]string_to_CBOR_map-20 28.00 ± 0% 26.00 ± 0% -7.14% (p=0.000 n=10)
geomean 2.782 ? ¹ ²
¹ summaries must be >0 to compute geomean
² ratios must be >0 to compute geomean
```
</details>
-->
## Who uses fxamacker/cbor
`fxamacker/cbor` is used in projects by Arm Ltd., Berlin Institute of Health at Charité, Chainlink, Cisco, Confidential Computing Consortium, ConsenSys, Dapper&nbsp;Labs, EdgeX&nbsp;Foundry, F5, FIDO Alliance, Fraunhofer&#8209;AISEC, Kubernetes, Let's Encrypt (ISRG), Linux&nbsp;Foundation, Matrix.org, Microsoft, Mozilla, National&nbsp;Cybersecurity&nbsp;Agency&nbsp;of&nbsp;France (govt), Netherlands (govt), Oasis Protocol, Smallstep, Tailscale, Taurus SA, Teleport, TIBCO, and others.
`fxamacker/cbor` passed multiple confidential security assessments. A [nonconfidential security assessment](https://github.com/veraison/go-cose/blob/v1.0.0-rc.1/reports/NCC_Microsoft-go-cose-Report_2022-05-26_v1.0.pdf) (prepared by NCC Group for Microsoft Corporation) includes a subset of fxamacker/cbor v2.4.0 in its scope.
## Standards
`fxamacker/cbor` is a CBOR codec in full conformance with [IETF STD&nbsp;94 (RFC&nbsp;8949)](https://www.rfc-editor.org/info/std94). It also supports CBOR Sequences ([RFC&nbsp;8742](https://www.rfc-editor.org/rfc/rfc8742.html)) and Extended Diagnostic Notation ([Appendix G of RFC&nbsp;8610](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G)).
Notable CBOR features include:
| CBOR Feature | Description |
| :--- | :--- |
| CBOR tags | API supports built-in and user-defined tags. |
| Preferred serialization | Integers encode to fewest bytes. Optional float64 → float32 → float16. |
| Map key sorting | Unsorted, length-first (Canonical CBOR), and bytewise-lexicographic (CTAP2). |
| Duplicate map keys | Always forbidden when encoding; option to allow or forbid when decoding. |
| Indefinite length data | Option to allow/forbid for encoding and decoding. |
| Well-formedness | Always checked and enforced. |
| Basic validity checks | Optionally check UTF-8 validity and duplicate map keys. |
| Security considerations | Prevent integer overflow and resource exhaustion (RFC 8949 Section 10). |
Known limitations are noted in the [Limitations section](#limitations).
Go nil values for slices, maps, pointers, etc. are encoded as CBOR null. Empty slices, maps, etc. are encoded as empty CBOR arrays and maps.
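For example, with package-level `Marshal` and default options:

```go
b, _ := cbor.Marshal([]int(nil))         // b == []byte{0xf6} (CBOR null)
b, _ = cbor.Marshal([]int{})             // b == []byte{0x80} (empty CBOR array)
b, _ = cbor.Marshal(map[string]int(nil)) // b == []byte{0xf6} (CBOR null)
b, _ = cbor.Marshal(map[string]int{})    // b == []byte{0xa0} (empty CBOR map)
```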
Decoder checks for all required well-formedness errors, including all "subkinds" of syntax errors and too little data.
After well-formedness is verified, basic validity errors are handled as follows:
* Invalid UTF-8 string: Decoder has option to check and return invalid UTF-8 string error. This check is enabled by default.
* Duplicate keys in a map: Decoder has options to ignore or enforce rejection of duplicate map keys.
When decoding well-formed CBOR arrays and maps, decoder saves the first error it encounters and continues with the next item. Options to handle this differently may be added in the future.
By default, decoder treats time values of floating-point NaN and Infinity as if they are CBOR Null or CBOR Undefined.
__Click to expand topic:__
<details>
<summary>Duplicate Map Keys</summary><p>
This library provides options for fast detection and rejection of duplicate map keys based on applying a Go-specific data model to CBOR's extended generic data model in order to determine duplicate vs distinct map keys. Detection relies on whether the CBOR map key would be a duplicate "key" when decoded and applied to the user-provided Go map or struct.
`DupMapKeyQuiet` turns off detection of duplicate map keys. It tries to use a "keep fastest" method by choosing either "keep first" or "keep last" depending on the Go data type.
`DupMapKeyEnforcedAPF` enforces detection and rejection of duplicate map keys. Decoding stops immediately and returns `DupMapKeyError` when the first duplicate key is detected. The error includes the duplicate map key and the index number.
APF suffix means "Allow Partial Fill" so the destination map or struct can contain some decoded values at the time of error. It is the caller's responsibility to respond to the `DupMapKeyError` by discarding the partially filled result if that's required by their protocol.
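A short sketch of opting into rejection (the `data` input is assumed):

```go
// Create a DecMode that rejects duplicate map keys.
dm, _ := cbor.DecOptions{DupMapKey: cbor.DupMapKeyEnforcedAPF}.DecMode()

var m map[string]int
err := dm.Unmarshal(data, &m)
// On the first duplicate key, err describes the duplicate (DupMapKeyError) and
// m may be partially filled ("Allow Partial Fill"); discard m if the protocol requires it.
```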
</details>
<details>
<summary>Tag Validity</summary><p>
This library checks tag validity for built-in tags (currently tag numbers 0, 1, 2, 3, and 55799):
* Inadmissible type for tag content
* Inadmissible value for tag content
Unknown tag data items (not tag number 0, 1, 2, 3, or 55799) are handled in two ways:
* When decoding into an empty interface, unknown tag data item will be decoded into `cbor.Tag` data type, which contains tag number and tag content. The tag content will be decoded into the default Go data type for the CBOR data type.
* When decoding into other Go types, unknown tag data item is decoded into the specified Go type. If Go type is registered with a tag number, the tag number can optionally be verified.
Decoder also has an option to forbid tag data items (treat any tag data item as error) which is specified by protocols such as CTAP2 Canonical CBOR.
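For protocols that ban tags outright, a brief sketch (the `data` and `v` inputs are assumed):

```go
// Create a DecMode that treats any CBOR tag data item as an error,
// as CTAP2 Canonical CBOR requires.
dm, _ := cbor.DecOptions{TagsMd: cbor.TagsForbidden}.DecMode()
err := dm.Unmarshal(data, &v)
```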
For more information, see [decoding options](#decoding-options-1) and [tag options](#tag-options).
</details>
## Limitations
If any of these limitations prevent you from using this library, please open an issue along with a link to your project.
* CBOR `Undefined` (0xf7) value decodes to Go's `nil` value. CBOR `Null` (0xf6) more closely matches Go's `nil`.
* CBOR map keys with data types that Go doesn't support as map keys are ignored; decoding continues for the remaining items and an error is then returned.
* When decoding registered CBOR tag data to interface type, decoder creates a pointer to registered Go type matching CBOR tag number. Requiring a pointer for this is a Go limitation.
## Fuzzing and Code Coverage
__Code coverage__ is always 95% or higher (with `go test -cover`) when tagging a release.
__Coverage-guided fuzzing__ must pass billions of execs before a release is tagged. Fuzzing is done using nonpublic code which may eventually get merged into this project. Until then, reports like OpenSSF&nbsp;Scorecard can't detect fuzz tests being used by this project.
<hr>
## Versions and API Changes
This project uses [Semantic Versioning](https://semver.org), so the API is always backwards compatible unless the major version number changes.
These functions have signatures identical to encoding/json and their API will continue to match `encoding/json` even after major new releases:
`Marshal`, `Unmarshal`, `NewEncoder`, `NewDecoder`, `(*Encoder).Encode`, and `(*Decoder).Decode`.
Exclusions from SemVer:
- Newly added API documented as "subject to change".
- Newly added API in the master branch that has never been tagged in non-beta release.
- If function parameters are unchanged, bug fixes that change behavior (e.g. returning an error for an edge case that was missed in a prior version). We try to highlight these in the release notes and add an extended beta period. E.g. [v2.5.0-beta](https://github.com/fxamacker/cbor/releases/tag/v2.5.0-beta) (Dec 2022) -> [v2.5.0](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) (Aug 2023).
This project avoids breaking changes to behavior of encoding and decoding functions unless required to improve conformance with supported RFCs (e.g. RFC 8949, RFC 8742, etc.). Visible changes that don't improve conformance to standards are typically made available as new opt-in settings or new functions.
## Code of Conduct
This project has adopted the [Contributor Covenant Code of Conduct](CODE_OF_CONDUCT.md). Contact [faye.github@gmail.com](mailto:faye.github@gmail.com) with any questions or comments.
## Contributing
Please open an issue before beginning work on a PR. The improvement may have already been considered, etc.
For more info, see [How to Contribute](CONTRIBUTING.md).
## Security Policy
Security fixes are provided for the latest released version of fxamacker/cbor.
For the full text of the Security Policy, see [SECURITY.md](SECURITY.md).
## Acknowledgements
Many thanks to all the contributors on this project!
I'm especially grateful to Bastian Müller and Dieter Shirley for suggesting and collaborating on CBOR stream mode, and much more.
I'm very grateful to Stefan Tatschner, Yawning Angel, Jernej Kos, x448, ZenGround0, and Jakob Borg for their contributions or support in the very early days.
Big thanks to Ben Luddy for his contributions in v2.6.0 and v2.7.0.
This library clearly wouldn't be possible without Carsten Bormann authoring CBOR RFCs.
Special thanks to Laurence Lundblade and Jeffrey Yasskin for their help on IETF mailing list or at [7049bis](https://github.com/cbor-wg/CBORbis).
Huge thanks to The Go Authors for creating a fun and practical programming language with batteries included!
This library uses `x448/float16` which used to be included. As a standalone package, `x448/float16` is useful to other projects as well.
## License
Copyright © 2019-2024 [Faye Amacker](https://github.com/fxamacker).
fxamacker/cbor is licensed under the MIT License. See [LICENSE](LICENSE) for the full license text.
<hr>

7
vendor/github.com/fxamacker/cbor/v2/SECURITY.md generated vendored Normal file

@ -0,0 +1,7 @@
# Security Policy
Security fixes are provided for the latest released version of fxamacker/cbor.
If the security vulnerability is already known to the public, then you can open an issue as a bug report.
To report security vulnerabilities not yet known to the public, please email faye.github@gmail.com and allow time for the problem to be resolved before reporting it to the public.

63
vendor/github.com/fxamacker/cbor/v2/bytestring.go generated vendored Normal file

@ -0,0 +1,63 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"errors"
)
// ByteString represents CBOR byte string (major type 2). ByteString can be used
// when using a Go []byte is not possible or convenient. For example, Go doesn't
// allow []byte as map key, so ByteString can be used to support data formats
// having CBOR map with byte string keys. ByteString can also be used to
// encode invalid UTF-8 string as CBOR byte string.
// See DecOption.MapKeyByteStringMode for more details.
type ByteString string
// Bytes returns bytes representing ByteString.
func (bs ByteString) Bytes() []byte {
return []byte(bs)
}
// MarshalCBOR encodes ByteString as CBOR byte string (major type 2).
func (bs ByteString) MarshalCBOR() ([]byte, error) {
e := getEncodeBuffer()
defer putEncodeBuffer(e)
// Encode length
encodeHead(e, byte(cborTypeByteString), uint64(len(bs)))
// Encode data
buf := make([]byte, e.Len()+len(bs))
n := copy(buf, e.Bytes())
copy(buf[n:], bs)
return buf, nil
}
// UnmarshalCBOR decodes CBOR byte string (major type 2) to ByteString.
// Decoding CBOR null and CBOR undefined sets ByteString to be empty.
func (bs *ByteString) UnmarshalCBOR(data []byte) error {
if bs == nil {
return errors.New("cbor.ByteString: UnmarshalCBOR on nil pointer")
}
// Decoding CBOR null and CBOR undefined to ByteString resets data.
// This behavior is similar to decoding CBOR null and CBOR undefined to []byte.
if len(data) == 1 && (data[0] == 0xf6 || data[0] == 0xf7) {
*bs = ""
return nil
}
d := decoder{data: data, dm: defaultDecMode}
// Check if CBOR data type is byte string
if typ := d.nextCBORType(); typ != cborTypeByteString {
return &UnmarshalTypeError{CBORType: typ.String(), GoType: typeByteString.String()}
}
b, _ := d.parseByteString()
*bs = ByteString(b)
return nil
}

363
vendor/github.com/fxamacker/cbor/v2/cache.go generated vendored Normal file

@ -0,0 +1,363 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"bytes"
"errors"
"fmt"
"reflect"
"sort"
"strconv"
"strings"
"sync"
)
type encodeFuncs struct {
ef encodeFunc
ief isEmptyFunc
}
var (
decodingStructTypeCache sync.Map // map[reflect.Type]*decodingStructType
encodingStructTypeCache sync.Map // map[reflect.Type]*encodingStructType
encodeFuncCache sync.Map // map[reflect.Type]encodeFuncs
typeInfoCache sync.Map // map[reflect.Type]*typeInfo
)
type specialType int
const (
specialTypeNone specialType = iota
specialTypeUnmarshalerIface
specialTypeEmptyIface
specialTypeIface
specialTypeTag
specialTypeTime
)
type typeInfo struct {
elemTypeInfo *typeInfo
keyTypeInfo *typeInfo
typ reflect.Type
kind reflect.Kind
nonPtrType reflect.Type
nonPtrKind reflect.Kind
spclType specialType
}
func newTypeInfo(t reflect.Type) *typeInfo {
tInfo := typeInfo{typ: t, kind: t.Kind()}
for t.Kind() == reflect.Ptr {
t = t.Elem()
}
k := t.Kind()
tInfo.nonPtrType = t
tInfo.nonPtrKind = k
if k == reflect.Interface {
if t.NumMethod() == 0 {
tInfo.spclType = specialTypeEmptyIface
} else {
tInfo.spclType = specialTypeIface
}
} else if t == typeTag {
tInfo.spclType = specialTypeTag
} else if t == typeTime {
tInfo.spclType = specialTypeTime
} else if reflect.PtrTo(t).Implements(typeUnmarshaler) {
tInfo.spclType = specialTypeUnmarshalerIface
}
switch k {
case reflect.Array, reflect.Slice:
tInfo.elemTypeInfo = getTypeInfo(t.Elem())
case reflect.Map:
tInfo.keyTypeInfo = getTypeInfo(t.Key())
tInfo.elemTypeInfo = getTypeInfo(t.Elem())
}
return &tInfo
}
type decodingStructType struct {
fields fields
fieldIndicesByName map[string]int
err error
toArray bool
}
// The stdlib errors.Join was introduced in Go 1.20, and we still support Go 1.17, so instead,
// here's a very basic implementation of an aggregated error.
type multierror []error
func (m multierror) Error() string {
var sb strings.Builder
for i, err := range m {
sb.WriteString(err.Error())
if i < len(m)-1 {
sb.WriteString(", ")
}
}
return sb.String()
}
func getDecodingStructType(t reflect.Type) *decodingStructType {
if v, _ := decodingStructTypeCache.Load(t); v != nil {
return v.(*decodingStructType)
}
flds, structOptions := getFields(t)
toArray := hasToArrayOption(structOptions)
var errs []error
for i := 0; i < len(flds); i++ {
if flds[i].keyAsInt {
nameAsInt, numErr := strconv.Atoi(flds[i].name)
if numErr != nil {
errs = append(errs, errors.New("cbor: failed to parse field name \""+flds[i].name+"\" to int ("+numErr.Error()+")"))
break
}
flds[i].nameAsInt = int64(nameAsInt)
}
flds[i].typInfo = getTypeInfo(flds[i].typ)
}
fieldIndicesByName := make(map[string]int, len(flds))
for i, fld := range flds {
if _, ok := fieldIndicesByName[fld.name]; ok {
errs = append(errs, fmt.Errorf("cbor: two or more fields of %v have the same name %q", t, fld.name))
continue
}
fieldIndicesByName[fld.name] = i
}
var err error
{
var multi multierror
for _, each := range errs {
if each != nil {
multi = append(multi, each)
}
}
if len(multi) == 1 {
err = multi[0]
} else if len(multi) > 1 {
err = multi
}
}
structType := &decodingStructType{
fields: flds,
fieldIndicesByName: fieldIndicesByName,
err: err,
toArray: toArray,
}
decodingStructTypeCache.Store(t, structType)
return structType
}
type encodingStructType struct {
fields fields
bytewiseFields fields
lengthFirstFields fields
omitEmptyFieldsIdx []int
err error
toArray bool
}
func (st *encodingStructType) getFields(em *encMode) fields {
switch em.sort {
case SortNone, SortFastShuffle:
return st.fields
case SortLengthFirst:
return st.lengthFirstFields
default:
return st.bytewiseFields
}
}
type bytewiseFieldSorter struct {
fields fields
}
func (x *bytewiseFieldSorter) Len() int {
return len(x.fields)
}
func (x *bytewiseFieldSorter) Swap(i, j int) {
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
}
func (x *bytewiseFieldSorter) Less(i, j int) bool {
return bytes.Compare(x.fields[i].cborName, x.fields[j].cborName) <= 0
}
type lengthFirstFieldSorter struct {
fields fields
}
func (x *lengthFirstFieldSorter) Len() int {
return len(x.fields)
}
func (x *lengthFirstFieldSorter) Swap(i, j int) {
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
}
func (x *lengthFirstFieldSorter) Less(i, j int) bool {
if len(x.fields[i].cborName) != len(x.fields[j].cborName) {
return len(x.fields[i].cborName) < len(x.fields[j].cborName)
}
return bytes.Compare(x.fields[i].cborName, x.fields[j].cborName) <= 0
}
func getEncodingStructType(t reflect.Type) (*encodingStructType, error) {
if v, _ := encodingStructTypeCache.Load(t); v != nil {
structType := v.(*encodingStructType)
return structType, structType.err
}
flds, structOptions := getFields(t)
if hasToArrayOption(structOptions) {
return getEncodingStructToArrayType(t, flds)
}
var err error
var hasKeyAsInt bool
var hasKeyAsStr bool
var omitEmptyIdx []int
e := getEncodeBuffer()
for i := 0; i < len(flds); i++ {
// Get field's encodeFunc
flds[i].ef, flds[i].ief = getEncodeFunc(flds[i].typ)
if flds[i].ef == nil {
err = &UnsupportedTypeError{t}
break
}
// Encode field name
if flds[i].keyAsInt {
nameAsInt, numErr := strconv.Atoi(flds[i].name)
if numErr != nil {
err = errors.New("cbor: failed to parse field name \"" + flds[i].name + "\" to int (" + numErr.Error() + ")")
break
}
flds[i].nameAsInt = int64(nameAsInt)
if nameAsInt >= 0 {
encodeHead(e, byte(cborTypePositiveInt), uint64(nameAsInt))
} else {
n := nameAsInt*(-1) - 1
encodeHead(e, byte(cborTypeNegativeInt), uint64(n))
}
flds[i].cborName = make([]byte, e.Len())
copy(flds[i].cborName, e.Bytes())
e.Reset()
hasKeyAsInt = true
} else {
encodeHead(e, byte(cborTypeTextString), uint64(len(flds[i].name)))
flds[i].cborName = make([]byte, e.Len()+len(flds[i].name))
n := copy(flds[i].cborName, e.Bytes())
copy(flds[i].cborName[n:], flds[i].name)
e.Reset()
// If cborName contains a text string, then cborNameByteString contains a
// string that has the byte string major type but is otherwise identical to
// cborName.
flds[i].cborNameByteString = make([]byte, len(flds[i].cborName))
copy(flds[i].cborNameByteString, flds[i].cborName)
// Reset encoded CBOR type to byte string, preserving the "additional
// information" bits:
flds[i].cborNameByteString[0] = byte(cborTypeByteString) |
getAdditionalInformation(flds[i].cborNameByteString[0])
hasKeyAsStr = true
}
// Check if field can be omitted when empty
if flds[i].omitEmpty {
omitEmptyIdx = append(omitEmptyIdx, i)
}
}
putEncodeBuffer(e)
if err != nil {
structType := &encodingStructType{err: err}
encodingStructTypeCache.Store(t, structType)
return structType, structType.err
}
// Sort fields by canonical order
bytewiseFields := make(fields, len(flds))
copy(bytewiseFields, flds)
sort.Sort(&bytewiseFieldSorter{bytewiseFields})
lengthFirstFields := bytewiseFields
if hasKeyAsInt && hasKeyAsStr {
lengthFirstFields = make(fields, len(flds))
copy(lengthFirstFields, flds)
sort.Sort(&lengthFirstFieldSorter{lengthFirstFields})
}
structType := &encodingStructType{
fields: flds,
bytewiseFields: bytewiseFields,
lengthFirstFields: lengthFirstFields,
omitEmptyFieldsIdx: omitEmptyIdx,
}
encodingStructTypeCache.Store(t, structType)
return structType, structType.err
}
func getEncodingStructToArrayType(t reflect.Type, flds fields) (*encodingStructType, error) {
for i := 0; i < len(flds); i++ {
// Get field's encodeFunc
flds[i].ef, flds[i].ief = getEncodeFunc(flds[i].typ)
if flds[i].ef == nil {
structType := &encodingStructType{err: &UnsupportedTypeError{t}}
encodingStructTypeCache.Store(t, structType)
return structType, structType.err
}
}
structType := &encodingStructType{
fields: flds,
toArray: true,
}
encodingStructTypeCache.Store(t, structType)
return structType, structType.err
}
func getEncodeFunc(t reflect.Type) (encodeFunc, isEmptyFunc) {
if v, _ := encodeFuncCache.Load(t); v != nil {
fs := v.(encodeFuncs)
return fs.ef, fs.ief
}
ef, ief := getEncodeFuncInternal(t)
encodeFuncCache.Store(t, encodeFuncs{ef, ief})
return ef, ief
}
func getTypeInfo(t reflect.Type) *typeInfo {
if v, _ := typeInfoCache.Load(t); v != nil {
return v.(*typeInfo)
}
tInfo := newTypeInfo(t)
typeInfoCache.Store(t, tInfo)
return tInfo
}
func hasToArrayOption(tag string) bool {
s := ",toarray"
idx := strings.Index(tag, s)
return idx >= 0 && (len(tag) == idx+len(s) || tag[idx+len(s)] == ',')
}

182
vendor/github.com/fxamacker/cbor/v2/common.go generated vendored Normal file

@ -0,0 +1,182 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"fmt"
"strconv"
)
type cborType uint8
const (
cborTypePositiveInt cborType = 0x00
cborTypeNegativeInt cborType = 0x20
cborTypeByteString cborType = 0x40
cborTypeTextString cborType = 0x60
cborTypeArray cborType = 0x80
cborTypeMap cborType = 0xa0
cborTypeTag cborType = 0xc0
cborTypePrimitives cborType = 0xe0
)
func (t cborType) String() string {
switch t {
case cborTypePositiveInt:
return "positive integer"
case cborTypeNegativeInt:
return "negative integer"
case cborTypeByteString:
return "byte string"
case cborTypeTextString:
return "UTF-8 text string"
case cborTypeArray:
return "array"
case cborTypeMap:
return "map"
case cborTypeTag:
return "tag"
case cborTypePrimitives:
return "primitives"
default:
return "Invalid type " + strconv.Itoa(int(t))
}
}
type additionalInformation uint8
const (
maxAdditionalInformationWithoutArgument = 23
additionalInformationWith1ByteArgument = 24
additionalInformationWith2ByteArgument = 25
additionalInformationWith4ByteArgument = 26
additionalInformationWith8ByteArgument = 27
// For major type 7.
additionalInformationAsFalse = 20
additionalInformationAsTrue = 21
additionalInformationAsNull = 22
additionalInformationAsUndefined = 23
additionalInformationAsFloat16 = 25
additionalInformationAsFloat32 = 26
additionalInformationAsFloat64 = 27
// For major type 2, 3, 4, 5.
additionalInformationAsIndefiniteLengthFlag = 31
)
const (
maxSimpleValueInAdditionalInformation = 23
minSimpleValueIn1ByteArgument = 32
)
func (ai additionalInformation) isIndefiniteLength() bool {
return ai == additionalInformationAsIndefiniteLengthFlag
}
const (
// From RFC 8949 Section 3:
// "The initial byte of each encoded data item contains both information about the major type
// (the high-order 3 bits, described in Section 3.1) and additional information
// (the low-order 5 bits)."
// typeMask is used to extract major type in initial byte of encoded data item.
typeMask = 0xe0
// additionalInformationMask is used to extract additional information in initial byte of encoded data item.
additionalInformationMask = 0x1f
)
func getType(raw byte) cborType {
return cborType(raw & typeMask)
}
func getAdditionalInformation(raw byte) byte {
return raw & additionalInformationMask
}
func isBreakFlag(raw byte) bool {
return raw == cborBreakFlag
}
func parseInitialByte(b byte) (t cborType, ai byte) {
return getType(b), getAdditionalInformation(b)
}
const (
tagNumRFC3339Time = 0
tagNumEpochTime = 1
tagNumUnsignedBignum = 2
tagNumNegativeBignum = 3
tagNumExpectedLaterEncodingBase64URL = 21
tagNumExpectedLaterEncodingBase64 = 22
tagNumExpectedLaterEncodingBase16 = 23
tagNumSelfDescribedCBOR = 55799
)
const (
cborBreakFlag = byte(0xff)
cborByteStringWithIndefiniteLengthHead = byte(0x5f)
cborTextStringWithIndefiniteLengthHead = byte(0x7f)
cborArrayWithIndefiniteLengthHead = byte(0x9f)
cborMapWithIndefiniteLengthHead = byte(0xbf)
)
var (
cborFalse = []byte{0xf4}
cborTrue = []byte{0xf5}
cborNil = []byte{0xf6}
cborNaN = []byte{0xf9, 0x7e, 0x00}
cborPositiveInfinity = []byte{0xf9, 0x7c, 0x00}
cborNegativeInfinity = []byte{0xf9, 0xfc, 0x00}
)
// validBuiltinTag checks that supported built-in tag numbers are followed by expected content types.
func validBuiltinTag(tagNum uint64, contentHead byte) error {
t := getType(contentHead)
switch tagNum {
case tagNumRFC3339Time:
// Tag content (date/time text string in RFC 3339 format) must be string type.
if t != cborTypeTextString {
return newInadmissibleTagContentTypeError(
tagNumRFC3339Time,
"text string",
t.String())
}
return nil
case tagNumEpochTime:
// Tag content (epoch date/time) must be uint, int, or float type.
if t != cborTypePositiveInt && t != cborTypeNegativeInt && (contentHead < 0xf9 || contentHead > 0xfb) {
return newInadmissibleTagContentTypeError(
tagNumEpochTime,
"integer or floating-point number",
t.String())
}
return nil
case tagNumUnsignedBignum, tagNumNegativeBignum:
// Tag content (bignum) must be byte type.
if t != cborTypeByteString {
return newInadmissibleTagContentTypeErrorf(
fmt.Sprintf(
"tag number %d or %d must be followed by byte string, got %s",
tagNumUnsignedBignum,
tagNumNegativeBignum,
t.String(),
))
}
return nil
case tagNumExpectedLaterEncodingBase64URL, tagNumExpectedLaterEncodingBase64, tagNumExpectedLaterEncodingBase16:
// From RFC 8949 3.4.5.2:
// The data item tagged can be a byte string or any other data item. In the latter
// case, the tag applies to all of the byte string data items contained in the data
// item, except for those contained in a nested data item tagged with an expected
// conversion.
return nil
}
return nil
}

3187
vendor/github.com/fxamacker/cbor/v2/decode.go generated vendored Normal file

File diff suppressed because it is too large

724
vendor/github.com/fxamacker/cbor/v2/diagnose.go generated vendored Normal file

@ -0,0 +1,724 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"bytes"
"encoding/base32"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"
"math"
"math/big"
"strconv"
"unicode/utf16"
"unicode/utf8"
"github.com/x448/float16"
)
// DiagMode is the main interface for CBOR diagnostic notation.
type DiagMode interface {
// Diagnose returns extended diagnostic notation (EDN) of CBOR data items using this DiagMode.
Diagnose([]byte) (string, error)
// DiagnoseFirst returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest.
DiagnoseFirst([]byte) (string, []byte, error)
// DiagOptions returns user specified options used to create this DiagMode.
DiagOptions() DiagOptions
}
// ByteStringEncoding specifies the base encoding that byte strings are notated.
type ByteStringEncoding uint8
const (
// ByteStringBase16Encoding encodes byte strings in base16, without padding.
ByteStringBase16Encoding ByteStringEncoding = iota
// ByteStringBase32Encoding encodes byte strings in base32, without padding.
ByteStringBase32Encoding
// ByteStringBase32HexEncoding encodes byte strings in base32hex, without padding.
ByteStringBase32HexEncoding
// ByteStringBase64Encoding encodes byte strings in base64url, without padding.
ByteStringBase64Encoding
maxByteStringEncoding
)
func (bse ByteStringEncoding) valid() error {
if bse >= maxByteStringEncoding {
return errors.New("cbor: invalid ByteStringEncoding " + strconv.Itoa(int(bse)))
}
return nil
}
// DiagOptions specifies Diag options.
type DiagOptions struct {
// ByteStringEncoding specifies the base encoding that byte strings are notated.
// Default is ByteStringBase16Encoding.
ByteStringEncoding ByteStringEncoding
// ByteStringHexWhitespace specifies notating with whitespace in byte string
// when ByteStringEncoding is ByteStringBase16Encoding.
ByteStringHexWhitespace bool
// ByteStringText specifies notating with text in byte string
// if it is a valid UTF-8 text.
ByteStringText bool
// ByteStringEmbeddedCBOR specifies notating embedded CBOR in byte string
// if it is a valid CBOR bytes.
ByteStringEmbeddedCBOR bool
// CBORSequence specifies notating CBOR sequences.
// otherwise, it returns an error if there are more bytes after the first CBOR.
CBORSequence bool
// FloatPrecisionIndicator specifies appending a suffix to indicate float precision.
// Refer to https://www.rfc-editor.org/rfc/rfc8949.html#name-encoding-indicators.
FloatPrecisionIndicator bool
// MaxNestedLevels specifies the max nested levels allowed for any combination of CBOR array, maps, and tags.
// Default is 32 levels and it can be set to [4, 65535]. Note that higher maximum levels of nesting can
// require larger amounts of stack to deserialize. Don't increase this higher than you require.
MaxNestedLevels int
// MaxArrayElements specifies the max number of elements for CBOR arrays.
// Default is 128*1024=131072 and it can be set to [16, 2147483647]
MaxArrayElements int
// MaxMapPairs specifies the max number of key-value pairs for CBOR maps.
// Default is 128*1024=131072 and it can be set to [16, 2147483647]
MaxMapPairs int
}
// DiagMode returns a DiagMode with immutable options.
func (opts DiagOptions) DiagMode() (DiagMode, error) {
return opts.diagMode()
}
func (opts DiagOptions) diagMode() (*diagMode, error) {
if err := opts.ByteStringEncoding.valid(); err != nil {
return nil, err
}
decMode, err := DecOptions{
MaxNestedLevels: opts.MaxNestedLevels,
MaxArrayElements: opts.MaxArrayElements,
MaxMapPairs: opts.MaxMapPairs,
}.decMode()
if err != nil {
return nil, err
}
return &diagMode{
byteStringEncoding: opts.ByteStringEncoding,
byteStringHexWhitespace: opts.ByteStringHexWhitespace,
byteStringText: opts.ByteStringText,
byteStringEmbeddedCBOR: opts.ByteStringEmbeddedCBOR,
cborSequence: opts.CBORSequence,
floatPrecisionIndicator: opts.FloatPrecisionIndicator,
decMode: decMode,
}, nil
}
type diagMode struct {
byteStringEncoding ByteStringEncoding
byteStringHexWhitespace bool
byteStringText bool
byteStringEmbeddedCBOR bool
cborSequence bool
floatPrecisionIndicator bool
decMode *decMode
}
// DiagOptions returns user specified options used to create this DiagMode.
func (dm *diagMode) DiagOptions() DiagOptions {
return DiagOptions{
ByteStringEncoding: dm.byteStringEncoding,
ByteStringHexWhitespace: dm.byteStringHexWhitespace,
ByteStringText: dm.byteStringText,
ByteStringEmbeddedCBOR: dm.byteStringEmbeddedCBOR,
CBORSequence: dm.cborSequence,
FloatPrecisionIndicator: dm.floatPrecisionIndicator,
MaxNestedLevels: dm.decMode.maxNestedLevels,
MaxArrayElements: dm.decMode.maxArrayElements,
MaxMapPairs: dm.decMode.maxMapPairs,
}
}
// Diagnose returns extended diagnostic notation (EDN) of CBOR data items using the DiagMode.
func (dm *diagMode) Diagnose(data []byte) (string, error) {
return newDiagnose(data, dm.decMode, dm).diag(dm.cborSequence)
}
// DiagnoseFirst returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest.
func (dm *diagMode) DiagnoseFirst(data []byte) (diagNotation string, rest []byte, err error) {
return newDiagnose(data, dm.decMode, dm).diagFirst()
}
var defaultDiagMode, _ = DiagOptions{}.diagMode()
// Diagnose returns extended diagnostic notation (EDN) of CBOR data items
// using the default diagnostic mode.
//
// Refer to https://www.rfc-editor.org/rfc/rfc8949.html#name-diagnostic-notation.
func Diagnose(data []byte) (string, error) {
return defaultDiagMode.Diagnose(data)
}
// Diagnose returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest.
func DiagnoseFirst(data []byte) (diagNotation string, rest []byte, err error) {
return defaultDiagMode.DiagnoseFirst(data)
}
type diagnose struct {
dm *diagMode
d *decoder
w *bytes.Buffer
}
func newDiagnose(data []byte, decm *decMode, diagm *diagMode) *diagnose {
return &diagnose{
dm: diagm,
d: &decoder{data: data, dm: decm},
w: &bytes.Buffer{},
}
}
func (di *diagnose) diag(cborSequence bool) (string, error) {
// CBOR Sequence
firstItem := true
for {
switch err := di.wellformed(cborSequence); err {
case nil:
if !firstItem {
di.w.WriteString(", ")
}
firstItem = false
if itemErr := di.item(); itemErr != nil {
return di.w.String(), itemErr
}
case io.EOF:
if firstItem {
return di.w.String(), err
}
return di.w.String(), nil
default:
return di.w.String(), err
}
}
}
func (di *diagnose) diagFirst() (diagNotation string, rest []byte, err error) {
err = di.wellformed(true)
if err == nil {
err = di.item()
}
if err == nil {
// Return EDN and the rest of the data slice (which might be len 0)
return di.w.String(), di.d.data[di.d.off:], nil
}
return di.w.String(), nil, err
}
func (di *diagnose) wellformed(allowExtraData bool) error {
off := di.d.off
err := di.d.wellformed(allowExtraData, false)
di.d.off = off
return err
}
func (di *diagnose) item() error { //nolint:gocyclo
initialByte := di.d.data[di.d.off]
switch initialByte {
case cborByteStringWithIndefiniteLengthHead,
cborTextStringWithIndefiniteLengthHead: // indefinite-length byte/text string
di.d.off++
if isBreakFlag(di.d.data[di.d.off]) {
di.d.off++
switch initialByte {
case cborByteStringWithIndefiniteLengthHead:
// indefinite-length bytes with no chunks.
di.w.WriteString(`''_`)
return nil
case cborTextStringWithIndefiniteLengthHead:
// indefinite-length text with no chunks.
di.w.WriteString(`""_`)
return nil
}
}
di.w.WriteString("(_ ")
i := 0
for !di.d.foundBreak() {
if i > 0 {
di.w.WriteString(", ")
}
i++
// wellformedIndefiniteString() already checked that the next item is a byte/text string.
if err := di.item(); err != nil {
return err
}
}
di.w.WriteByte(')')
return nil
case cborArrayWithIndefiniteLengthHead: // indefinite-length array
di.d.off++
di.w.WriteString("[_ ")
i := 0
for !di.d.foundBreak() {
if i > 0 {
di.w.WriteString(", ")
}
i++
if err := di.item(); err != nil {
return err
}
}
di.w.WriteByte(']')
return nil
case cborMapWithIndefiniteLengthHead: // indefinite-length map
di.d.off++
di.w.WriteString("{_ ")
i := 0
for !di.d.foundBreak() {
if i > 0 {
di.w.WriteString(", ")
}
i++
// key
if err := di.item(); err != nil {
return err
}
di.w.WriteString(": ")
// value
if err := di.item(); err != nil {
return err
}
}
di.w.WriteByte('}')
return nil
}
t := di.d.nextCBORType()
switch t {
case cborTypePositiveInt:
_, _, val := di.d.getHead()
di.w.WriteString(strconv.FormatUint(val, 10))
return nil
case cborTypeNegativeInt:
_, _, val := di.d.getHead()
if val > math.MaxInt64 {
// CBOR negative integer overflows int64, use big.Int to store value.
bi := new(big.Int)
bi.SetUint64(val)
bi.Add(bi, big.NewInt(1))
bi.Neg(bi)
di.w.WriteString(bi.String())
return nil
}
nValue := int64(-1) ^ int64(val)
di.w.WriteString(strconv.FormatInt(nValue, 10))
return nil
case cborTypeByteString:
b, _ := di.d.parseByteString()
return di.encodeByteString(b)
case cborTypeTextString:
b, err := di.d.parseTextString()
if err != nil {
return err
}
return di.encodeTextString(string(b), '"')
case cborTypeArray:
_, _, val := di.d.getHead()
count := int(val)
di.w.WriteByte('[')
for i := 0; i < count; i++ {
if i > 0 {
di.w.WriteString(", ")
}
if err := di.item(); err != nil {
return err
}
}
di.w.WriteByte(']')
return nil
case cborTypeMap:
_, _, val := di.d.getHead()
count := int(val)
di.w.WriteByte('{')
for i := 0; i < count; i++ {
if i > 0 {
di.w.WriteString(", ")
}
// key
if err := di.item(); err != nil {
return err
}
di.w.WriteString(": ")
// value
if err := di.item(); err != nil {
return err
}
}
di.w.WriteByte('}')
return nil
case cborTypeTag:
_, _, tagNum := di.d.getHead()
switch tagNum {
case tagNumUnsignedBignum:
if nt := di.d.nextCBORType(); nt != cborTypeByteString {
return newInadmissibleTagContentTypeError(
tagNumUnsignedBignum,
"byte string",
nt.String())
}
b, _ := di.d.parseByteString()
bi := new(big.Int).SetBytes(b)
di.w.WriteString(bi.String())
return nil
case tagNumNegativeBignum:
if nt := di.d.nextCBORType(); nt != cborTypeByteString {
return newInadmissibleTagContentTypeError(
tagNumNegativeBignum,
"byte string",
nt.String(),
)
}
b, _ := di.d.parseByteString()
bi := new(big.Int).SetBytes(b)
bi.Add(bi, big.NewInt(1))
bi.Neg(bi)
di.w.WriteString(bi.String())
return nil
default:
di.w.WriteString(strconv.FormatUint(tagNum, 10))
di.w.WriteByte('(')
if err := di.item(); err != nil {
return err
}
di.w.WriteByte(')')
return nil
}
case cborTypePrimitives:
_, ai, val := di.d.getHead()
switch ai {
case additionalInformationAsFalse:
di.w.WriteString("false")
return nil
case additionalInformationAsTrue:
di.w.WriteString("true")
return nil
case additionalInformationAsNull:
di.w.WriteString("null")
return nil
case additionalInformationAsUndefined:
di.w.WriteString("undefined")
return nil
case additionalInformationAsFloat16,
additionalInformationAsFloat32,
additionalInformationAsFloat64:
return di.encodeFloat(ai, val)
default:
di.w.WriteString("simple(")
di.w.WriteString(strconv.FormatUint(val, 10))
di.w.WriteByte(')')
return nil
}
}
return nil
}
// writeU16 formats a rune as "\uxxxx"
func (di *diagnose) writeU16(val rune) {
di.w.WriteString("\\u")
var in [2]byte
in[0] = byte(val >> 8)
in[1] = byte(val)
sz := hex.EncodedLen(len(in))
di.w.Grow(sz)
dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
hex.Encode(dst, in[:])
di.w.Write(dst)
}
var rawBase32Encoding = base32.StdEncoding.WithPadding(base32.NoPadding)
var rawBase32HexEncoding = base32.HexEncoding.WithPadding(base32.NoPadding)
func (di *diagnose) encodeByteString(val []byte) error {
if len(val) > 0 {
if di.dm.byteStringText && utf8.Valid(val) {
return di.encodeTextString(string(val), '\'')
}
if di.dm.byteStringEmbeddedCBOR {
di2 := newDiagnose(val, di.dm.decMode, di.dm)
// Embedded CBOR in a byte string is always notated as a CBOR sequence.
if str, err := di2.diag(true); err == nil {
di.w.WriteString("<<")
di.w.WriteString(str)
di.w.WriteString(">>")
return nil
}
}
}
switch di.dm.byteStringEncoding {
case ByteStringBase16Encoding:
di.w.WriteString("h'")
if di.dm.byteStringHexWhitespace {
sz := hex.EncodedLen(len(val))
if len(val) > 0 {
sz += len(val) - 1
}
di.w.Grow(sz)
dst := di.w.Bytes()[di.w.Len():]
for i := range val {
if i > 0 {
dst = append(dst, ' ')
}
hex.Encode(dst[len(dst):len(dst)+2], val[i:i+1])
dst = dst[:len(dst)+2]
}
di.w.Write(dst)
} else {
sz := hex.EncodedLen(len(val))
di.w.Grow(sz)
dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
hex.Encode(dst, val)
di.w.Write(dst)
}
di.w.WriteByte('\'')
return nil
case ByteStringBase32Encoding:
di.w.WriteString("b32'")
sz := rawBase32Encoding.EncodedLen(len(val))
di.w.Grow(sz)
dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
rawBase32Encoding.Encode(dst, val)
di.w.Write(dst)
di.w.WriteByte('\'')
return nil
case ByteStringBase32HexEncoding:
di.w.WriteString("h32'")
sz := rawBase32HexEncoding.EncodedLen(len(val))
di.w.Grow(sz)
dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
rawBase32HexEncoding.Encode(dst, val)
di.w.Write(dst)
di.w.WriteByte('\'')
return nil
case ByteStringBase64Encoding:
di.w.WriteString("b64'")
sz := base64.RawURLEncoding.EncodedLen(len(val))
di.w.Grow(sz)
dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
base64.RawURLEncoding.Encode(dst, val)
di.w.Write(dst)
di.w.WriteByte('\'')
return nil
default:
// It should not be possible for users to construct a *diagMode with an invalid byte
// string encoding.
panic(fmt.Sprintf("diagmode has invalid ByteStringEncoding %v", di.dm.byteStringEncoding))
}
}
const utf16SurrSelf = rune(0x10000)
// quote should be either `'` or `"`
func (di *diagnose) encodeTextString(val string, quote byte) error {
di.w.WriteByte(quote)
for i := 0; i < len(val); {
if b := val[i]; b < utf8.RuneSelf {
switch {
case b == '\t', b == '\n', b == '\r', b == '\\', b == quote:
di.w.WriteByte('\\')
switch b {
case '\t':
b = 't'
case '\n':
b = 'n'
case '\r':
b = 'r'
}
di.w.WriteByte(b)
case b >= ' ' && b <= '~':
di.w.WriteByte(b)
default:
di.writeU16(rune(b))
}
i++
continue
}
c, size := utf8.DecodeRuneInString(val[i:])
switch {
case c == utf8.RuneError:
return &SemanticError{"cbor: invalid UTF-8 string"}
case c < utf16SurrSelf:
di.writeU16(c)
default:
c1, c2 := utf16.EncodeRune(c)
di.writeU16(c1)
di.writeU16(c2)
}
i += size
}
di.w.WriteByte(quote)
return nil
}
func (di *diagnose) encodeFloat(ai byte, val uint64) error {
f64 := float64(0)
switch ai {
case additionalInformationAsFloat16:
f16 := float16.Frombits(uint16(val))
switch {
case f16.IsNaN():
di.w.WriteString("NaN")
return nil
case f16.IsInf(1):
di.w.WriteString("Infinity")
return nil
case f16.IsInf(-1):
di.w.WriteString("-Infinity")
return nil
default:
f64 = float64(f16.Float32())
}
case additionalInformationAsFloat32:
f32 := math.Float32frombits(uint32(val))
switch {
case f32 != f32:
di.w.WriteString("NaN")
return nil
case f32 > math.MaxFloat32:
di.w.WriteString("Infinity")
return nil
case f32 < -math.MaxFloat32:
di.w.WriteString("-Infinity")
return nil
default:
f64 = float64(f32)
}
case additionalInformationAsFloat64:
f64 = math.Float64frombits(val)
switch {
case f64 != f64:
di.w.WriteString("NaN")
return nil
case f64 > math.MaxFloat64:
di.w.WriteString("Infinity")
return nil
case f64 < -math.MaxFloat64:
di.w.WriteString("-Infinity")
return nil
}
}
// Use ES6 number to string conversion which should match most JSON generators.
// Inspired by https://github.com/golang/go/blob/4df10fba1687a6d4f51d7238a403f8f2298f6a16/src/encoding/json/encode.go#L585
const bitSize = 64
b := make([]byte, 0, 32)
if abs := math.Abs(f64); abs != 0 && (abs < 1e-6 || abs >= 1e21) {
b = strconv.AppendFloat(b, f64, 'e', -1, bitSize)
// clean up e-09 to e-9
n := len(b)
if n >= 4 && string(b[n-4:n-1]) == "e-0" {
b = append(b[:n-2], b[n-1])
}
} else {
b = strconv.AppendFloat(b, f64, 'f', -1, bitSize)
}
// add decimal point and trailing zero if needed
if bytes.IndexByte(b, '.') < 0 {
if i := bytes.IndexByte(b, 'e'); i < 0 {
b = append(b, '.', '0')
} else {
b = append(b[:i+2], b[i:]...)
b[i] = '.'
b[i+1] = '0'
}
}
di.w.WriteString(string(b))
if di.dm.floatPrecisionIndicator {
switch ai {
case additionalInformationAsFloat16:
di.w.WriteString("_1")
return nil
case additionalInformationAsFloat32:
di.w.WriteString("_2")
return nil
case additionalInformationAsFloat64:
di.w.WriteString("_3")
return nil
}
}
return nil
}

129
vendor/github.com/fxamacker/cbor/v2/doc.go generated vendored Normal file

@ -0,0 +1,129 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
/*
Package cbor is a modern CBOR codec (RFC 8949 & RFC 7049) with CBOR tags,
Go struct tags (toarray/keyasint/omitempty), Core Deterministic Encoding,
CTAP2, Canonical CBOR, float64->32->16, and duplicate map key detection.
Encoding options allow "preferred serialization" by encoding integers and floats
to their smallest forms (e.g. float16) when values fit.
Struct tags like "keyasint", "toarray" and "omitempty" make CBOR data smaller
and easier to use with structs.
For example, "toarray" tag makes struct fields encode to CBOR array elements. And
"keyasint" makes a field encode to an element of CBOR map with specified int key.
Latest docs can be viewed at https://github.com/fxamacker/cbor#cbor-library-in-go
# Basics
The Quick Start guide is at https://github.com/fxamacker/cbor#quick-start
Function signatures identical to encoding/json include:
Marshal, Unmarshal, NewEncoder, NewDecoder, (*Encoder).Encode, (*Decoder).Decode.
Standard interfaces include:
BinaryMarshaler, BinaryUnmarshaler, Marshaler, and Unmarshaler.
Custom encoding and decoding is possible by implementing standard interfaces for
user-defined Go types.
Codec functions are available at package-level (using defaults options) or by
creating modes from options at runtime.
"Mode" in this API means definite way of encoding (EncMode) or decoding (DecMode).
EncMode and DecMode interfaces are created from EncOptions or DecOptions structs.
em, err := cbor.EncOptions{...}.EncMode()
em, err := cbor.CanonicalEncOptions().EncMode()
em, err := cbor.CTAP2EncOptions().EncMode()
Modes use immutable options to avoid side-effects and simplify concurrency. Behavior of
modes won't accidentally change at runtime after they're created.
Modes are intended to be reused and are safe for concurrent use.
EncMode and DecMode Interfaces
// EncMode interface uses immutable options and is safe for concurrent use.
type EncMode interface {
Marshal(v interface{}) ([]byte, error)
NewEncoder(w io.Writer) *Encoder
EncOptions() EncOptions // returns copy of options
}
// DecMode interface uses immutable options and is safe for concurrent use.
type DecMode interface {
Unmarshal(data []byte, v interface{}) error
NewDecoder(r io.Reader) *Decoder
DecOptions() DecOptions // returns copy of options
}
Using Default Encoding Mode
b, err := cbor.Marshal(v)
encoder := cbor.NewEncoder(w)
err = encoder.Encode(v)
Using Default Decoding Mode
err := cbor.Unmarshal(b, &v)
decoder := cbor.NewDecoder(r)
err = decoder.Decode(&v)
Creating and Using Encoding Modes
// Create EncOptions using either struct literal or a function.
opts := cbor.CanonicalEncOptions()
// If needed, modify encoding options
opts.Time = cbor.TimeUnix
// Create reusable EncMode interface with immutable options, safe for concurrent use.
em, err := opts.EncMode()
// Use EncMode like encoding/json, with same function signatures.
b, err := em.Marshal(v)
// or
encoder := em.NewEncoder(w)
err := encoder.Encode(v)
// NOTE: Both em.Marshal(v) and encoder.Encode(v) use encoding options
// specified during creation of em (encoding mode).
# CBOR Options
Predefined Encoding Options: https://github.com/fxamacker/cbor#predefined-encoding-options
Encoding Options: https://github.com/fxamacker/cbor#encoding-options
Decoding Options: https://github.com/fxamacker/cbor#decoding-options
# Struct Tags
Struct tags like `cbor:"name,omitempty"` and `json:"name,omitempty"` work as expected.
If both struct tags are specified then `cbor` is used.
Struct tags like "keyasint", "toarray", and "omitempty" make it easy to use
very compact formats like COSE and CWT (CBOR Web Tokens) with structs.
For example, "toarray" makes struct fields encode to array elements. And "keyasint"
makes struct fields encode to elements of CBOR map with int keys.
https://raw.githubusercontent.com/fxamacker/images/master/cbor/v2.0.0/cbor_easy_api.png
Struct tags are listed at https://github.com/fxamacker/cbor#struct-tags-1
# Tests and Fuzzing
Over 375 tests are included in this package. Cover-guided fuzzing is handled by
a private fuzzer that replaced fxamacker/cbor-fuzz years ago.
*/
package cbor

1989
vendor/github.com/fxamacker/cbor/v2/encode.go generated vendored Normal file

File diff suppressed because it is too large

94
vendor/github.com/fxamacker/cbor/v2/encode_map.go generated vendored Normal file

@ -0,0 +1,94 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
//go:build go1.20
package cbor
import (
"bytes"
"reflect"
"sync"
)
type mapKeyValueEncodeFunc struct {
kf, ef encodeFunc
kpool, vpool sync.Pool
}
func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *bytes.Buffer, em *encMode, v reflect.Value, kvs []keyValue) error {
iterk := me.kpool.Get().(*reflect.Value)
defer func() {
iterk.SetZero()
me.kpool.Put(iterk)
}()
iterv := me.vpool.Get().(*reflect.Value)
defer func() {
iterv.SetZero()
me.vpool.Put(iterv)
}()
if kvs == nil {
for i, iter := 0, v.MapRange(); iter.Next(); i++ {
iterk.SetIterKey(iter)
iterv.SetIterValue(iter)
if err := me.kf(e, em, *iterk); err != nil {
return err
}
if err := me.ef(e, em, *iterv); err != nil {
return err
}
}
return nil
}
initial := e.Len()
for i, iter := 0, v.MapRange(); iter.Next(); i++ {
iterk.SetIterKey(iter)
iterv.SetIterValue(iter)
offset := e.Len()
if err := me.kf(e, em, *iterk); err != nil {
return err
}
valueOffset := e.Len()
if err := me.ef(e, em, *iterv); err != nil {
return err
}
kvs[i] = keyValue{
offset: offset - initial,
valueOffset: valueOffset - initial,
nextOffset: e.Len() - initial,
}
}
return nil
}
func getEncodeMapFunc(t reflect.Type) encodeFunc {
kf, _ := getEncodeFunc(t.Key())
ef, _ := getEncodeFunc(t.Elem())
if kf == nil || ef == nil {
return nil
}
mkv := &mapKeyValueEncodeFunc{
kf: kf,
ef: ef,
kpool: sync.Pool{
New: func() interface{} {
rk := reflect.New(t.Key()).Elem()
return &rk
},
},
vpool: sync.Pool{
New: func() interface{} {
rv := reflect.New(t.Elem()).Elem()
return &rv
},
},
}
return mapEncodeFunc{
e: mkv.encodeKeyValues,
}.encode
}


@ -0,0 +1,60 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
//go:build !go1.20
package cbor
import (
"bytes"
"reflect"
)
type mapKeyValueEncodeFunc struct {
kf, ef encodeFunc
}
func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *bytes.Buffer, em *encMode, v reflect.Value, kvs []keyValue) error {
if kvs == nil {
for i, iter := 0, v.MapRange(); iter.Next(); i++ {
if err := me.kf(e, em, iter.Key()); err != nil {
return err
}
if err := me.ef(e, em, iter.Value()); err != nil {
return err
}
}
return nil
}
initial := e.Len()
for i, iter := 0, v.MapRange(); iter.Next(); i++ {
offset := e.Len()
if err := me.kf(e, em, iter.Key()); err != nil {
return err
}
valueOffset := e.Len()
if err := me.ef(e, em, iter.Value()); err != nil {
return err
}
kvs[i] = keyValue{
offset: offset - initial,
valueOffset: valueOffset - initial,
nextOffset: e.Len() - initial,
}
}
return nil
}
func getEncodeMapFunc(t reflect.Type) encodeFunc {
kf, _ := getEncodeFunc(t.Key())
ef, _ := getEncodeFunc(t.Elem())
if kf == nil || ef == nil {
return nil
}
mkv := &mapKeyValueEncodeFunc{kf: kf, ef: ef}
return mapEncodeFunc{
e: mkv.encodeKeyValues,
}.encode
}

69
vendor/github.com/fxamacker/cbor/v2/simplevalue.go generated vendored Normal file

@ -0,0 +1,69 @@
package cbor
import (
"errors"
"fmt"
"reflect"
)
// SimpleValue represents CBOR simple value.
// CBOR simple value is:
// - an extension point like CBOR tag.
// - a subset of CBOR major type 7 that isn't floating-point.
// - "identified by a number between 0 and 255, but distinct from that number itself".
// For example, "a simple value 2 is not equivalent to an integer 2" as a CBOR map key.
//
// CBOR simple values identified by 20..23 are: "false", "true" , "null", and "undefined".
// Other CBOR simple values are currently unassigned/reserved by IANA.
type SimpleValue uint8
var (
typeSimpleValue = reflect.TypeOf(SimpleValue(0))
)
// MarshalCBOR encodes SimpleValue as CBOR simple value (major type 7).
func (sv SimpleValue) MarshalCBOR() ([]byte, error) {
// RFC 8949 3.3. Floating-Point Numbers and Values with No Content says:
// "An encoder MUST NOT issue two-byte sequences that start with 0xf8
// (major type 7, additional information 24) and continue with a byte
// less than 0x20 (32 decimal). Such sequences are not well-formed.
// (This implies that an encoder cannot encode false, true, null, or
// undefined in two-byte sequences and that only the one-byte variants
// of these are well-formed; more generally speaking, each simple value
// only has a single representation variant)."
switch {
case sv <= maxSimpleValueInAdditionalInformation:
return []byte{byte(cborTypePrimitives) | byte(sv)}, nil
case sv >= minSimpleValueIn1ByteArgument:
return []byte{byte(cborTypePrimitives) | additionalInformationWith1ByteArgument, byte(sv)}, nil
default:
return nil, &UnsupportedValueError{msg: fmt.Sprintf("SimpleValue(%d)", sv)}
}
}
// UnmarshalCBOR decodes CBOR simple value (major type 7) to SimpleValue.
func (sv *SimpleValue) UnmarshalCBOR(data []byte) error {
if sv == nil {
return errors.New("cbor.SimpleValue: UnmarshalCBOR on nil pointer")
}
d := decoder{data: data, dm: defaultDecMode}
typ, ai, val := d.getHead()
if typ != cborTypePrimitives {
return &UnmarshalTypeError{CBORType: typ.String(), GoType: "SimpleValue"}
}
if ai > additionalInformationWith1ByteArgument {
return &UnmarshalTypeError{CBORType: typ.String(), GoType: "SimpleValue", errorMsg: "not simple values"}
}
// It is safe to cast val to uint8 here because
// - data is already verified to be well-formed CBOR simple value and
// - val is <= math.MaxUint8.
*sv = SimpleValue(val)
return nil
}

277
vendor/github.com/fxamacker/cbor/v2/stream.go generated vendored Normal file

@ -0,0 +1,277 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"bytes"
"errors"
"io"
"reflect"
)
// Decoder reads and decodes CBOR values from io.Reader.
type Decoder struct {
r io.Reader
d decoder
buf []byte
off int // next read offset in buf
bytesRead int
}
// NewDecoder returns a new decoder that reads and decodes from r using
// the default decoding options.
func NewDecoder(r io.Reader) *Decoder {
return defaultDecMode.NewDecoder(r)
}
// Decode reads CBOR value and decodes it into the value pointed to by v.
func (dec *Decoder) Decode(v interface{}) error {
_, err := dec.readNext()
if err != nil {
// Return validation error or read error.
return err
}
dec.d.reset(dec.buf[dec.off:])
err = dec.d.value(v)
// Increment dec.off even if decoding err is not nil because
// dec.d.off points to the next CBOR data item if current
// CBOR data item is valid but failed to be decoded into v.
// This allows next CBOR data item to be decoded in next
// call to this function.
dec.off += dec.d.off
dec.bytesRead += dec.d.off
return err
}
// Skip skips to the next CBOR data item (if there is any),
// otherwise it returns an error such as io.EOF, io.ErrUnexpectedEOF, etc.
func (dec *Decoder) Skip() error {
n, err := dec.readNext()
if err != nil {
// Return validation error or read error.
return err
}
dec.off += n
dec.bytesRead += n
return nil
}
// NumBytesRead returns the number of bytes read.
func (dec *Decoder) NumBytesRead() int {
return dec.bytesRead
}
// Buffered returns a reader for data remaining in Decoder's buffer.
// Returned reader is valid until the next call to Decode or Skip.
func (dec *Decoder) Buffered() io.Reader {
return bytes.NewReader(dec.buf[dec.off:])
}
// readNext() reads next CBOR data item from Reader to buffer.
// It returns the size of next CBOR data item.
// It also returns validation error or read error if any.
func (dec *Decoder) readNext() (int, error) {
var readErr error
var validErr error
for {
// Process any unread data in dec.buf.
if dec.off < len(dec.buf) {
dec.d.reset(dec.buf[dec.off:])
off := dec.off // Save offset before data validation
validErr = dec.d.wellformed(true, false)
dec.off = off // Restore offset
if validErr == nil {
return dec.d.off, nil
}
if validErr != io.ErrUnexpectedEOF {
return 0, validErr
}
// Process last read error on io.ErrUnexpectedEOF.
if readErr != nil {
if readErr == io.EOF {
// current CBOR data item is incomplete.
return 0, io.ErrUnexpectedEOF
}
return 0, readErr
}
}
// More data is needed and there was no read error.
var n int
for n == 0 {
n, readErr = dec.read()
if n == 0 && readErr != nil {
// No more data can be read and read error is encountered.
// At this point, validErr is either nil or io.ErrUnexpectedEOF.
if readErr == io.EOF {
if validErr == io.ErrUnexpectedEOF {
// current CBOR data item is incomplete.
return 0, io.ErrUnexpectedEOF
}
}
return 0, readErr
}
}
// At this point, dec.buf contains new data from last read (n > 0).
}
}
// read() reads data from Reader to buffer.
// It returns number of bytes read and any read error encountered.
// Postconditions:
// - dec.buf contains previously unread data and new data.
// - dec.off is 0.
func (dec *Decoder) read() (int, error) {
// Grow buf if needed.
const minRead = 512
if cap(dec.buf)-len(dec.buf)+dec.off < minRead {
oldUnreadBuf := dec.buf[dec.off:]
dec.buf = make([]byte, len(dec.buf)-dec.off, 2*cap(dec.buf)+minRead)
dec.overwriteBuf(oldUnreadBuf)
}
// Copy unread data over read data and reset off to 0.
if dec.off > 0 {
dec.overwriteBuf(dec.buf[dec.off:])
}
// Read from reader and reslice buf.
n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
dec.buf = dec.buf[0 : len(dec.buf)+n]
return n, err
}
func (dec *Decoder) overwriteBuf(newBuf []byte) {
n := copy(dec.buf, newBuf)
dec.buf = dec.buf[:n]
dec.off = 0
}
// Encoder writes CBOR values to io.Writer.
type Encoder struct {
w io.Writer
em *encMode
indefTypes []cborType
}
// NewEncoder returns a new encoder that writes to w using the default encoding options.
func NewEncoder(w io.Writer) *Encoder {
return defaultEncMode.NewEncoder(w)
}
// Encode writes the CBOR encoding of v.
func (enc *Encoder) Encode(v interface{}) error {
if len(enc.indefTypes) > 0 && v != nil {
indefType := enc.indefTypes[len(enc.indefTypes)-1]
if indefType == cborTypeTextString {
k := reflect.TypeOf(v).Kind()
if k != reflect.String {
return errors.New("cbor: cannot encode item type " + k.String() + " for indefinite-length text string")
}
} else if indefType == cborTypeByteString {
t := reflect.TypeOf(v)
k := t.Kind()
if (k != reflect.Array && k != reflect.Slice) || t.Elem().Kind() != reflect.Uint8 {
return errors.New("cbor: cannot encode item type " + k.String() + " for indefinite-length byte string")
}
}
}
buf := getEncodeBuffer()
err := encode(buf, enc.em, reflect.ValueOf(v))
if err == nil {
_, err = enc.w.Write(buf.Bytes())
}
putEncodeBuffer(buf)
return err
}
// StartIndefiniteByteString starts byte string encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode definite-length byte strings
// ("chunks") as one contiguous string until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteByteString() error {
return enc.startIndefinite(cborTypeByteString)
}
// StartIndefiniteTextString starts text string encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode definite-length text strings
// ("chunks") as one contiguous string until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteTextString() error {
return enc.startIndefinite(cborTypeTextString)
}
// StartIndefiniteArray starts array encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode elements of the array
// until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteArray() error {
return enc.startIndefinite(cborTypeArray)
}
// StartIndefiniteMap starts map encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode elements of the map
// until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteMap() error {
return enc.startIndefinite(cborTypeMap)
}
// EndIndefinite closes last opened indefinite length value.
func (enc *Encoder) EndIndefinite() error {
if len(enc.indefTypes) == 0 {
return errors.New("cbor: cannot encode \"break\" code outside indefinite length values")
}
_, err := enc.w.Write([]byte{cborBreakFlag})
if err == nil {
enc.indefTypes = enc.indefTypes[:len(enc.indefTypes)-1]
}
return err
}
var cborIndefHeader = map[cborType][]byte{
cborTypeByteString: {cborByteStringWithIndefiniteLengthHead},
cborTypeTextString: {cborTextStringWithIndefiniteLengthHead},
cborTypeArray: {cborArrayWithIndefiniteLengthHead},
cborTypeMap: {cborMapWithIndefiniteLengthHead},
}
func (enc *Encoder) startIndefinite(typ cborType) error {
if enc.em.indefLength == IndefLengthForbidden {
return &IndefiniteLengthError{typ}
}
_, err := enc.w.Write(cborIndefHeader[typ])
if err == nil {
enc.indefTypes = append(enc.indefTypes, typ)
}
return err
}
// RawMessage is a raw encoded CBOR value.
type RawMessage []byte
// MarshalCBOR returns m, or CBOR null if m is nil or empty.
func (m RawMessage) MarshalCBOR() ([]byte, error) {
if len(m) == 0 {
return cborNil, nil
}
return m, nil
}
// UnmarshalCBOR creates a copy of data and saves to *m.
func (m *RawMessage) UnmarshalCBOR(data []byte) error {
if m == nil {
return errors.New("cbor.RawMessage: UnmarshalCBOR on nil pointer")
}
*m = append((*m)[0:0], data...)
return nil
}
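A hedged sketch of the streaming Encoder and Decoder defined above: an indefinite-length array is written chunk by chunk, then read back in a Decode loop. The default encode and decode modes are assumed to allow indefinite lengths:

```
package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/fxamacker/cbor/v2"
)

func main() {
	var buf bytes.Buffer

	// Stream an indefinite-length array of three integers.
	enc := cbor.NewEncoder(&buf)
	if err := enc.StartIndefiniteArray(); err != nil {
		panic(err)
	}
	for _, n := range []int{1, 2, 3} {
		if err := enc.Encode(n); err != nil {
			panic(err)
		}
	}
	if err := enc.EndIndefinite(); err != nil {
		panic(err)
	}

	// Read streamed data items back until io.EOF.
	dec := cbor.NewDecoder(&buf)
	for {
		var v []int
		err := dec.Decode(&v)
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		fmt.Println(v) // [1 2 3]
	}
}
```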

260
vendor/github.com/fxamacker/cbor/v2/structfields.go generated vendored Normal file
View File

@ -0,0 +1,260 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"reflect"
"sort"
"strings"
)
type field struct {
name string
nameAsInt int64 // used by decoder to match field name with CBOR int
cborName []byte
cborNameByteString []byte // major type 2 name encoding iff cborName has major type 3
idx []int
typ reflect.Type
ef encodeFunc
ief isEmptyFunc
typInfo *typeInfo // used by decoder to reuse type info
tagged bool // used to choose dominant field (at the same level tagged fields dominate untagged fields)
omitEmpty bool // used to skip empty field
keyAsInt bool // used to encode/decode field name as int
}
type fields []*field
// indexFieldSorter sorts fields by field idx at each level, breaking ties with idx depth.
type indexFieldSorter struct {
fields fields
}
func (x *indexFieldSorter) Len() int {
return len(x.fields)
}
func (x *indexFieldSorter) Swap(i, j int) {
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
}
func (x *indexFieldSorter) Less(i, j int) bool {
iIdx, jIdx := x.fields[i].idx, x.fields[j].idx
for k := 0; k < len(iIdx) && k < len(jIdx); k++ {
if iIdx[k] != jIdx[k] {
return iIdx[k] < jIdx[k]
}
}
return len(iIdx) <= len(jIdx)
}
// nameLevelAndTagFieldSorter sorts fields by field name, idx depth, and presence of tag.
type nameLevelAndTagFieldSorter struct {
fields fields
}
func (x *nameLevelAndTagFieldSorter) Len() int {
return len(x.fields)
}
func (x *nameLevelAndTagFieldSorter) Swap(i, j int) {
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
}
func (x *nameLevelAndTagFieldSorter) Less(i, j int) bool {
fi, fj := x.fields[i], x.fields[j]
if fi.name != fj.name {
return fi.name < fj.name
}
if len(fi.idx) != len(fj.idx) {
return len(fi.idx) < len(fj.idx)
}
if fi.tagged != fj.tagged {
return fi.tagged
}
return i < j // Field i and j have the same name, depth, and tagged status. Nothing else matters.
}
// getFields returns visible fields of struct type t following visibility rules for JSON encoding.
func getFields(t reflect.Type) (flds fields, structOptions string) {
// Get special field "_" tag options
if f, ok := t.FieldByName("_"); ok {
tag := f.Tag.Get("cbor")
if tag != "-" {
structOptions = tag
}
}
// nTypes contains next level anonymous fields' types and indexes
// (there can be multiple fields of the same type at the same level)
flds, nTypes := appendFields(t, nil, nil, nil)
if len(nTypes) > 0 {
var cTypes map[reflect.Type][][]int // current level anonymous fields' types and indexes
vTypes := map[reflect.Type]bool{t: true} // visited field types at less nested levels
for len(nTypes) > 0 {
cTypes, nTypes = nTypes, nil
for t, idx := range cTypes {
// If there are multiple anonymous fields of the same struct type at the same level, all are ignored.
if len(idx) > 1 {
continue
}
// Anonymous field of the same type at deeper nested level is ignored.
if vTypes[t] {
continue
}
vTypes[t] = true
flds, nTypes = appendFields(t, idx[0], flds, nTypes)
}
}
}
sort.Sort(&nameLevelAndTagFieldSorter{flds})
// Keep visible fields.
j := 0 // index of next unique field
for i := 0; i < len(flds); {
name := flds[i].name
if i == len(flds)-1 || // last field
name != flds[i+1].name || // field i has unique field name
len(flds[i].idx) < len(flds[i+1].idx) || // field i is at a less nested level than field i+1
(flds[i].tagged && !flds[i+1].tagged) { // field i is tagged while field i+1 is not
flds[j] = flds[i]
j++
}
// Skip fields with the same field name.
for i++; i < len(flds) && name == flds[i].name; i++ { //nolint:revive
}
}
if j != len(flds) {
flds = flds[:j]
}
// Sort fields by field index
sort.Sort(&indexFieldSorter{flds})
return flds, structOptions
}
// appendFields appends type t's exportable fields to flds and anonymous struct fields to nTypes.
func appendFields(
t reflect.Type,
idx []int,
flds fields,
nTypes map[reflect.Type][][]int,
) (
_flds fields,
_nTypes map[reflect.Type][][]int,
) {
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
ft := f.Type
for ft.Kind() == reflect.Ptr {
ft = ft.Elem()
}
if !isFieldExportable(f, ft.Kind()) {
continue
}
tag := f.Tag.Get("cbor")
if tag == "" {
tag = f.Tag.Get("json")
}
if tag == "-" {
continue
}
tagged := tag != ""
// Parse field tag options
var tagFieldName string
var omitempty, keyasint bool
for j := 0; tag != ""; j++ {
var token string
idx := strings.IndexByte(tag, ',')
if idx == -1 {
token, tag = tag, ""
} else {
token, tag = tag[:idx], tag[idx+1:]
}
if j == 0 {
tagFieldName = token
} else {
switch token {
case "omitempty":
omitempty = true
case "keyasint":
keyasint = true
}
}
}
fieldName := tagFieldName
if tagFieldName == "" {
fieldName = f.Name
}
fIdx := make([]int, len(idx)+1)
copy(fIdx, idx)
fIdx[len(fIdx)-1] = i
if !f.Anonymous || ft.Kind() != reflect.Struct || tagFieldName != "" {
flds = append(flds, &field{
name: fieldName,
idx: fIdx,
typ: f.Type,
omitEmpty: omitempty,
keyAsInt: keyasint,
tagged: tagged})
} else {
if nTypes == nil {
nTypes = make(map[reflect.Type][][]int)
}
nTypes[ft] = append(nTypes[ft], fIdx)
}
}
return flds, nTypes
}
// isFieldExportable returns true if f is an exportable (regular or anonymous) field or
// a nonexportable anonymous field of struct type.
// A nonexportable anonymous field of struct type can contain exportable fields.
func isFieldExportable(f reflect.StructField, fk reflect.Kind) bool { //nolint:gocritic // ignore hugeParam
exportable := f.PkgPath == ""
return exportable || (f.Anonymous && fk == reflect.Struct)
}
type embeddedFieldNullPtrFunc func(reflect.Value) (reflect.Value, error)
// getFieldValue returns the field value of struct v by index. When encountering a nil pointer
// to an anonymous (embedded) struct field, f is called with the last traversed field value.
func getFieldValue(v reflect.Value, idx []int, f embeddedFieldNullPtrFunc) (fv reflect.Value, err error) {
fv = v
for i, n := range idx {
fv = fv.Field(n)
if i < len(idx)-1 {
if fv.Kind() == reflect.Ptr && fv.Type().Elem().Kind() == reflect.Struct {
if fv.IsNil() {
// Null pointer to embedded struct field
fv, err = f(fv)
if err != nil || !fv.IsValid() {
return fv, err
}
}
fv = fv.Elem()
}
}
}
return fv, nil
}
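A hedged sketch of the struct tag handling implemented above: "cbor" tags with keyasint and omitempty, the "json" tag fallback, and "-" to skip a field. The top-level Marshal/Unmarshal entry points are assumed, and mixing key styles in one struct is purely for illustration:

```
package main

import (
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

type record struct {
	ID   int    `cbor:"1,keyasint"`     // encoded with integer map key 1
	Name string `cbor:"name"`           // encoded with text map key "name"
	Note string `cbor:"note,omitempty"` // dropped when empty
	Addr string `json:"addr"`           // json tag is used when no cbor tag is present
	Skip string `cbor:"-"`              // always ignored
}

func main() {
	b, err := cbor.Marshal(record{ID: 7, Name: "a", Addr: "x"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("% x\n", b)

	var r record
	if err := cbor.Unmarshal(b, &r); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", r)
}
```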

299
vendor/github.com/fxamacker/cbor/v2/tag.go generated vendored Normal file
View File

@ -0,0 +1,299 @@
package cbor
import (
"errors"
"fmt"
"reflect"
"sync"
)
// Tag represents CBOR tag data, including tag number and unmarshaled tag content. Marshaling and
// unmarshaling of tag content is subject to any encode and decode options that would apply to
// enclosed data item if it were to appear outside of a tag.
type Tag struct {
Number uint64
Content interface{}
}
// RawTag represents CBOR tag data, including tag number and raw tag content.
// RawTag implements Unmarshaler and Marshaler interfaces.
type RawTag struct {
Number uint64
Content RawMessage
}
// UnmarshalCBOR sets *t with tag number and raw tag content copied from data.
func (t *RawTag) UnmarshalCBOR(data []byte) error {
if t == nil {
return errors.New("cbor.RawTag: UnmarshalCBOR on nil pointer")
}
// Decoding CBOR null and undefined to cbor.RawTag is a no-op.
if len(data) == 1 && (data[0] == 0xf6 || data[0] == 0xf7) {
return nil
}
d := decoder{data: data, dm: defaultDecMode}
// Unmarshal tag number.
typ, _, num := d.getHead()
if typ != cborTypeTag {
return &UnmarshalTypeError{CBORType: typ.String(), GoType: typeRawTag.String()}
}
t.Number = num
// Unmarshal tag content.
c := d.data[d.off:]
t.Content = make([]byte, len(c))
copy(t.Content, c)
return nil
}
// MarshalCBOR returns CBOR encoding of t.
func (t RawTag) MarshalCBOR() ([]byte, error) {
if t.Number == 0 && len(t.Content) == 0 {
// Marshal uninitialized cbor.RawTag
b := make([]byte, len(cborNil))
copy(b, cborNil)
return b, nil
}
e := getEncodeBuffer()
encodeHead(e, byte(cborTypeTag), t.Number)
content := t.Content
if len(content) == 0 {
content = cborNil
}
buf := make([]byte, len(e.Bytes())+len(content))
n := copy(buf, e.Bytes())
copy(buf[n:], content)
putEncodeBuffer(e)
return buf, nil
}
// DecTagMode specifies how decoder handles tag number.
type DecTagMode int
const (
// DecTagIgnored makes decoder ignore tag number (skips if present).
DecTagIgnored DecTagMode = iota
// DecTagOptional makes decoder verify tag number if it's present.
DecTagOptional
// DecTagRequired makes decoder verify tag number and tag number must be present.
DecTagRequired
maxDecTagMode
)
func (dtm DecTagMode) valid() bool {
return dtm >= 0 && dtm < maxDecTagMode
}
// EncTagMode specifies how encoder handles tag number.
type EncTagMode int
const (
// EncTagNone makes encoder not encode tag number.
EncTagNone EncTagMode = iota
// EncTagRequired makes encoder encode tag number.
EncTagRequired
maxEncTagMode
)
func (etm EncTagMode) valid() bool {
return etm >= 0 && etm < maxEncTagMode
}
// TagOptions specifies how encoder and decoder handle tag number.
type TagOptions struct {
DecTag DecTagMode
EncTag EncTagMode
}
// TagSet is an interface to add and remove tag info. It is used by EncMode and DecMode
// to provide CBOR tag support.
type TagSet interface {
// Add adds given tag number(s), content type, and tag options to TagSet.
Add(opts TagOptions, contentType reflect.Type, num uint64, nestedNum ...uint64) error
// Remove removes given tag content type from TagSet.
Remove(contentType reflect.Type)
tagProvider
}
type tagProvider interface {
getTagItemFromType(t reflect.Type) *tagItem
getTypeFromTagNum(num []uint64) reflect.Type
}
type tagItem struct {
num []uint64
cborTagNum []byte
contentType reflect.Type
opts TagOptions
}
func (t *tagItem) equalTagNum(num []uint64) bool {
// Fast path to compare 1 tag number
if len(t.num) == 1 && len(num) == 1 && t.num[0] == num[0] {
return true
}
if len(t.num) != len(num) {
return false
}
for i := 0; i < len(t.num); i++ {
if t.num[i] != num[i] {
return false
}
}
return true
}
type (
tagSet map[reflect.Type]*tagItem
syncTagSet struct {
sync.RWMutex
t tagSet
}
)
func (t tagSet) getTagItemFromType(typ reflect.Type) *tagItem {
return t[typ]
}
func (t tagSet) getTypeFromTagNum(num []uint64) reflect.Type {
for typ, tag := range t {
if tag.equalTagNum(num) {
return typ
}
}
return nil
}
// NewTagSet returns TagSet (safe for concurrency).
func NewTagSet() TagSet {
return &syncTagSet{t: make(map[reflect.Type]*tagItem)}
}
// Add adds given tag number(s), content type, and tag options to TagSet.
func (t *syncTagSet) Add(opts TagOptions, contentType reflect.Type, num uint64, nestedNum ...uint64) error {
if contentType == nil {
return errors.New("cbor: cannot add nil content type to TagSet")
}
for contentType.Kind() == reflect.Ptr {
contentType = contentType.Elem()
}
tag, err := newTagItem(opts, contentType, num, nestedNum...)
if err != nil {
return err
}
t.Lock()
defer t.Unlock()
for typ, ti := range t.t {
if typ == contentType {
return errors.New("cbor: content type " + contentType.String() + " already exists in TagSet")
}
if ti.equalTagNum(tag.num) {
return fmt.Errorf("cbor: tag number %v already exists in TagSet", tag.num)
}
}
t.t[contentType] = tag
return nil
}
// Remove removes given tag content type from TagSet.
func (t *syncTagSet) Remove(contentType reflect.Type) {
for contentType.Kind() == reflect.Ptr {
contentType = contentType.Elem()
}
t.Lock()
delete(t.t, contentType)
t.Unlock()
}
func (t *syncTagSet) getTagItemFromType(typ reflect.Type) *tagItem {
t.RLock()
ti := t.t[typ]
t.RUnlock()
return ti
}
func (t *syncTagSet) getTypeFromTagNum(num []uint64) reflect.Type {
t.RLock()
rt := t.t.getTypeFromTagNum(num)
t.RUnlock()
return rt
}
func newTagItem(opts TagOptions, contentType reflect.Type, num uint64, nestedNum ...uint64) (*tagItem, error) {
if opts.DecTag == DecTagIgnored && opts.EncTag == EncTagNone {
return nil, errors.New("cbor: cannot add tag with DecTagIgnored and EncTagNone options to TagSet")
}
if contentType.PkgPath() == "" || contentType.Kind() == reflect.Interface {
return nil, errors.New("cbor: can only add named types to TagSet, got " + contentType.String())
}
if contentType == typeTime {
return nil, errors.New("cbor: cannot add time.Time to TagSet, use EncOptions.TimeTag and DecOptions.TimeTag instead")
}
if contentType == typeBigInt {
return nil, errors.New("cbor: cannot add big.Int to TagSet, it's built-in and supported automatically")
}
if contentType == typeTag {
return nil, errors.New("cbor: cannot add cbor.Tag to TagSet")
}
if contentType == typeRawTag {
return nil, errors.New("cbor: cannot add cbor.RawTag to TagSet")
}
if num == 0 || num == 1 {
return nil, errors.New("cbor: cannot add tag number 0 or 1 to TagSet, use EncOptions.TimeTag and DecOptions.TimeTag instead")
}
if num == 2 || num == 3 {
return nil, errors.New("cbor: cannot add tag number 2 or 3 to TagSet, it's built-in and supported automatically")
}
if num == tagNumSelfDescribedCBOR {
return nil, errors.New("cbor: cannot add tag number 55799 to TagSet, it's built-in and ignored automatically")
}
te := tagItem{num: []uint64{num}, opts: opts, contentType: contentType}
te.num = append(te.num, nestedNum...)
// Cache encoded tag numbers
e := getEncodeBuffer()
for _, n := range te.num {
encodeHead(e, byte(cborTypeTag), n)
}
te.cborTagNum = make([]byte, e.Len())
copy(te.cborTagNum, e.Bytes())
putEncodeBuffer(e)
return &te, nil
}
var (
typeTag = reflect.TypeOf(Tag{})
typeRawTag = reflect.TypeOf(RawTag{})
)
// WrongTagError describes mismatch between CBOR tag and registered tag.
type WrongTagError struct {
RegisteredType reflect.Type
RegisteredTagNum []uint64
TagNum []uint64
}
func (e *WrongTagError) Error() string {
return fmt.Sprintf("cbor: wrong tag number for %s, got %v, expected %v", e.RegisteredType.String(), e.TagNum, e.RegisteredTagNum)
}
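A hedged sketch of registering a user-defined type with the TagSet above. EncOptions.EncModeWithTags and DecOptions.DecModeWithTags are assumed from the package's documented API, and tag number 65000 is purely illustrative (0-3 and 55799 are rejected by newTagItem):

```
package main

import (
	"fmt"
	"reflect"

	"github.com/fxamacker/cbor/v2"
)

// widget is a named type, which is what TagSet.Add requires.
type widget struct {
	Name string `cbor:"name"`
}

func main() {
	tags := cbor.NewTagSet()
	err := tags.Add(
		cbor.TagOptions{EncTag: cbor.EncTagRequired, DecTag: cbor.DecTagRequired},
		reflect.TypeOf(widget{}),
		65000,
	)
	if err != nil {
		panic(err)
	}

	em, err := cbor.EncOptions{}.EncModeWithTags(tags)
	if err != nil {
		panic(err)
	}
	dm, err := cbor.DecOptions{}.DecModeWithTags(tags)
	if err != nil {
		panic(err)
	}

	b, err := em.Marshal(widget{Name: "gear"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("% x\n", b) // d9 fd e8 ... (tag 65000 wrapping a map)

	var w widget
	if err := dm.Unmarshal(b, &w); err != nil {
		panic(err)
	}
	fmt.Println(w.Name) // gear
}
```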

394
vendor/github.com/fxamacker/cbor/v2/valid.go generated vendored Normal file
View File

@ -0,0 +1,394 @@
// Copyright (c) Faye Amacker. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
package cbor
import (
"encoding/binary"
"errors"
"io"
"math"
"strconv"
"github.com/x448/float16"
)
// SyntaxError is a description of a CBOR syntax error.
type SyntaxError struct {
msg string
}
func (e *SyntaxError) Error() string { return e.msg }
// SemanticError is a description of a CBOR semantic error.
type SemanticError struct {
msg string
}
func (e *SemanticError) Error() string { return e.msg }
// MaxNestedLevelError indicates that the max nested level of any combination of CBOR arrays/maps/tags was exceeded.
type MaxNestedLevelError struct {
maxNestedLevels int
}
func (e *MaxNestedLevelError) Error() string {
return "cbor: exceeded max nested level " + strconv.Itoa(e.maxNestedLevels)
}
// MaxArrayElementsError indicates that the max number of elements for a CBOR array was exceeded.
type MaxArrayElementsError struct {
maxArrayElements int
}
func (e *MaxArrayElementsError) Error() string {
return "cbor: exceeded max number of elements " + strconv.Itoa(e.maxArrayElements) + " for CBOR array"
}
// MaxMapPairsError indicates that the max number of key-value pairs for a CBOR map was exceeded.
type MaxMapPairsError struct {
maxMapPairs int
}
func (e *MaxMapPairsError) Error() string {
return "cbor: exceeded max number of key-value pairs " + strconv.Itoa(e.maxMapPairs) + " for CBOR map"
}
// IndefiniteLengthError indicates that a disallowed indefinite-length item was found.
type IndefiniteLengthError struct {
t cborType
}
func (e *IndefiniteLengthError) Error() string {
return "cbor: indefinite-length " + e.t.String() + " isn't allowed"
}
// TagsMdError indicates that a disallowed CBOR tag was found.
type TagsMdError struct {
}
func (e *TagsMdError) Error() string {
return "cbor: CBOR tag isn't allowed"
}
// ExtraneousDataError indicates that extraneous data was found following a well-formed CBOR data item.
type ExtraneousDataError struct {
numOfBytes int // number of bytes of extraneous data
index int // location of extraneous data
}
func (e *ExtraneousDataError) Error() string {
return "cbor: " + strconv.Itoa(e.numOfBytes) + " bytes of extraneous data starting at index " + strconv.Itoa(e.index)
}
// wellformed checks whether the CBOR data item is well-formed.
// allowExtraData indicates if extraneous data is allowed after the CBOR data item.
// - use allowExtraData = true when using Decoder.Decode()
// - use allowExtraData = false when using Unmarshal()
func (d *decoder) wellformed(allowExtraData bool, checkBuiltinTags bool) error {
if len(d.data) == d.off {
return io.EOF
}
_, err := d.wellformedInternal(0, checkBuiltinTags)
if err == nil {
if !allowExtraData && d.off != len(d.data) {
err = &ExtraneousDataError{len(d.data) - d.off, d.off}
}
}
return err
}
// wellformedInternal checks data's well-formedness and returns max depth and error.
func (d *decoder) wellformedInternal(depth int, checkBuiltinTags bool) (int, error) { //nolint:gocyclo
t, _, val, indefiniteLength, err := d.wellformedHeadWithIndefiniteLengthFlag()
if err != nil {
return 0, err
}
switch t {
case cborTypeByteString, cborTypeTextString:
if indefiniteLength {
if d.dm.indefLength == IndefLengthForbidden {
return 0, &IndefiniteLengthError{t}
}
return d.wellformedIndefiniteString(t, depth, checkBuiltinTags)
}
valInt := int(val)
if valInt < 0 {
// Detect integer overflow
return 0, errors.New("cbor: " + t.String() + " length " + strconv.FormatUint(val, 10) + " is too large, causing integer overflow")
}
if len(d.data)-d.off < valInt { // valInt+off may overflow integer
return 0, io.ErrUnexpectedEOF
}
d.off += valInt
case cborTypeArray, cborTypeMap:
depth++
if depth > d.dm.maxNestedLevels {
return 0, &MaxNestedLevelError{d.dm.maxNestedLevels}
}
if indefiniteLength {
if d.dm.indefLength == IndefLengthForbidden {
return 0, &IndefiniteLengthError{t}
}
return d.wellformedIndefiniteArrayOrMap(t, depth, checkBuiltinTags)
}
valInt := int(val)
if valInt < 0 {
// Detect integer overflow
return 0, errors.New("cbor: " + t.String() + " length " + strconv.FormatUint(val, 10) + " is too large, it would cause integer overflow")
}
if t == cborTypeArray {
if valInt > d.dm.maxArrayElements {
return 0, &MaxArrayElementsError{d.dm.maxArrayElements}
}
} else {
if valInt > d.dm.maxMapPairs {
return 0, &MaxMapPairsError{d.dm.maxMapPairs}
}
}
count := 1
if t == cborTypeMap {
count = 2
}
maxDepth := depth
for j := 0; j < count; j++ {
for i := 0; i < valInt; i++ {
var dpt int
if dpt, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil {
return 0, err
}
if dpt > maxDepth {
maxDepth = dpt // Save max depth
}
}
}
depth = maxDepth
case cborTypeTag:
if d.dm.tagsMd == TagsForbidden {
return 0, &TagsMdError{}
}
tagNum := val
// Scan nested tag numbers to avoid recursion.
for {
if len(d.data) == d.off { // Tag number must be followed by tag content.
return 0, io.ErrUnexpectedEOF
}
if checkBuiltinTags {
err = validBuiltinTag(tagNum, d.data[d.off])
if err != nil {
return 0, err
}
}
if d.dm.bignumTag == BignumTagForbidden && (tagNum == 2 || tagNum == 3) {
return 0, &UnacceptableDataItemError{
CBORType: cborTypeTag.String(),
Message: "bignum",
}
}
if getType(d.data[d.off]) != cborTypeTag {
break
}
if _, _, tagNum, err = d.wellformedHead(); err != nil {
return 0, err
}
depth++
if depth > d.dm.maxNestedLevels {
return 0, &MaxNestedLevelError{d.dm.maxNestedLevels}
}
}
// Check tag content.
return d.wellformedInternal(depth, checkBuiltinTags)
}
return depth, nil
}
// wellformedIndefiniteString checks indefinite length byte/text string's well-formedness and returns max depth and error.
func (d *decoder) wellformedIndefiniteString(t cborType, depth int, checkBuiltinTags bool) (int, error) {
var err error
for {
if len(d.data) == d.off {
return 0, io.ErrUnexpectedEOF
}
if isBreakFlag(d.data[d.off]) {
d.off++
break
}
// Peek ahead to get next type and indefinite length status.
nt, ai := parseInitialByte(d.data[d.off])
if t != nt {
return 0, &SyntaxError{"cbor: wrong element type " + nt.String() + " for indefinite-length " + t.String()}
}
if additionalInformation(ai).isIndefiniteLength() {
return 0, &SyntaxError{"cbor: indefinite-length " + t.String() + " chunk is not definite-length"}
}
if depth, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil {
return 0, err
}
}
return depth, nil
}
// wellformedIndefiniteArrayOrMap checks indefinite length array/map's well-formedness and returns max depth and error.
func (d *decoder) wellformedIndefiniteArrayOrMap(t cborType, depth int, checkBuiltinTags bool) (int, error) {
var err error
maxDepth := depth
i := 0
for {
if len(d.data) == d.off {
return 0, io.ErrUnexpectedEOF
}
if isBreakFlag(d.data[d.off]) {
d.off++
break
}
var dpt int
if dpt, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil {
return 0, err
}
if dpt > maxDepth {
maxDepth = dpt
}
i++
if t == cborTypeArray {
if i > d.dm.maxArrayElements {
return 0, &MaxArrayElementsError{d.dm.maxArrayElements}
}
} else {
if i%2 == 0 && i/2 > d.dm.maxMapPairs {
return 0, &MaxMapPairsError{d.dm.maxMapPairs}
}
}
}
if t == cborTypeMap && i%2 == 1 {
return 0, &SyntaxError{"cbor: unexpected \"break\" code"}
}
return maxDepth, nil
}
func (d *decoder) wellformedHeadWithIndefiniteLengthFlag() (
t cborType,
ai byte,
val uint64,
indefiniteLength bool,
err error,
) {
t, ai, val, err = d.wellformedHead()
if err != nil {
return
}
indefiniteLength = additionalInformation(ai).isIndefiniteLength()
return
}
func (d *decoder) wellformedHead() (t cborType, ai byte, val uint64, err error) {
dataLen := len(d.data) - d.off
if dataLen == 0 {
return 0, 0, 0, io.ErrUnexpectedEOF
}
t, ai = parseInitialByte(d.data[d.off])
val = uint64(ai)
d.off++
dataLen--
if ai <= maxAdditionalInformationWithoutArgument {
return t, ai, val, nil
}
if ai == additionalInformationWith1ByteArgument {
const argumentSize = 1
if dataLen < argumentSize {
return 0, 0, 0, io.ErrUnexpectedEOF
}
val = uint64(d.data[d.off])
d.off++
if t == cborTypePrimitives && val < 32 {
return 0, 0, 0, &SyntaxError{"cbor: invalid simple value " + strconv.Itoa(int(val)) + " for type " + t.String()}
}
return t, ai, val, nil
}
if ai == additionalInformationWith2ByteArgument {
const argumentSize = 2
if dataLen < argumentSize {
return 0, 0, 0, io.ErrUnexpectedEOF
}
val = uint64(binary.BigEndian.Uint16(d.data[d.off : d.off+argumentSize]))
d.off += argumentSize
if t == cborTypePrimitives {
if err := d.acceptableFloat(float64(float16.Frombits(uint16(val)).Float32())); err != nil {
return 0, 0, 0, err
}
}
return t, ai, val, nil
}
if ai == additionalInformationWith4ByteArgument {
const argumentSize = 4
if dataLen < argumentSize {
return 0, 0, 0, io.ErrUnexpectedEOF
}
val = uint64(binary.BigEndian.Uint32(d.data[d.off : d.off+argumentSize]))
d.off += argumentSize
if t == cborTypePrimitives {
if err := d.acceptableFloat(float64(math.Float32frombits(uint32(val)))); err != nil {
return 0, 0, 0, err
}
}
return t, ai, val, nil
}
if ai == additionalInformationWith8ByteArgument {
const argumentSize = 8
if dataLen < argumentSize {
return 0, 0, 0, io.ErrUnexpectedEOF
}
val = binary.BigEndian.Uint64(d.data[d.off : d.off+argumentSize])
d.off += argumentSize
if t == cborTypePrimitives {
if err := d.acceptableFloat(math.Float64frombits(val)); err != nil {
return 0, 0, 0, err
}
}
return t, ai, val, nil
}
if additionalInformation(ai).isIndefiniteLength() {
switch t {
case cborTypePositiveInt, cborTypeNegativeInt, cborTypeTag:
return 0, 0, 0, &SyntaxError{"cbor: invalid additional information " + strconv.Itoa(int(ai)) + " for type " + t.String()}
case cborTypePrimitives: // 0xff (break code) should not be outside wellformedIndefinite().
return 0, 0, 0, &SyntaxError{"cbor: unexpected \"break\" code"}
}
return t, ai, val, nil
}
// ai == 28, 29, 30
return 0, 0, 0, &SyntaxError{"cbor: invalid additional information " + strconv.Itoa(int(ai)) + " for type " + t.String()}
}
func (d *decoder) acceptableFloat(f float64) error {
switch {
case d.dm.nanDec == NaNDecodeForbidden && math.IsNaN(f):
return &UnacceptableDataItemError{
CBORType: cborTypePrimitives.String(),
Message: "floating-point NaN",
}
case d.dm.infDec == InfDecodeForbidden && math.IsInf(f, 0):
return &UnacceptableDataItemError{
CBORType: cborTypePrimitives.String(),
Message: "floating-point infinity",
}
}
return nil
}
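A hedged sketch of how the well-formedness limits above reach callers, assuming DecOptions.MaxNestedLevels is the documented knob behind d.dm.maxNestedLevels and accepts small limits such as 4:

```
package main

import (
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

func main() {
	// Hand-built CBOR for [[[[[0]]]]]: five nested one-element arrays.
	data := []byte{0x81, 0x81, 0x81, 0x81, 0x81, 0x00}

	dm, err := cbor.DecOptions{MaxNestedLevels: 4}.DecMode()
	if err != nil {
		panic(err)
	}

	var v interface{}
	err = dm.Unmarshal(data, &v)
	fmt.Println(err) // cbor: exceeded max nested level 4
}
```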

13
vendor/github.com/x448/float16/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,13 @@
language: go
go:
- 1.11.x
env:
- GO111MODULE=on
script:
- go test -short -coverprofile=coverage.txt -covermode=count ./...
after_success:
- bash <(curl -s https://codecov.io/bash)

22
vendor/github.com/x448/float16/LICENSE generated vendored Normal file
View File

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2019 Montgomery Edwards⁴⁴⁸ and Faye Amacker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

133
vendor/github.com/x448/float16/README.md generated vendored Normal file
View File

@ -0,0 +1,133 @@
# Float16 (Binary16) in Go/Golang
[![Build Status](https://travis-ci.org/x448/float16.svg?branch=master)](https://travis-ci.org/x448/float16)
[![codecov](https://codecov.io/gh/x448/float16/branch/master/graph/badge.svg?v=4)](https://codecov.io/gh/x448/float16)
[![Go Report Card](https://goreportcard.com/badge/github.com/x448/float16)](https://goreportcard.com/report/github.com/x448/float16)
[![Release](https://img.shields.io/github/release/x448/float16.svg?style=flat-square)](https://github.com/x448/float16/releases)
[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/x448/float16/master/LICENSE)
`float16` package provides [IEEE 754 half-precision floating-point format (binary16)](https://en.wikipedia.org/wiki/Half-precision_floating-point_format) with IEEE 754 default rounding for conversions. IEEE 754-2008 refers to this 16-bit floating-point format as binary16.
IEEE 754 default rounding ("Round-to-Nearest RoundTiesToEven") is considered the most accurate and statistically unbiased estimate of the true result.
All possible 4+ billion floating-point conversions with this library are verified to be correct.
Lowercase "float16" refers to IEEE 754 binary16. And capitalized "Float16" refers to exported Go data type provided by this library.
## Features
Current features include:
* float16 to float32 conversions use lossless conversion.
* float32 to float16 conversions use IEEE 754-2008 "Round-to-Nearest RoundTiesToEven".
* conversions using pure Go take about 2.65 ns/op on a desktop amd64.
* unit tests provide 100% code coverage and check all possible 4+ billion conversions.
* other functions include: IsInf(), IsNaN(), IsNormal(), PrecisionFromfloat32(), String(), etc.
* all functions in this library use zero allocs except String().
## Status
This library is used by [fxamacker/cbor](https://github.com/fxamacker/cbor) and is ready for production use on supported platforms. The version number < 1.0 indicates more functions and options are planned but not yet published.
Current status:
* core API is done and breaking API changes are unlikely.
* 100% of unit tests pass:
* short mode (`go test -short`) tests around 65765 conversions in 0.005s.
* normal mode (`go test`) tests all possible 4+ billion conversions in about 95s.
* 100% code coverage with both short mode and normal mode.
* tested on amd64 but it should work on all little-endian platforms supported by Go.
Roadmap:
* add functions for fast batch conversions leveraging SIMD when supported by hardware.
* speed up unit test when verifying all possible 4+ billion conversions.
* test on additional platforms.
## Float16 to Float32 Conversion
Conversions from float16 to float32 are lossless conversions. All 65536 possible float16 to float32 conversions (in pure Go) are confirmed to be correct.
Unit tests take a fraction of a second to check all 65536 expected values for float16 to float32 conversions.
## Float32 to Float16 Conversion
Conversions from float32 to float16 use IEEE 754 default rounding ("Round-to-Nearest RoundTiesToEven"). All 4294967296 possible float32 to float16 conversions (in pure Go) are confirmed to be correct.
Unit tests in normal mode take about 1-2 minutes to check all 4+ billion float32 input values and results for Fromfloat32(), FromNaN32ps(), and PrecisionFromfloat32().
Unit tests in short mode use a small subset (around 229 float32 inputs) and finish in under 0.01 second while still reaching 100% code coverage.
## Usage
Install with `go get github.com/x448/float16`.
```
// Convert float32 to float16
pi := float32(math.Pi)
pi16 := float16.Fromfloat32(pi)
// Convert float16 to float32
pi32 := pi16.Float32()
// PrecisionFromfloat32() is faster than the overhead of calling a function.
// This example only converts if there's no data loss and input is not a subnormal.
if float16.PrecisionFromfloat32(pi) == float16.PrecisionExact {
pi16 := float16.Fromfloat32(pi)
}
```
## Float16 Type and API
Float16 (capitalized) is a Go type with uint16 as the underlying state. There are 6 exported functions and 9 exported methods.
```
package float16 // import "github.com/x448/float16"
// Exported types and consts
type Float16 uint16
const ErrInvalidNaNValue = float16Error("float16: invalid NaN value, expected IEEE 754 NaN")
// Exported functions
Fromfloat32(f32 float32) Float16 // Float16 number converted from f32 using IEEE 754 default rounding
with identical results to AMD and Intel F16C hardware. NaN inputs
are converted with quiet bit always set on, to be like F16C.
FromNaN32ps(nan float32) (Float16, error) // Float16 NaN without modifying quiet bit.
// The "ps" suffix means "preserve signaling".
// Returns sNaN and ErrInvalidNaNValue if nan isn't a NaN.
Frombits(b16 uint16) Float16 // Float16 number corresponding to b16 (IEEE 754 binary16 rep.)
NaN() Float16 // Float16 of IEEE 754 binary16 not-a-number
Inf(sign int) Float16 // Float16 of IEEE 754 binary16 infinity according to sign
PrecisionFromfloat32(f32 float32) Precision // quickly indicates exact, ..., overflow, underflow
// (inline and < 1 ns/op)
// Exported methods
(f Float16) Float32() float32 // float32 number converted from f16 using lossless conversion
(f Float16) Bits() uint16 // the IEEE 754 binary16 representation of f
(f Float16) IsNaN() bool // true if f is not-a-number (NaN)
(f Float16) IsQuietNaN() bool // true if f is a quiet not-a-number (NaN)
(f Float16) IsInf(sign int) bool // true if f is infinite based on sign (-1=NegInf, 0=any, 1=PosInf)
(f Float16) IsFinite() bool // true if f is not infinite or NaN
(f Float16) IsNormal() bool // true if f is not zero, infinite, subnormal, or NaN.
(f Float16) Signbit() bool // true if f is negative or negative zero
(f Float16) String() string // string representation of f to satisfy fmt.Stringer interface
```
See [API](https://godoc.org/github.com/x448/float16) at godoc.org for more info.
## Benchmarks
Conversions (in pure Go) are around 2.65 ns/op for float16 -> float32 and float32 -> float16 on amd64. Speeds can vary depending on input value.
```
All functions have zero allocations except float16.String().
FromFloat32pi-2 2.59ns ± 0% // speed using Fromfloat32() to convert a float32 of math.Pi to Float16
ToFloat32pi-2 2.69ns ± 0% // speed using Float32() to convert a float16 of math.Pi to float32
Frombits-2 0.29ns ± 5% // speed using Frombits() to cast a uint16 to Float16
PrecisionFromFloat32-2 0.29ns ± 1% // speed using PrecisionFromfloat32() to check for overflows, etc.
```
## System Requirements
* Tested on Go 1.11, 1.12, and 1.13 but it should also work with older versions.
* Tested on amd64 but it should also work on all little-endian platforms supported by Go.
## Special Thanks
Special thanks to Kathryn Long (starkat99) for creating [half-rs](https://github.com/starkat99/half-rs), a very nice rust implementation of float16.
## License
Copyright (c) 2019 Montgomery Edwards⁴⁴⁸ and Faye Amacker
Licensed under [MIT License](LICENSE)
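A hedged sketch contrasting FromNaN32ps() and Fromfloat32() on NaN inputs, as described in the README above:

```
package main

import (
	"fmt"
	"math"

	"github.com/x448/float16"
)

func main() {
	// A float32 signaling NaN (quiet bit clear, payload 1).
	snan := math.Float32frombits(0x7f800001)

	f16, err := float16.FromNaN32ps(snan)
	if err != nil {
		panic(err)
	}
	// Still a NaN, and still signaling (quiet bit not forced on).
	fmt.Println(f16.IsNaN(), f16.IsQuietNaN()) // true false

	// Fromfloat32, by contrast, always returns a quiet NaN for NaN inputs.
	fmt.Println(float16.Fromfloat32(snan).IsQuietNaN()) // true
}
```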

302
vendor/github.com/x448/float16/float16.go generated vendored Normal file
View File

@ -0,0 +1,302 @@
// Copyright 2019 Montgomery Edwards⁴⁴⁸ and Faye Amacker
//
// Special thanks to Kathryn Long for her Rust implementation
// of float16 at github.com/starkat99/half-rs (MIT license)
package float16
import (
"math"
"strconv"
)
// Float16 represents IEEE 754 half-precision floating-point numbers (binary16).
type Float16 uint16
// Precision indicates whether the conversion to Float16 is
// exact, subnormal without dropped bits, inexact, underflow, or overflow.
type Precision int
const (
// PrecisionExact is for non-subnormals that don't drop bits during conversion.
// All of these can round-trip. Should always convert to float16.
PrecisionExact Precision = iota
// PrecisionUnknown is for subnormals that don't drop bits during conversion but
// not all of these can round-trip so precision is unknown without more effort.
// Only 2046 of these can round-trip and the rest cannot round-trip.
PrecisionUnknown
// PrecisionInexact is for dropped significand bits and cannot round-trip.
// Some of these are subnormals. Cannot round-trip float32->float16->float32.
PrecisionInexact
// PrecisionUnderflow is for Underflows. Cannot round-trip float32->float16->float32.
PrecisionUnderflow
// PrecisionOverflow is for Overflows. Cannot round-trip float32->float16->float32.
PrecisionOverflow
)
// PrecisionFromfloat32 returns Precision without performing
// the conversion. Conversions from both Infinity and NaN
// values will always report PrecisionExact even if NaN payload
// or NaN-Quiet-Bit is lost. This function is kept simple to
// allow inlining and run < 0.5 ns/op, to serve as a fast filter.
func PrecisionFromfloat32(f32 float32) Precision {
u32 := math.Float32bits(f32)
if u32 == 0 || u32 == 0x80000000 {
// +- zero will always be exact conversion
return PrecisionExact
}
const COEFMASK uint32 = 0x7fffff // 23 least significant bits
const EXPSHIFT uint32 = 23
const EXPBIAS uint32 = 127
const EXPMASK uint32 = uint32(0xff) << EXPSHIFT
const DROPMASK uint32 = COEFMASK >> 10
exp := int32(((u32 & EXPMASK) >> EXPSHIFT) - EXPBIAS)
coef := u32 & COEFMASK
if exp == 128 {
// +- infinity or NaN
// apps may want to do extra checks for NaN separately
return PrecisionExact
}
// https://en.wikipedia.org/wiki/Half-precision_floating-point_format says,
// "Decimals between 2^24 (minimum positive subnormal) and 2^14 (maximum subnormal): fixed interval 2^24"
if exp < -24 {
return PrecisionUnderflow
}
if exp > 15 {
return PrecisionOverflow
}
if (coef & DROPMASK) != uint32(0) {
// these include subnormals and non-subnormals that dropped bits
return PrecisionInexact
}
if exp < -14 {
// Subnormals. Caller may want to test these further.
// There are 2046 subnormals that can successfully round-trip f32->f16->f32
// and 20 of those 2046 have 32-bit input coef == 0.
// RFC 7049 and 7049bis Draft 12 don't precisely define "preserves value"
// so some protocols and libraries will choose to handle subnormals differently
// when deciding to encode them to CBOR float32 vs float16.
return PrecisionUnknown
}
return PrecisionExact
}
// Frombits returns the float16 number corresponding to the IEEE 754 binary16
// representation u16, with the sign bit of u16 and the result in the same bit
// position. Frombits(Bits(x)) == x.
func Frombits(u16 uint16) Float16 {
return Float16(u16)
}
// Fromfloat32 returns a Float16 value converted from f32. Conversion uses
// IEEE default rounding (nearest int, with ties to even).
func Fromfloat32(f32 float32) Float16 {
return Float16(f32bitsToF16bits(math.Float32bits(f32)))
}
// ErrInvalidNaNValue indicates a NaN was not received.
const ErrInvalidNaNValue = float16Error("float16: invalid NaN value, expected IEEE 754 NaN")
type float16Error string
func (e float16Error) Error() string { return string(e) }
// FromNaN32ps converts nan to IEEE binary16 NaN while preserving both
// signaling and payload. Unlike Fromfloat32(), which can only return
// qNaN because it sets quiet bit = 1, this can return both sNaN and qNaN.
// If the result is infinity (sNaN with empty payload), then the
// lowest bit of payload is set to make the result a NaN.
// Returns ErrInvalidNaNValue and 0x7c01 (sNaN) if nan isn't IEEE 754 NaN.
// This function was kept simple to be able to inline.
func FromNaN32ps(nan float32) (Float16, error) {
const SNAN = Float16(uint16(0x7c01)) // signalling NaN
u32 := math.Float32bits(nan)
sign := u32 & 0x80000000
exp := u32 & 0x7f800000
coef := u32 & 0x007fffff
if (exp != 0x7f800000) || (coef == 0) {
return SNAN, ErrInvalidNaNValue
}
u16 := uint16((sign >> 16) | uint32(0x7c00) | (coef >> 13))
if (u16 & 0x03ff) == 0 {
// result became infinity, make it NaN by setting lowest bit in payload
u16 = u16 | 0x0001
}
return Float16(u16), nil
}
// NaN returns a Float16 of IEEE 754 binary16 not-a-number (NaN).
// Returned NaN value 0x7e01 has all exponent bits = 1 with the
// first and last bits = 1 in the significand. This is consistent
// with Go's 64-bit math.NaN(). Canonical CBOR in RFC 7049 uses 0x7e00.
func NaN() Float16 {
return Float16(0x7e01)
}
// Inf returns a Float16 with an infinity value with the specified sign.
// A sign >= 0 returns positive infinity.
// A sign < 0 returns negative infinity.
func Inf(sign int) Float16 {
if sign >= 0 {
return Float16(0x7c00)
}
return Float16(0x8000 | 0x7c00)
}
// Float32 returns a float32 converted from f (Float16).
// This is a lossless conversion.
func (f Float16) Float32() float32 {
u32 := f16bitsToF32bits(uint16(f))
return math.Float32frombits(u32)
}
// Bits returns the IEEE 754 binary16 representation of f, with the sign bit
// of f and the result in the same bit position. Bits(Frombits(x)) == x.
func (f Float16) Bits() uint16 {
return uint16(f)
}
// IsNaN reports whether f is an IEEE 754 binary16 “not-a-number” value.
func (f Float16) IsNaN() bool {
return (f&0x7c00 == 0x7c00) && (f&0x03ff != 0)
}
// IsQuietNaN reports whether f is a quiet (non-signaling) IEEE 754 binary16
// “not-a-number” value.
func (f Float16) IsQuietNaN() bool {
return (f&0x7c00 == 0x7c00) && (f&0x03ff != 0) && (f&0x0200 != 0)
}
// IsInf reports whether f is an infinity (inf).
// A sign > 0 reports whether f is positive inf.
// A sign < 0 reports whether f is negative inf.
// A sign == 0 reports whether f is either inf.
func (f Float16) IsInf(sign int) bool {
return ((f == 0x7c00) && sign >= 0) ||
(f == 0xfc00 && sign <= 0)
}
// IsFinite returns true if f is neither infinite nor NaN.
func (f Float16) IsFinite() bool {
return (uint16(f) & uint16(0x7c00)) != uint16(0x7c00)
}
// IsNormal returns true if f is neither zero, infinite, subnormal, nor NaN.
func (f Float16) IsNormal() bool {
exp := uint16(f) & uint16(0x7c00)
return (exp != uint16(0x7c00)) && (exp != 0)
}
// Signbit reports whether f is negative or negative zero.
func (f Float16) Signbit() bool {
return (uint16(f) & uint16(0x8000)) != 0
}
// String satisfies the fmt.Stringer interface.
func (f Float16) String() string {
return strconv.FormatFloat(float64(f.Float32()), 'f', -1, 32)
}
// f16bitsToF32bits returns uint32 (float32 bits) converted from specified uint16.
func f16bitsToF32bits(in uint16) uint32 {
// All 65536 conversions with this were confirmed to be correct
// by Montgomery Edwards⁴⁴⁸ (github.com/x448).
sign := uint32(in&0x8000) << 16 // sign for 32-bit
exp := uint32(in&0x7c00) >> 10 // exponent for 16-bit
coef := uint32(in&0x03ff) << 13 // significand for 32-bit
if exp == 0x1f {
if coef == 0 {
// infinity
return sign | 0x7f800000 | coef
}
// NaN
return sign | 0x7fc00000 | coef
}
if exp == 0 {
if coef == 0 {
// zero
return sign
}
// normalize subnormal numbers
exp++
for coef&0x7f800000 == 0 {
coef <<= 1
exp--
}
coef &= 0x007fffff
}
return sign | ((exp + (0x7f - 0xf)) << 23) | coef
}
// f32bitsToF16bits returns uint16 (Float16 bits) converted from the specified float32.
// Conversion rounds to the nearest representable value, with ties to even.
func f32bitsToF16bits(u32 uint32) uint16 {
// Translated from Rust to Go by Montgomery Edwards⁴⁴⁸ (github.com/x448).
// All 4294967296 conversions with this were confirmed to be correct by x448.
// Original Rust implementation is by Kathryn Long (github.com/starkat99) with MIT license.
sign := u32 & 0x80000000
exp := u32 & 0x7f800000
coef := u32 & 0x007fffff
if exp == 0x7f800000 {
// NaN or Infinity
nanBit := uint32(0)
if coef != 0 {
nanBit = uint32(0x0200)
}
return uint16((sign >> 16) | uint32(0x7c00) | nanBit | (coef >> 13))
}
halfSign := sign >> 16
unbiasedExp := int32(exp>>23) - 127
halfExp := unbiasedExp + 15
if halfExp >= 0x1f {
return uint16(halfSign | uint32(0x7c00))
}
if halfExp <= 0 {
if 14-halfExp > 24 {
return uint16(halfSign)
}
coef := coef | uint32(0x00800000)
halfCoef := coef >> uint32(14-halfExp)
roundBit := uint32(1) << uint32(13-halfExp)
if (coef&roundBit) != 0 && (coef&(3*roundBit-1)) != 0 {
halfCoef++
}
return uint16(halfSign | halfCoef)
}
uHalfExp := uint32(halfExp) << 10
halfCoef := coef >> 13
roundBit := uint32(0x00001000)
if (coef&roundBit) != 0 && (coef&(3*roundBit-1)) != 0 {
return uint16((halfSign | uHalfExp | halfCoef) + 1)
}
return uint16(halfSign | uHalfExp | halfCoef)
}
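A hedged sketch exercising the conversion and precision helpers defined above; 1365/4096 is just one example of a value that binary16 represents exactly:

```
package main

import (
	"fmt"
	"math"

	"github.com/x448/float16"
)

func main() {
	// Exactly representable in binary16, so the round trip is lossless.
	exact := float32(1365) / 4096

	if float16.PrecisionFromfloat32(exact) == float16.PrecisionExact {
		f16 := float16.Fromfloat32(exact)
		fmt.Println(f16.Float32() == exact) // true
	}

	// math.Pi drops significand bits when narrowed to binary16.
	pi := float32(math.Pi)
	fmt.Println(float16.PrecisionFromfloat32(pi) == float16.PrecisionInexact) // true
	fmt.Println(float16.Fromfloat32(pi).String())                            // 3.140625
}
```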

View File

@ -60,7 +60,7 @@ func configFromServer(h1 *http.Server, h2 *Server) http2Config {
return conf
}
// configFromServer merges configuration settings from h2 and h2.t1.HTTP2
// configFromTransport merges configuration settings from h2 and h2.t1.HTTP2
// (the net/http Transport).
func configFromTransport(h2 *Transport) http2Config {
conf := http2Config{

View File

@ -13,7 +13,7 @@ func fillNetHTTPServerConfig(conf *http2Config, srv *http.Server) {
fillNetHTTPConfig(conf, srv.HTTP2)
}
// fillNetHTTPServerConfig sets fields in conf from tr.HTTP2.
// fillNetHTTPTransportConfig sets fields in conf from tr.HTTP2.
func fillNetHTTPTransportConfig(conf *http2Config, tr *http.Transport) {
fillNetHTTPConfig(conf, tr.HTTP2)
}

View File

@ -1490,7 +1490,7 @@ func (mh *MetaHeadersFrame) checkPseudos() error {
pf := mh.PseudoFields()
for i, hf := range pf {
switch hf.Name {
case ":method", ":path", ":scheme", ":authority":
case ":method", ":path", ":scheme", ":authority", ":protocol":
isRequest = true
case ":status":
isResponse = true
@ -1498,7 +1498,7 @@ func (mh *MetaHeadersFrame) checkPseudos() error {
return pseudoHeaderError(hf.Name)
}
// Check for duplicates.
// This would be a bad algorithm, but N is 4.
// This would be a bad algorithm, but N is 5.
// And this doesn't allocate.
for _, hf2 := range pf[:i] {
if hf.Name == hf2.Name {

View File

@ -38,6 +38,7 @@ var (
logFrameWrites bool
logFrameReads bool
inTests bool
disableExtendedConnectProtocol bool
)
func init() {
@ -50,6 +51,9 @@ func init() {
logFrameWrites = true
logFrameReads = true
}
if strings.Contains(e, "http2xconnect=0") {
disableExtendedConnectProtocol = true
}
}
const (
@ -141,6 +145,10 @@ func (s Setting) Valid() error {
if s.Val < 16384 || s.Val > 1<<24-1 {
return ConnectionError(ErrCodeProtocol)
}
case SettingEnableConnectProtocol:
if s.Val != 1 && s.Val != 0 {
return ConnectionError(ErrCodeProtocol)
}
}
return nil
}
@ -156,6 +164,7 @@ const (
SettingInitialWindowSize SettingID = 0x4
SettingMaxFrameSize SettingID = 0x5
SettingMaxHeaderListSize SettingID = 0x6
SettingEnableConnectProtocol SettingID = 0x8
)
var settingName = map[SettingID]string{
@ -165,6 +174,7 @@ var settingName = map[SettingID]string{
SettingInitialWindowSize: "INITIAL_WINDOW_SIZE",
SettingMaxFrameSize: "MAX_FRAME_SIZE",
SettingMaxHeaderListSize: "MAX_HEADER_LIST_SIZE",
SettingEnableConnectProtocol: "ENABLE_CONNECT_PROTOCOL",
}
func (s SettingID) String() string {
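The hunks above gate HTTP/2 extended CONNECT behind a GODEBUG knob (http2xconnect=0) and the new SETTINGS_ENABLE_CONNECT_PROTOCOL setting. A hedged standalone sketch of the same environment check that the vendored init() performs:

```
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Mirrors the substring test in the vendored init(): extended CONNECT is
	// disabled when GODEBUG contains "http2xconnect=0".
	disabled := strings.Contains(os.Getenv("GODEBUG"), "http2xconnect=0")
	fmt.Println("extended CONNECT disabled:", disabled)
}
```

With the knob set, the server hunks that follow skip advertising SETTINGS_ENABLE_CONNECT_PROTOCOL and reject requests carrying a :protocol pseudo-header.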

View File

@ -932,14 +932,18 @@ func (sc *serverConn) serve(conf http2Config) {
sc.vlogf("http2: server connection from %v on %p", sc.conn.RemoteAddr(), sc.hs)
}
sc.writeFrame(FrameWriteRequest{
write: writeSettings{
settings := writeSettings{
{SettingMaxFrameSize, conf.MaxReadFrameSize},
{SettingMaxConcurrentStreams, sc.advMaxStreams},
{SettingMaxHeaderListSize, sc.maxHeaderListSize()},
{SettingHeaderTableSize, conf.MaxDecoderHeaderTableSize},
{SettingInitialWindowSize, uint32(sc.initialStreamRecvWindowSize)},
},
}
if !disableExtendedConnectProtocol {
settings = append(settings, Setting{SettingEnableConnectProtocol, 1})
}
sc.writeFrame(FrameWriteRequest{
write: settings,
})
sc.unackedSettings++
@ -1801,6 +1805,9 @@ func (sc *serverConn) processSetting(s Setting) error {
sc.maxFrameSize = int32(s.Val) // the maximum valid s.Val is < 2^31
case SettingMaxHeaderListSize:
sc.peerMaxHeaderListSize = s.Val
case SettingEnableConnectProtocol:
// Receipt of this parameter by a server does not
// have any impact
default:
// Unknown setting: "An endpoint that receives a SETTINGS
// frame with any unknown or unsupported identifier MUST
@ -2231,11 +2238,17 @@ func (sc *serverConn) newWriterAndRequest(st *stream, f *MetaHeadersFrame) (*res
scheme: f.PseudoValue("scheme"),
authority: f.PseudoValue("authority"),
path: f.PseudoValue("path"),
protocol: f.PseudoValue("protocol"),
}
// extended connect is disabled, so we should not see :protocol
if disableExtendedConnectProtocol && rp.protocol != "" {
return nil, nil, sc.countError("bad_connect", streamError(f.StreamID, ErrCodeProtocol))
}
isConnect := rp.method == "CONNECT"
if isConnect {
if rp.path != "" || rp.scheme != "" || rp.authority == "" {
if rp.protocol == "" && (rp.path != "" || rp.scheme != "" || rp.authority == "") {
return nil, nil, sc.countError("bad_connect", streamError(f.StreamID, ErrCodeProtocol))
}
} else if rp.method == "" || rp.path == "" || (rp.scheme != "https" && rp.scheme != "http") {
@ -2259,6 +2272,9 @@ func (sc *serverConn) newWriterAndRequest(st *stream, f *MetaHeadersFrame) (*res
if rp.authority == "" {
rp.authority = rp.header.Get("Host")
}
if rp.protocol != "" {
rp.header.Set(":protocol", rp.protocol)
}
rw, req, err := sc.newWriterAndRequestNoBody(st, rp)
if err != nil {
@ -2285,6 +2301,7 @@ func (sc *serverConn) newWriterAndRequest(st *stream, f *MetaHeadersFrame) (*res
type requestParam struct {
method string
scheme, authority, path string
protocol string
header http.Header
}
@ -2326,7 +2343,7 @@ func (sc *serverConn) newWriterAndRequestNoBody(st *stream, rp requestParam) (*r
var url_ *url.URL
var requestURI string
if rp.method == "CONNECT" {
if rp.method == "CONNECT" && rp.protocol == "" {
url_ = &url.URL{Host: rp.authority}
requestURI = rp.authority // mimic HTTP/1 server behavior
} else {

View File

@ -375,7 +375,9 @@ type ClientConn struct {
doNotReuse bool // whether conn is marked to not be reused for any future requests
closing bool
closed bool
closedOnIdle bool // true if conn was closed for idleness
seenSettings bool // true if we've seen a settings frame, false otherwise
seenSettingsChan chan struct{} // closed when seenSettings is true or frame reading fails
wantSettingsAck bool // we sent a SETTINGS frame and haven't heard back
goAway *GoAwayFrame // if non-nil, the GoAwayFrame we received
goAwayDebug string // goAway frame's debug data, retained as a string
@ -396,6 +398,17 @@ type ClientConn struct {
initialStreamRecvWindowSize int32
readIdleTimeout time.Duration
pingTimeout time.Duration
extendedConnectAllowed bool
// rstStreamPingsBlocked works around an unfortunate gRPC behavior.
// gRPC strictly limits the number of PING frames that it will receive.
// The default is two pings per two hours, but the limit resets every time
// the gRPC endpoint sends a HEADERS or DATA frame. See golang/go#70575.
//
// rstStreamPingsBlocked is set after receiving a response to a PING frame
// bundled with an RST_STREAM (see pendingResets below), and cleared after
// receiving a HEADERS or DATA frame.
rstStreamPingsBlocked bool
// pendingResets is the number of RST_STREAM frames we have sent to the peer,
// without confirming that the peer has received them. When we send a RST_STREAM,
@ -819,6 +832,7 @@ func (t *Transport) newClientConn(c net.Conn, singleUse bool) (*ClientConn, erro
peerMaxHeaderListSize: 0xffffffffffffffff, // "infinite", per spec. Use 2^64-1 instead.
streams: make(map[uint32]*clientStream),
singleUse: singleUse,
seenSettingsChan: make(chan struct{}),
wantSettingsAck: true,
readIdleTimeout: conf.SendPingTimeout,
pingTimeout: conf.PingTimeout,
@ -1076,10 +1090,12 @@ func (cc *ClientConn) idleStateLocked() (st clientConnIdleState) {
// If this connection has never been used for a request and is closed,
// then let it take a request (which will fail).
// If the conn was closed for idleness, we're racing the idle timer;
// don't try to use the conn. (Issue #70515.)
//
// This avoids a situation where an error early in a connection's lifetime
// goes unreported.
if cc.nextStreamID == 1 && cc.streamsReserved == 0 && cc.closed {
if cc.nextStreamID == 1 && cc.streamsReserved == 0 && cc.closed && !cc.closedOnIdle {
st.canTakeNewRequest = true
}
@ -1142,6 +1158,7 @@ func (cc *ClientConn) closeIfIdle() {
return
}
cc.closed = true
cc.closedOnIdle = true
nextID := cc.nextStreamID
// TODO: do clients send GOAWAY too? maybe? Just Close:
cc.mu.Unlock()
@ -1466,6 +1483,8 @@ func (cs *clientStream) doRequest(req *http.Request, streamf func(*clientStream)
cs.cleanupWriteRequest(err)
}
var errExtendedConnectNotSupported = errors.New("net/http: extended connect not supported by peer")
// writeRequest sends a request.
//
// It returns nil after the request is written, the response read,
@ -1481,12 +1500,31 @@ func (cs *clientStream) writeRequest(req *http.Request, streamf func(*clientStre
return err
}
// Wait until the server's first SETTINGS frame has been received. The server
// can change this setting in a later SETTINGS frame, but we only honor the
// first one.
var isExtendedConnect bool
if req.Method == "CONNECT" && req.Header.Get(":protocol") != "" {
isExtendedConnect = true
}
// Acquire the new-request lock by writing to reqHeaderMu.
// This lock guards the critical section covering allocating a new stream ID
// (requires mu) and creating the stream (requires wmu).
if cc.reqHeaderMu == nil {
panic("RoundTrip on uninitialized ClientConn") // for tests
}
if isExtendedConnect {
select {
case <-cs.reqCancel:
return errRequestCanceled
case <-ctx.Done():
return ctx.Err()
case <-cc.seenSettingsChan:
if !cc.extendedConnectAllowed {
return errExtendedConnectNotSupported
}
}
}
select {
case cc.reqHeaderMu <- struct{}{}:
case <-cs.reqCancel:
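For illustration only (not part of this diff): writeRequest above now holds back an extended CONNECT until the first SETTINGS frame has been seen, and fails with errExtendedConnectNotSupported ("net/http: extended connect not supported by peer") if the server did not advertise SETTINGS_ENABLE_CONNECT_PROTOCOL. A minimal client-side sketch; the endpoint and the "websocket" protocol value are illustrative, and a real tunnel would also supply a request body:

package main

import (
	"log"
	"net/http"

	"golang.org/x/net/http2"
)

func main() {
	t := &http2.Transport{}

	// The ":protocol" pseudo-header travels in the ordinary header map; the
	// validateHeaders change below special-cases exactly this key.
	req, err := http.NewRequest("CONNECT", "https://example.com/chat", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set(":protocol", "websocket")

	resp, err := t.RoundTrip(req)
	if err != nil {
		log.Fatal(err) // includes the "not supported by peer" case above
	}
	defer resp.Body.Close()
	// On success, resp.Body (plus a request body, if provided) carries the
	// tunneled protocol over the HTTP/2 stream.
}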
@ -1714,10 +1752,14 @@ func (cs *clientStream) cleanupWriteRequest(err error) {
ping := false
if !closeOnIdle {
cc.mu.Lock()
// rstStreamPingsBlocked works around a gRPC behavior:
// see comment on the field for details.
if !cc.rstStreamPingsBlocked {
if cc.pendingResets == 0 {
ping = true
}
cc.pendingResets++
}
cc.mu.Unlock()
}
cc.writeStreamReset(cs.ID, ErrCodeCancel, ping, err)
@ -2030,7 +2072,7 @@ func (cs *clientStream) awaitFlowControl(maxBytes int) (taken int32, err error)
func validateHeaders(hdrs http.Header) string {
for k, vv := range hdrs {
if !httpguts.ValidHeaderFieldName(k) {
if !httpguts.ValidHeaderFieldName(k) && k != ":protocol" {
return fmt.Sprintf("name %q", k)
}
for _, v := range vv {
@ -2046,6 +2088,10 @@ func validateHeaders(hdrs http.Header) string {
var errNilRequestURL = errors.New("http2: Request.URI is nil")
func isNormalConnect(req *http.Request) bool {
return req.Method == "CONNECT" && req.Header.Get(":protocol") == ""
}
// requires cc.wmu be held.
func (cc *ClientConn) encodeHeaders(req *http.Request, addGzipHeader bool, trailers string, contentLength int64) ([]byte, error) {
cc.hbuf.Reset()
@ -2066,7 +2112,7 @@ func (cc *ClientConn) encodeHeaders(req *http.Request, addGzipHeader bool, trail
}
var path string
if req.Method != "CONNECT" {
if !isNormalConnect(req) {
path = req.URL.RequestURI()
if !validPseudoPath(path) {
orig := path
@ -2103,7 +2149,7 @@ func (cc *ClientConn) encodeHeaders(req *http.Request, addGzipHeader bool, trail
m = http.MethodGet
}
f(":method", m)
if req.Method != "CONNECT" {
if !isNormalConnect(req) {
f(":path", path)
f(":scheme", req.URL.Scheme)
}
@ -2392,9 +2438,12 @@ func (rl *clientConnReadLoop) cleanup() {
// This avoids a situation where new connections are constantly created,
// added to the pool, fail, and are removed from the pool, without any error
// being surfaced to the user.
const unusedWaitTime = 5 * time.Second
unusedWaitTime := 5 * time.Second
if cc.idleTimeout > 0 && unusedWaitTime > cc.idleTimeout {
unusedWaitTime = cc.idleTimeout
}
idleTime := cc.t.now().Sub(cc.lastActive)
if atomic.LoadUint32(&cc.atomicReused) == 0 && idleTime < unusedWaitTime {
if atomic.LoadUint32(&cc.atomicReused) == 0 && idleTime < unusedWaitTime && !cc.closedOnIdle {
cc.idleTimer = cc.t.afterFunc(unusedWaitTime-idleTime, func() {
cc.t.connPool().MarkDead(cc)
})
@ -2461,7 +2510,7 @@ func (rl *clientConnReadLoop) run() error {
cc.vlogf("http2: Transport readFrame error on conn %p: (%T) %v", cc, err, err)
}
if se, ok := err.(StreamError); ok {
if cs := rl.streamByID(se.StreamID); cs != nil {
if cs := rl.streamByID(se.StreamID, notHeaderOrDataFrame); cs != nil {
if se.Cause == nil {
se.Cause = cc.fr.errDetail
}
@ -2507,13 +2556,16 @@ func (rl *clientConnReadLoop) run() error {
if VerboseLogs {
cc.vlogf("http2: Transport conn %p received error from processing frame %v: %v", cc, summarizeFrame(f), err)
}
if !cc.seenSettings {
close(cc.seenSettingsChan)
}
return err
}
}
}
func (rl *clientConnReadLoop) processHeaders(f *MetaHeadersFrame) error {
cs := rl.streamByID(f.StreamID)
cs := rl.streamByID(f.StreamID, headerOrDataFrame)
if cs == nil {
// We'd get here if we canceled a request while the
// server had its response still in flight. So if this
@ -2842,7 +2894,7 @@ func (b transportResponseBody) Close() error {
func (rl *clientConnReadLoop) processData(f *DataFrame) error {
cc := rl.cc
cs := rl.streamByID(f.StreamID)
cs := rl.streamByID(f.StreamID, headerOrDataFrame)
data := f.Data()
if cs == nil {
cc.mu.Lock()
@ -2977,9 +3029,22 @@ func (rl *clientConnReadLoop) endStreamError(cs *clientStream, err error) {
cs.abortStream(err)
}
func (rl *clientConnReadLoop) streamByID(id uint32) *clientStream {
// Constants passed to streamByID for documentation purposes.
const (
headerOrDataFrame = true
notHeaderOrDataFrame = false
)
// streamByID returns the stream with the given id, or nil if no stream has that id.
// If headerOrData is true, it clears rstStreamPingsBlocked.
func (rl *clientConnReadLoop) streamByID(id uint32, headerOrData bool) *clientStream {
rl.cc.mu.Lock()
defer rl.cc.mu.Unlock()
if headerOrData {
// Work around an unfortunate gRPC behavior.
// See comment on ClientConn.rstStreamPingsBlocked for details.
rl.cc.rstStreamPingsBlocked = false
}
cs := rl.cc.streams[id]
if cs != nil && !cs.readAborted {
return cs
@ -3073,6 +3138,21 @@ func (rl *clientConnReadLoop) processSettingsNoWrite(f *SettingsFrame) error {
case SettingHeaderTableSize:
cc.henc.SetMaxDynamicTableSize(s.Val)
cc.peerMaxHeaderTableSize = s.Val
case SettingEnableConnectProtocol:
if err := s.Valid(); err != nil {
return err
}
// If the peer wants to send us SETTINGS_ENABLE_CONNECT_PROTOCOL,
// we require that it do so in the first SETTINGS frame.
//
// When we attempt to use extended CONNECT, we wait for the first
// SETTINGS frame to see if the server supports it. If we let the
// server enable the feature with a later SETTINGS frame, then
// users will see inconsistent results depending on whether we've
// seen that frame or not.
if !cc.seenSettings {
cc.extendedConnectAllowed = s.Val == 1
}
default:
cc.vlogf("Unhandled Setting: %v", s)
}
@ -3090,6 +3170,7 @@ func (rl *clientConnReadLoop) processSettingsNoWrite(f *SettingsFrame) error {
// connection can establish to our default.
cc.maxConcurrentStreams = defaultMaxConcurrentStreams
}
close(cc.seenSettingsChan)
cc.seenSettings = true
}
@ -3098,7 +3179,7 @@ func (rl *clientConnReadLoop) processSettingsNoWrite(f *SettingsFrame) error {
func (rl *clientConnReadLoop) processWindowUpdate(f *WindowUpdateFrame) error {
cc := rl.cc
cs := rl.streamByID(f.StreamID)
cs := rl.streamByID(f.StreamID, notHeaderOrDataFrame)
if f.StreamID != 0 && cs == nil {
return nil
}
@ -3127,7 +3208,7 @@ func (rl *clientConnReadLoop) processWindowUpdate(f *WindowUpdateFrame) error {
}
func (rl *clientConnReadLoop) processResetStream(f *RSTStreamFrame) error {
cs := rl.streamByID(f.StreamID)
cs := rl.streamByID(f.StreamID, notHeaderOrDataFrame)
if cs == nil {
// TODO: return error if server tries to RST_STREAM an idle stream
return nil
@ -3205,6 +3286,7 @@ func (rl *clientConnReadLoop) processPing(f *PingFrame) error {
if cc.pendingResets > 0 {
// See clientStream.cleanupWriteRequest.
cc.pendingResets = 0
cc.rstStreamPingsBlocked = true
cc.cond.Broadcast()
}
return nil


@ -246,6 +246,18 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e
return sendfile(outfd, infd, offset, count)
}
func Dup3(oldfd, newfd, flags int) error {
if oldfd == newfd || flags&^O_CLOEXEC != 0 {
return EINVAL
}
how := F_DUP2FD
if flags&O_CLOEXEC != 0 {
how = F_DUP2FD_CLOEXEC
}
_, err := fcntl(oldfd, how, newfd)
return err
}
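For illustration only (not part of this diff): the new Dup3 above emulates dup3(2) via fcntl; the F_DUP2FD / F_DUP2FD_CLOEXEC pair suggests a platform without a native dup3, likely Solaris/illumos. A small usage sketch; the target descriptor number is arbitrary:

package main

import (
	"log"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	f, err := os.Open("/etc/hosts")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Duplicate f's descriptor onto fd 10 and mark the copy close-on-exec in
	// one step. Per the implementation above, oldfd == newfd or any flag
	// other than O_CLOEXEC yields EINVAL.
	const newfd = 10 // illustrative target descriptor
	if err := unix.Dup3(int(f.Fd()), newfd, unix.O_CLOEXEC); err != nil {
		log.Fatal(err)
	}
	defer unix.Close(newfd)
}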
/*
* Exposed directly
*/


@ -321,6 +321,9 @@ const (
AUDIT_INTEGRITY_STATUS = 0x70a
AUDIT_IPC = 0x517
AUDIT_IPC_SET_PERM = 0x51f
AUDIT_IPE_ACCESS = 0x58c
AUDIT_IPE_CONFIG_CHANGE = 0x58d
AUDIT_IPE_POLICY_LOAD = 0x58e
AUDIT_KERNEL = 0x7d0
AUDIT_KERNEL_OTHER = 0x524
AUDIT_KERN_MODULE = 0x532
@ -489,6 +492,7 @@ const (
BPF_F_ID = 0x20
BPF_F_NETFILTER_IP_DEFRAG = 0x1
BPF_F_QUERY_EFFECTIVE = 0x1
BPF_F_REDIRECT_FLAGS = 0x19
BPF_F_REPLACE = 0x4
BPF_F_SLEEPABLE = 0x10
BPF_F_STRICT_ALIGNMENT = 0x1
@ -1166,6 +1170,7 @@ const (
EXTA = 0xe
EXTB = 0xf
F2FS_SUPER_MAGIC = 0xf2f52010
FALLOC_FL_ALLOCATE_RANGE = 0x0
FALLOC_FL_COLLAPSE_RANGE = 0x8
FALLOC_FL_INSERT_RANGE = 0x20
FALLOC_FL_KEEP_SIZE = 0x1
@ -1799,6 +1804,8 @@ const (
LANDLOCK_ACCESS_NET_BIND_TCP = 0x1
LANDLOCK_ACCESS_NET_CONNECT_TCP = 0x2
LANDLOCK_CREATE_RULESET_VERSION = 0x1
LANDLOCK_SCOPE_ABSTRACT_UNIX_SOCKET = 0x1
LANDLOCK_SCOPE_SIGNAL = 0x2
LINUX_REBOOT_CMD_CAD_OFF = 0x0
LINUX_REBOOT_CMD_CAD_ON = 0x89abcdef
LINUX_REBOOT_CMD_HALT = 0xcdef0123
@ -1924,6 +1931,7 @@ const (
MNT_FORCE = 0x1
MNT_ID_REQ_SIZE_VER0 = 0x18
MNT_ID_REQ_SIZE_VER1 = 0x20
MNT_NS_INFO_SIZE_VER0 = 0x10
MODULE_INIT_COMPRESSED_FILE = 0x4
MODULE_INIT_IGNORE_MODVERSIONS = 0x1
MODULE_INIT_IGNORE_VERMAGIC = 0x2
@ -2970,6 +2978,7 @@ const (
RWF_WRITE_LIFE_NOT_SET = 0x0
SCHED_BATCH = 0x3
SCHED_DEADLINE = 0x6
SCHED_EXT = 0x7
SCHED_FIFO = 0x1
SCHED_FLAG_ALL = 0x7f
SCHED_FLAG_DL_OVERRUN = 0x4


@ -109,6 +109,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -297,6 +298,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -335,6 +338,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -109,6 +109,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -298,6 +299,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -336,6 +339,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -303,6 +304,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -341,6 +344,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -112,6 +112,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -205,6 +206,7 @@ const (
PERF_EVENT_IOC_SET_BPF = 0x40042408
PERF_EVENT_IOC_SET_FILTER = 0x40082406
PERF_EVENT_IOC_SET_OUTPUT = 0x2405
POE_MAGIC = 0x504f4530
PPPIOCATTACH = 0x4004743d
PPPIOCATTCHAN = 0x40047438
PPPIOCBRIDGECHAN = 0x40047435
@ -294,6 +296,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -332,6 +336,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -109,6 +109,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -290,6 +291,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -328,6 +331,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x100
@ -296,6 +297,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -334,6 +337,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x1029
SO_DONTROUTE = 0x10
SO_ERROR = 0x1007


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x100
@ -296,6 +297,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -334,6 +337,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x1029
SO_DONTROUTE = 0x10
SO_ERROR = 0x1007


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x100
@ -296,6 +297,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -334,6 +337,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x1029
SO_DONTROUTE = 0x10
SO_ERROR = 0x1007


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x100
@ -296,6 +297,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -334,6 +337,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x1029
SO_DONTROUTE = 0x10
SO_ERROR = 0x1007


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x4000
ICANON = 0x100
IEXTEN = 0x400
@ -351,6 +352,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -389,6 +392,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x4000
ICANON = 0x100
IEXTEN = 0x400
@ -355,6 +356,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -393,6 +396,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x4000
ICANON = 0x100
IEXTEN = 0x400
@ -355,6 +356,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -393,6 +396,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -287,6 +288,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -325,6 +328,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -108,6 +108,7 @@ const (
HIDIOCGRAWINFO = 0x80084803
HIDIOCGRDESC = 0x90044802
HIDIOCGRDESCSIZE = 0x80044801
HIDIOCREVOKE = 0x4004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -359,6 +360,8 @@ const (
RTC_WIE_ON = 0x700f
RTC_WKALM_RD = 0x80287010
RTC_WKALM_SET = 0x4028700f
SCM_DEVMEM_DMABUF = 0x4f
SCM_DEVMEM_LINEAR = 0x4e
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
@ -397,6 +400,9 @@ const (
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DETACH_REUSEPORT_BPF = 0x44
SO_DEVMEM_DMABUF = 0x4f
SO_DEVMEM_DONTNEED = 0x50
SO_DEVMEM_LINEAR = 0x4e
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4


@ -112,6 +112,7 @@ const (
HIDIOCGRAWINFO = 0x40084803
HIDIOCGRDESC = 0x50044802
HIDIOCGRDESCSIZE = 0x40044801
HIDIOCREVOKE = 0x8004480d
HUPCL = 0x400
ICANON = 0x2
IEXTEN = 0x8000
@ -350,6 +351,8 @@ const (
RTC_WIE_ON = 0x2000700f
RTC_WKALM_RD = 0x40287010
RTC_WKALM_SET = 0x8028700f
SCM_DEVMEM_DMABUF = 0x58
SCM_DEVMEM_LINEAR = 0x57
SCM_TIMESTAMPING = 0x23
SCM_TIMESTAMPING_OPT_STATS = 0x38
SCM_TIMESTAMPING_PKTINFO = 0x3c
@ -436,6 +439,9 @@ const (
SO_CNX_ADVICE = 0x37
SO_COOKIE = 0x3b
SO_DETACH_REUSEPORT_BPF = 0x47
SO_DEVMEM_DMABUF = 0x58
SO_DEVMEM_DONTNEED = 0x59
SO_DEVMEM_LINEAR = 0x57
SO_DOMAIN = 0x1029
SO_DONTROUTE = 0x10
SO_ERROR = 0x1007


@ -462,11 +462,14 @@ type FdSet struct {
const (
SizeofIfMsghdr = 0x70
SizeofIfMsghdr2 = 0xa0
SizeofIfData = 0x60
SizeofIfData64 = 0x80
SizeofIfaMsghdr = 0x14
SizeofIfmaMsghdr = 0x10
SizeofIfmaMsghdr2 = 0x14
SizeofRtMsghdr = 0x5c
SizeofRtMsghdr2 = 0x5c
SizeofRtMetrics = 0x38
)
@ -480,6 +483,20 @@ type IfMsghdr struct {
Data IfData
}
type IfMsghdr2 struct {
Msglen uint16
Version uint8
Type uint8
Addrs int32
Flags int32
Index uint16
Snd_len int32
Snd_maxlen int32
Snd_drops int32
Timer int32
Data IfData64
}
type IfData struct {
Type uint8
Typelen uint8
@ -512,6 +529,34 @@ type IfData struct {
Reserved2 uint32
}
type IfData64 struct {
Type uint8
Typelen uint8
Physical uint8
Addrlen uint8
Hdrlen uint8
Recvquota uint8
Xmitquota uint8
Unused1 uint8
Mtu uint32
Metric uint32
Baudrate uint64
Ipackets uint64
Ierrors uint64
Opackets uint64
Oerrors uint64
Collisions uint64
Ibytes uint64
Obytes uint64
Imcasts uint64
Omcasts uint64
Iqdrops uint64
Noproto uint64
Recvtiming uint32
Xmittiming uint32
Lastchange Timeval32
}
type IfaMsghdr struct {
Msglen uint16
Version uint8
@ -557,6 +602,21 @@ type RtMsghdr struct {
Rmx RtMetrics
}
type RtMsghdr2 struct {
Msglen uint16
Version uint8
Type uint8
Index uint16
Flags int32
Addrs int32
Refcnt int32
Parentflags int32
Reserved int32
Use int32
Inits uint32
Rmx RtMetrics
}
type RtMetrics struct {
Locks uint32
Mtu uint32


@ -462,11 +462,14 @@ type FdSet struct {
const (
SizeofIfMsghdr = 0x70
SizeofIfMsghdr2 = 0xa0
SizeofIfData = 0x60
SizeofIfData64 = 0x80
SizeofIfaMsghdr = 0x14
SizeofIfmaMsghdr = 0x10
SizeofIfmaMsghdr2 = 0x14
SizeofRtMsghdr = 0x5c
SizeofRtMsghdr2 = 0x5c
SizeofRtMetrics = 0x38
)
@ -480,6 +483,20 @@ type IfMsghdr struct {
Data IfData
}
type IfMsghdr2 struct {
Msglen uint16
Version uint8
Type uint8
Addrs int32
Flags int32
Index uint16
Snd_len int32
Snd_maxlen int32
Snd_drops int32
Timer int32
Data IfData64
}
type IfData struct {
Type uint8
Typelen uint8
@ -512,6 +529,34 @@ type IfData struct {
Reserved2 uint32
}
type IfData64 struct {
Type uint8
Typelen uint8
Physical uint8
Addrlen uint8
Hdrlen uint8
Recvquota uint8
Xmitquota uint8
Unused1 uint8
Mtu uint32
Metric uint32
Baudrate uint64
Ipackets uint64
Ierrors uint64
Opackets uint64
Oerrors uint64
Collisions uint64
Ibytes uint64
Obytes uint64
Imcasts uint64
Omcasts uint64
Iqdrops uint64
Noproto uint64
Recvtiming uint32
Xmittiming uint32
Lastchange Timeval32
}
type IfaMsghdr struct {
Msglen uint16
Version uint8
@ -557,6 +602,21 @@ type RtMsghdr struct {
Rmx RtMetrics
}
type RtMsghdr2 struct {
Msglen uint16
Version uint8
Type uint8
Index uint16
Flags int32
Addrs int32
Refcnt int32
Parentflags int32
Reserved int32
Use int32
Inits uint32
Rmx RtMetrics
}
type RtMetrics struct {
Locks uint32
Mtu uint32


@ -2594,8 +2594,8 @@ const (
SOF_TIMESTAMPING_BIND_PHC = 0x8000
SOF_TIMESTAMPING_OPT_ID_TCP = 0x10000
SOF_TIMESTAMPING_LAST = 0x10000
SOF_TIMESTAMPING_MASK = 0x1ffff
SOF_TIMESTAMPING_LAST = 0x20000
SOF_TIMESTAMPING_MASK = 0x3ffff
SCM_TSTAMP_SND = 0x0
SCM_TSTAMP_SCHED = 0x1
@ -3541,7 +3541,7 @@ type Nhmsg struct {
type NexthopGrp struct {
Id uint32
Weight uint8
Resvd1 uint8
High uint8
Resvd2 uint16
}
@ -3802,7 +3802,7 @@ const (
ETHTOOL_MSG_PSE_GET = 0x24
ETHTOOL_MSG_PSE_SET = 0x25
ETHTOOL_MSG_RSS_GET = 0x26
ETHTOOL_MSG_USER_MAX = 0x2c
ETHTOOL_MSG_USER_MAX = 0x2d
ETHTOOL_MSG_KERNEL_NONE = 0x0
ETHTOOL_MSG_STRSET_GET_REPLY = 0x1
ETHTOOL_MSG_LINKINFO_GET_REPLY = 0x2
@ -3842,7 +3842,7 @@ const (
ETHTOOL_MSG_MODULE_NTF = 0x24
ETHTOOL_MSG_PSE_GET_REPLY = 0x25
ETHTOOL_MSG_RSS_GET_REPLY = 0x26
ETHTOOL_MSG_KERNEL_MAX = 0x2c
ETHTOOL_MSG_KERNEL_MAX = 0x2e
ETHTOOL_FLAG_COMPACT_BITSETS = 0x1
ETHTOOL_FLAG_OMIT_REPLY = 0x2
ETHTOOL_FLAG_STATS = 0x4
@ -3850,7 +3850,7 @@ const (
ETHTOOL_A_HEADER_DEV_INDEX = 0x1
ETHTOOL_A_HEADER_DEV_NAME = 0x2
ETHTOOL_A_HEADER_FLAGS = 0x3
ETHTOOL_A_HEADER_MAX = 0x3
ETHTOOL_A_HEADER_MAX = 0x4
ETHTOOL_A_BITSET_BIT_UNSPEC = 0x0
ETHTOOL_A_BITSET_BIT_INDEX = 0x1
ETHTOOL_A_BITSET_BIT_NAME = 0x2
@ -4031,11 +4031,11 @@ const (
ETHTOOL_A_CABLE_RESULT_UNSPEC = 0x0
ETHTOOL_A_CABLE_RESULT_PAIR = 0x1
ETHTOOL_A_CABLE_RESULT_CODE = 0x2
ETHTOOL_A_CABLE_RESULT_MAX = 0x2
ETHTOOL_A_CABLE_RESULT_MAX = 0x3
ETHTOOL_A_CABLE_FAULT_LENGTH_UNSPEC = 0x0
ETHTOOL_A_CABLE_FAULT_LENGTH_PAIR = 0x1
ETHTOOL_A_CABLE_FAULT_LENGTH_CM = 0x2
ETHTOOL_A_CABLE_FAULT_LENGTH_MAX = 0x2
ETHTOOL_A_CABLE_FAULT_LENGTH_MAX = 0x3
ETHTOOL_A_CABLE_TEST_NTF_STATUS_UNSPEC = 0x0
ETHTOOL_A_CABLE_TEST_NTF_STATUS_STARTED = 0x1
ETHTOOL_A_CABLE_TEST_NTF_STATUS_COMPLETED = 0x2
@ -4200,7 +4200,8 @@ type (
}
PtpSysOffsetExtended struct {
Samples uint32
Rsv [3]uint32
Clockid int32
Rsv [2]uint32
Ts [25][3]PtpClockTime
}
PtpSysOffsetPrecise struct {
@ -4399,6 +4400,7 @@ const (
type LandlockRulesetAttr struct {
Access_fs uint64
Access_net uint64
Scoped uint64
}
type LandlockPathBeneathAttr struct {

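For illustration only (not part of this diff): the new Scoped field pairs with the LANDLOCK_SCOPE_ABSTRACT_UNIX_SOCKET and LANDLOCK_SCOPE_SIGNAL constants added earlier in this bump (the kernel's "scoped" Landlock restrictions). A rough sketch using the raw syscall numbers rather than assuming any higher-level wrapper; kernel support and the choice of scopes are assumptions of the example:

package main

import (
	"log"
	"unsafe"

	"golang.org/x/sys/unix"
)

func main() {
	// Deny interaction with abstract unix sockets and with signals sent
	// outside the Landlock domain; no filesystem or network rules here.
	attr := unix.LandlockRulesetAttr{
		Scoped: unix.LANDLOCK_SCOPE_ABSTRACT_UNIX_SOCKET | unix.LANDLOCK_SCOPE_SIGNAL,
	}
	rfd, _, errno := unix.Syscall(unix.SYS_LANDLOCK_CREATE_RULESET,
		uintptr(unsafe.Pointer(&attr)), unsafe.Sizeof(attr), 0)
	if errno != 0 {
		log.Fatal(errno) // fails on kernels without scoped support
	}
	defer unix.Close(int(rfd))

	if err := unix.Prctl(unix.PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0); err != nil {
		log.Fatal(err)
	}
	if _, _, errno := unix.Syscall(unix.SYS_LANDLOCK_RESTRICT_SELF, rfd, 0, 0); errno != 0 {
		log.Fatal(errno)
	}
}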

@ -43,8 +43,8 @@ type DLL struct {
// LoadDLL loads DLL file into memory.
//
// Warning: using LoadDLL without an absolute path name is subject to
// DLL preloading attacks. To safely load a system DLL, use LazyDLL
// with System set to true, or use LoadLibraryEx directly.
// DLL preloading attacks. To safely load a system DLL, use [NewLazySystemDLL],
// or use [LoadLibraryEx] directly.
func LoadDLL(name string) (dll *DLL, err error) {
namep, err := UTF16PtrFromString(name)
if err != nil {
@ -271,6 +271,9 @@ func (d *LazyDLL) NewProc(name string) *LazyProc {
}
// NewLazyDLL creates new LazyDLL associated with DLL file.
//
// Warning: using NewLazyDLL without an absolute path name is subject to
// DLL preloading attacks. To safely load a system DLL, use [NewLazySystemDLL].
func NewLazyDLL(name string) *LazyDLL {
return &LazyDLL{Name: name}
}
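For illustration only (not part of this diff): the reworked doc comments above steer callers to NewLazySystemDLL for system libraries. A brief sketch; GetTickCount64 is just a convenient kernel32 export to call:

package main

import (
	"fmt"

	"golang.org/x/sys/windows"
)

func main() {
	// Resolved from the Windows system directory only, avoiding the DLL
	// preloading attacks the comment warns about.
	k32 := windows.NewLazySystemDLL("kernel32.dll")
	getTickCount64 := k32.NewProc("GetTickCount64")

	ms, _, _ := getTickCount64.Call()
	fmt.Printf("system uptime: %d ms\n", ms)
}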
@ -410,7 +413,3 @@ func loadLibraryEx(name string, system bool) (*DLL, error) {
}
return &DLL{Name: name, Handle: h}, nil
}
type errString string
func (s errString) Error() string { return string(s) }


@ -168,6 +168,8 @@ func NewCallbackCDecl(fn interface{}) uintptr {
//sys CreateNamedPipe(name *uint16, flags uint32, pipeMode uint32, maxInstances uint32, outSize uint32, inSize uint32, defaultTimeout uint32, sa *SecurityAttributes) (handle Handle, err error) [failretval==InvalidHandle] = CreateNamedPipeW
//sys ConnectNamedPipe(pipe Handle, overlapped *Overlapped) (err error)
//sys DisconnectNamedPipe(pipe Handle) (err error)
//sys GetNamedPipeClientProcessId(pipe Handle, clientProcessID *uint32) (err error)
//sys GetNamedPipeServerProcessId(pipe Handle, serverProcessID *uint32) (err error)
//sys GetNamedPipeInfo(pipe Handle, flags *uint32, outSize *uint32, inSize *uint32, maxInstances *uint32) (err error)
//sys GetNamedPipeHandleState(pipe Handle, state *uint32, curInstances *uint32, maxCollectionCount *uint32, collectDataTimeout *uint32, userName *uint16, maxUserNameSize uint32) (err error) = GetNamedPipeHandleStateW
//sys SetNamedPipeHandleState(pipe Handle, state *uint32, maxCollectionCount *uint32, collectDataTimeout *uint32) (err error) = SetNamedPipeHandleState


@ -176,6 +176,7 @@ const (
WAIT_FAILED = 0xFFFFFFFF
// Access rights for process.
PROCESS_ALL_ACCESS = 0xFFFF
PROCESS_CREATE_PROCESS = 0x0080
PROCESS_CREATE_THREAD = 0x0002
PROCESS_DUP_HANDLE = 0x0040

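For illustration only (not part of this diff): the new PROCESS_CREATE_PROCESS access right fits the usual OpenProcess pattern. A hypothetical sketch; the target PID is illustrative:

package main

import (
	"log"

	"golang.org/x/sys/windows"
)

func main() {
	const pid = 4242 // illustrative target process ID
	h, err := windows.OpenProcess(windows.PROCESS_CREATE_PROCESS, false, pid)
	if err != nil {
		log.Fatal(err)
	}
	defer windows.CloseHandle(h)
	// h can now be used where a handle with the "create process" right is
	// required, e.g. as a parent-process attribute when spawning children.
}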

@ -280,8 +280,10 @@ var (
procGetMaximumProcessorCount = modkernel32.NewProc("GetMaximumProcessorCount")
procGetModuleFileNameW = modkernel32.NewProc("GetModuleFileNameW")
procGetModuleHandleExW = modkernel32.NewProc("GetModuleHandleExW")
procGetNamedPipeClientProcessId = modkernel32.NewProc("GetNamedPipeClientProcessId")
procGetNamedPipeHandleStateW = modkernel32.NewProc("GetNamedPipeHandleStateW")
procGetNamedPipeInfo = modkernel32.NewProc("GetNamedPipeInfo")
procGetNamedPipeServerProcessId = modkernel32.NewProc("GetNamedPipeServerProcessId")
procGetOverlappedResult = modkernel32.NewProc("GetOverlappedResult")
procGetPriorityClass = modkernel32.NewProc("GetPriorityClass")
procGetProcAddress = modkernel32.NewProc("GetProcAddress")
@ -1612,7 +1614,7 @@ func DwmSetWindowAttribute(hwnd HWND, attribute uint32, value unsafe.Pointer, si
}
func CancelMibChangeNotify2(notificationHandle Handle) (errcode error) {
r0, _, _ := syscall.SyscallN(procCancelMibChangeNotify2.Addr(), uintptr(notificationHandle))
r0, _, _ := syscall.Syscall(procCancelMibChangeNotify2.Addr(), 1, uintptr(notificationHandle), 0, 0)
if r0 != 0 {
errcode = syscall.Errno(r0)
}
@ -1652,7 +1654,7 @@ func GetIfEntry(pIfRow *MibIfRow) (errcode error) {
}
func GetIfEntry2Ex(level uint32, row *MibIfRow2) (errcode error) {
r0, _, _ := syscall.SyscallN(procGetIfEntry2Ex.Addr(), uintptr(level), uintptr(unsafe.Pointer(row)))
r0, _, _ := syscall.Syscall(procGetIfEntry2Ex.Addr(), 2, uintptr(level), uintptr(unsafe.Pointer(row)), 0)
if r0 != 0 {
errcode = syscall.Errno(r0)
}
@ -1660,7 +1662,7 @@ func GetIfEntry2Ex(level uint32, row *MibIfRow2) (errcode error) {
}
func GetUnicastIpAddressEntry(row *MibUnicastIpAddressRow) (errcode error) {
r0, _, _ := syscall.SyscallN(procGetUnicastIpAddressEntry.Addr(), uintptr(unsafe.Pointer(row)))
r0, _, _ := syscall.Syscall(procGetUnicastIpAddressEntry.Addr(), 1, uintptr(unsafe.Pointer(row)), 0, 0)
if r0 != 0 {
errcode = syscall.Errno(r0)
}
@ -1672,7 +1674,7 @@ func NotifyIpInterfaceChange(family uint16, callback uintptr, callerContext unsa
if initialNotification {
_p0 = 1
}
r0, _, _ := syscall.SyscallN(procNotifyIpInterfaceChange.Addr(), uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle)))
r0, _, _ := syscall.Syscall6(procNotifyIpInterfaceChange.Addr(), 5, uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle)), 0)
if r0 != 0 {
errcode = syscall.Errno(r0)
}
@ -1684,7 +1686,7 @@ func NotifyUnicastIpAddressChange(family uint16, callback uintptr, callerContext
if initialNotification {
_p0 = 1
}
r0, _, _ := syscall.SyscallN(procNotifyUnicastIpAddressChange.Addr(), uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle)))
r0, _, _ := syscall.Syscall6(procNotifyUnicastIpAddressChange.Addr(), 5, uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle)), 0)
if r0 != 0 {
errcode = syscall.Errno(r0)
}
@ -2446,6 +2448,14 @@ func GetModuleHandleEx(flags uint32, moduleName *uint16, module *Handle) (err er
return
}
func GetNamedPipeClientProcessId(pipe Handle, clientProcessID *uint32) (err error) {
r1, _, e1 := syscall.Syscall(procGetNamedPipeClientProcessId.Addr(), 2, uintptr(pipe), uintptr(unsafe.Pointer(clientProcessID)), 0)
if r1 == 0 {
err = errnoErr(e1)
}
return
}
func GetNamedPipeHandleState(pipe Handle, state *uint32, curInstances *uint32, maxCollectionCount *uint32, collectDataTimeout *uint32, userName *uint16, maxUserNameSize uint32) (err error) {
r1, _, e1 := syscall.Syscall9(procGetNamedPipeHandleStateW.Addr(), 7, uintptr(pipe), uintptr(unsafe.Pointer(state)), uintptr(unsafe.Pointer(curInstances)), uintptr(unsafe.Pointer(maxCollectionCount)), uintptr(unsafe.Pointer(collectDataTimeout)), uintptr(unsafe.Pointer(userName)), uintptr(maxUserNameSize), 0, 0)
if r1 == 0 {
@ -2462,6 +2472,14 @@ func GetNamedPipeInfo(pipe Handle, flags *uint32, outSize *uint32, inSize *uint3
return
}
func GetNamedPipeServerProcessId(pipe Handle, serverProcessID *uint32) (err error) {
r1, _, e1 := syscall.Syscall(procGetNamedPipeServerProcessId.Addr(), 2, uintptr(pipe), uintptr(unsafe.Pointer(serverProcessID)), 0)
if r1 == 0 {
err = errnoErr(e1)
}
return
}
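For illustration only (not part of this diff): the two wrappers above expose GetNamedPipeClientProcessId and GetNamedPipeServerProcessId. A sketch of querying both ends of an already-connected pipe; obtaining and connecting the handle is elided:

package main

import (
	"fmt"
	"log"

	"golang.org/x/sys/windows"
)

// pipePeerPIDs reports the process IDs on both ends of a connected named pipe.
func pipePeerPIDs(pipe windows.Handle) (client, server uint32, err error) {
	if err = windows.GetNamedPipeClientProcessId(pipe, &client); err != nil {
		return
	}
	err = windows.GetNamedPipeServerProcessId(pipe, &server)
	return
}

func main() {
	var pipe windows.Handle // placeholder: a real handle from CreateNamedPipe/CreateFile
	client, server, err := pipePeerPIDs(pipe)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("client PID %d, server PID %d\n", client, server)
}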
func GetOverlappedResult(handle Handle, overlapped *Overlapped, done *uint32, wait bool) (err error) {
var _p0 uint32
if wait {

vendor/golang.org/x/text/cases/cases.go generated vendored Normal file

@ -0,0 +1,162 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run gen.go gen_trieval.go
// Package cases provides general and language-specific case mappers.
package cases // import "golang.org/x/text/cases"
import (
"golang.org/x/text/language"
"golang.org/x/text/transform"
)
// References:
// - Unicode Reference Manual Chapter 3.13, 4.2, and 5.18.
// - https://www.unicode.org/reports/tr29/
// - https://www.unicode.org/Public/6.3.0/ucd/CaseFolding.txt
// - https://www.unicode.org/Public/6.3.0/ucd/SpecialCasing.txt
// - https://www.unicode.org/Public/6.3.0/ucd/DerivedCoreProperties.txt
// - https://www.unicode.org/Public/6.3.0/ucd/auxiliary/WordBreakProperty.txt
// - https://www.unicode.org/Public/6.3.0/ucd/auxiliary/WordBreakTest.txt
// - http://userguide.icu-project.org/transforms/casemappings
// TODO:
// - Case folding
// - Wide and Narrow?
// - Segmenter option for title casing.
// - ASCII fast paths
// - Encode Soft-Dotted property within trie somehow.
// A Caser transforms given input to a certain case. It implements
// transform.Transformer.
//
// A Caser may be stateful and should therefore not be shared between
// goroutines.
type Caser struct {
t transform.SpanningTransformer
}
// Bytes returns a new byte slice with the result of converting b to the case
// form implemented by c.
func (c Caser) Bytes(b []byte) []byte {
b, _, _ = transform.Bytes(c.t, b)
return b
}
// String returns a string with the result of transforming s to the case form
// implemented by c.
func (c Caser) String(s string) string {
s, _, _ = transform.String(c.t, s)
return s
}
// Reset resets the Caser to be reused for new input after a previous call to
// Transform.
func (c Caser) Reset() { c.t.Reset() }
// Transform implements the transform.Transformer interface and transforms the
// given input to the case form implemented by c.
func (c Caser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
return c.t.Transform(dst, src, atEOF)
}
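For illustration only (not part of this vendored file): because Caser satisfies transform.Transformer, it also composes with the streaming helpers in golang.org/x/text/transform. A small sketch:

package main

import (
	"fmt"
	"io"
	"strings"

	"golang.org/x/text/cases"
	"golang.org/x/text/language"
	"golang.org/x/text/transform"
)

func main() {
	// Wrap an io.Reader so text is upper-cased as it streams through.
	src := strings.NewReader("streaming case mapping")
	r := transform.NewReader(src, cases.Upper(language.Und))

	out, err := io.ReadAll(r)
	if err != nil {
		fmt.Println("transform failed:", err)
		return
	}
	fmt.Println(string(out)) // STREAMING CASE MAPPING
}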
// Span implements the transform.SpanningTransformer interface.
func (c Caser) Span(src []byte, atEOF bool) (n int, err error) {
return c.t.Span(src, atEOF)
}
// Upper returns a Caser for language-specific uppercasing.
func Upper(t language.Tag, opts ...Option) Caser {
return Caser{makeUpper(t, getOpts(opts...))}
}
// Lower returns a Caser for language-specific lowercasing.
func Lower(t language.Tag, opts ...Option) Caser {
return Caser{makeLower(t, getOpts(opts...))}
}
// Title returns a Caser for language-specific title casing. It uses an
// approximation of the default Unicode Word Break algorithm.
func Title(t language.Tag, opts ...Option) Caser {
return Caser{makeTitle(t, getOpts(opts...))}
}
// Fold returns a Caser that implements Unicode case folding. The returned Caser
// is stateless and safe to use concurrently by multiple goroutines.
//
// Case folding does not normalize the input and may not preserve a normal form.
// Use the collate or search package for more convenient and linguistically
// sound comparisons. Use golang.org/x/text/secure/precis for string comparisons
// where security aspects are a concern.
func Fold(opts ...Option) Caser {
return Caser{makeFold(getOpts(opts...))}
}
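For illustration only (not part of this vendored file): a small usage sketch of the constructors above; the expected results are noted in the comments:

package main

import (
	"fmt"

	"golang.org/x/text/cases"
	"golang.org/x/text/language"
)

func main() {
	title := cases.Title(language.English)
	trLower := cases.Lower(language.Turkish)
	fold := cases.Fold()

	fmt.Println(title.String("hello world"))  // "Hello World"
	fmt.Println(trLower.String("DİYARBAKIR")) // "diyarbakır" (Turkish dotted/dotless i)
	fmt.Println(fold.String("Straße"))        // "strasse" (case folding, not lowercasing)
}

Note that, per the package comment, a Caser may be stateful, so these values should not be shared between goroutines; Fold's Caser is documented as the stateless exception.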
// An Option is used to modify the behavior of a Caser.
type Option func(o options) options
// TODO: consider these options to take a boolean as well, like FinalSigma.
// The advantage of using this approach is that other providers of a lower-case
// algorithm could set different defaults by prefixing a user-provided slice
// of options with their own. This is handy, for instance, for the precis
// package which would override the default to not handle the Greek final sigma.
var (
// NoLower disables the lowercasing of non-leading letters for a title
// caser.
NoLower Option = noLower
// Compact omits mappings in case folding for characters that would grow the
// input. (Unimplemented.)
Compact Option = compact
)
// TODO: option to preserve a normal form, if applicable?
type options struct {
noLower bool
simple bool
// TODO: segmenter, max ignorable, alternative versions, etc.
ignoreFinalSigma bool
}
func getOpts(o ...Option) (res options) {
for _, f := range o {
res = f(res)
}
return
}
func noLower(o options) options {
o.noLower = true
return o
}
func compact(o options) options {
o.simple = true
return o
}
// HandleFinalSigma specifies whether the special handling of Greek final sigma
// should be enabled. Unicode prescribes handling the Greek final sigma for all
// locales, but standards like IDNA and PRECIS override this default.
func HandleFinalSigma(enable bool) Option {
if enable {
return handleFinalSigma
}
return ignoreFinalSigma
}
func ignoreFinalSigma(o options) options {
o.ignoreFinalSigma = true
return o
}
func handleFinalSigma(o options) options {
o.ignoreFinalSigma = false
return o
}
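For illustration only (not part of this vendored file): a sketch contrasting the default Greek final-sigma handling with HandleFinalSigma(false), the behavior overridden by standards such as IDNA and PRECIS:

package main

import (
	"fmt"

	"golang.org/x/text/cases"
	"golang.org/x/text/language"
)

func main() {
	def := cases.Lower(language.Und)
	noSigma := cases.Lower(language.Und, cases.HandleFinalSigma(false))

	fmt.Println(def.String("ΟΔΟΣ"))     // "οδος": trailing Σ maps to final sigma ς
	fmt.Println(noSigma.String("ΟΔΟΣ")) // "οδοσ": plain σ, final-sigma handling disabled
}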

vendor/golang.org/x/text/cases/context.go generated vendored Normal file

@ -0,0 +1,376 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cases
import "golang.org/x/text/transform"
// A context is used for iterating over source bytes, fetching case info and
// writing to a destination buffer.
//
// Casing operations may need more than one rune of context to decide how a rune
// should be cased. Casing implementations should call checkpoint on context
// whenever it is known to be safe to return the runes processed so far.
//
// It is recommended for implementations to not allow for more than 30 case
// ignorables as lookahead (analogous to the limit in norm) and to use state if
// unbounded lookahead is needed for cased runes.
type context struct {
dst, src []byte
atEOF bool
pDst int // pDst points past the last written rune in dst.
pSrc int // pSrc points to the start of the currently scanned rune.
// checkpoints safe to return in Transform, where nDst <= pDst and nSrc <= pSrc.
nDst, nSrc int
err error
sz int // size of current rune
info info // case information of currently scanned rune
// State preserved across calls to Transform.
isMidWord bool // false if next cased letter needs to be title-cased.
}
func (c *context) Reset() {
c.isMidWord = false
}
// ret returns the return values for the Transform method. It checks whether
// there were insufficient bytes in src to complete and introduces an error
// accordingly, if necessary.
func (c *context) ret() (nDst, nSrc int, err error) {
if c.err != nil || c.nSrc == len(c.src) {
return c.nDst, c.nSrc, c.err
}
// This point is only reached by mappers if there was no short destination
// buffer. This means that the source buffer was exhausted and that c.sz was
// set to 0 by next.
if c.atEOF && c.pSrc == len(c.src) {
return c.pDst, c.pSrc, nil
}
return c.nDst, c.nSrc, transform.ErrShortSrc
}
// retSpan returns the return values for the Span method. It checks whether
// there were insufficient bytes in src to complete and introduces an error
// accordingly, if necessary.
func (c *context) retSpan() (n int, err error) {
_, nSrc, err := c.ret()
return nSrc, err
}
// checkpoint sets the return value buffer points for Transform to the current
// positions.
func (c *context) checkpoint() {
if c.err == nil {
c.nDst, c.nSrc = c.pDst, c.pSrc+c.sz
}
}
// unreadRune causes the last rune read by next to be reread on the next
// invocation of next. Only one unreadRune may be called after a call to next.
func (c *context) unreadRune() {
c.sz = 0
}
func (c *context) next() bool {
c.pSrc += c.sz
if c.pSrc == len(c.src) || c.err != nil {
c.info, c.sz = 0, 0
return false
}
v, sz := trie.lookup(c.src[c.pSrc:])
c.info, c.sz = info(v), sz
if c.sz == 0 {
if c.atEOF {
// A zero size means we have an incomplete rune. If we are atEOF,
// this means it is an illegal rune, which we will consume one
// byte at a time.
c.sz = 1
} else {
c.err = transform.ErrShortSrc
return false
}
}
return true
}
// writeBytes adds bytes to dst.
func (c *context) writeBytes(b []byte) bool {
if len(c.dst)-c.pDst < len(b) {
c.err = transform.ErrShortDst
return false
}
// This loop is faster than using copy.
for _, ch := range b {
c.dst[c.pDst] = ch
c.pDst++
}
return true
}
// writeString writes the given string to dst.
func (c *context) writeString(s string) bool {
if len(c.dst)-c.pDst < len(s) {
c.err = transform.ErrShortDst
return false
}
// This loop is faster than using copy.
for i := 0; i < len(s); i++ {
c.dst[c.pDst] = s[i]
c.pDst++
}
return true
}
// copy writes the current rune to dst.
func (c *context) copy() bool {
return c.writeBytes(c.src[c.pSrc : c.pSrc+c.sz])
}
// copyXOR copies the current rune to dst and modifies it by applying the XOR
// pattern of the case info. It is the responsibility of the caller to ensure
// that this is a rune with a XOR pattern defined.
func (c *context) copyXOR() bool {
if !c.copy() {
return false
}
if c.info&xorIndexBit == 0 {
// Fast path for 6-bit XOR pattern, which covers most cases.
c.dst[c.pDst-1] ^= byte(c.info >> xorShift)
} else {
// Interpret XOR bits as an index.
// TODO: test performance for unrolling this loop. Verify that we have
// at least two bytes and at most three.
idx := c.info >> xorShift
for p := c.pDst - 1; ; p-- {
c.dst[p] ^= xorData[idx]
idx--
if xorData[idx] == 0 {
break
}
}
}
return true
}
// hasPrefix returns true if src[pSrc:] starts with the given string.
func (c *context) hasPrefix(s string) bool {
b := c.src[c.pSrc:]
if len(b) < len(s) {
return false
}
for i, c := range b[:len(s)] {
if c != s[i] {
return false
}
}
return true
}
// caseType returns an info with only the case bits, normalized to either
// cLower, cUpper, cTitle or cUncased.
func (c *context) caseType() info {
cm := c.info & 0x7
if cm < 4 {
return cm
}
if cm >= cXORCase {
// xor the last bit of the rune with the case type bits.
b := c.src[c.pSrc+c.sz-1]
return info(b&1) ^ cm&0x3
}
if cm == cIgnorableCased {
return cLower
}
return cUncased
}
// lower writes the lowercase version of the current rune to dst.
func lower(c *context) bool {
ct := c.caseType()
if c.info&hasMappingMask == 0 || ct == cLower {
return c.copy()
}
if c.info&exceptionBit == 0 {
return c.copyXOR()
}
e := exceptions[c.info>>exceptionShift:]
offset := 2 + e[0]&lengthMask // size of header + fold string
if nLower := (e[1] >> lengthBits) & lengthMask; nLower != noChange {
return c.writeString(e[offset : offset+nLower])
}
return c.copy()
}
func isLower(c *context) bool {
ct := c.caseType()
if c.info&hasMappingMask == 0 || ct == cLower {
return true
}
if c.info&exceptionBit == 0 {
c.err = transform.ErrEndOfSpan
return false
}
e := exceptions[c.info>>exceptionShift:]
if nLower := (e[1] >> lengthBits) & lengthMask; nLower != noChange {
c.err = transform.ErrEndOfSpan
return false
}
return true
}
// upper writes the uppercase version of the current rune to dst.
func upper(c *context) bool {
ct := c.caseType()
if c.info&hasMappingMask == 0 || ct == cUpper {
return c.copy()
}
if c.info&exceptionBit == 0 {
return c.copyXOR()
}
e := exceptions[c.info>>exceptionShift:]
offset := 2 + e[0]&lengthMask // size of header + fold string
// Get length of first special case mapping.
n := (e[1] >> lengthBits) & lengthMask
if ct == cTitle {
// The first special case mapping is for lower. Set n to the second.
if n == noChange {
n = 0
}
n, e = e[1]&lengthMask, e[n:]
}
if n != noChange {
return c.writeString(e[offset : offset+n])
}
return c.copy()
}
// isUpper reports whether the current rune is in upper case.
func isUpper(c *context) bool {
ct := c.caseType()
if c.info&hasMappingMask == 0 || ct == cUpper {
return true
}
if c.info&exceptionBit == 0 {
c.err = transform.ErrEndOfSpan
return false
}
e := exceptions[c.info>>exceptionShift:]
// Get length of first special case mapping.
n := (e[1] >> lengthBits) & lengthMask
if ct == cTitle {
n = e[1] & lengthMask
}
if n != noChange {
c.err = transform.ErrEndOfSpan
return false
}
return true
}
// title writes the title case version of the current rune to dst.
func title(c *context) bool {
ct := c.caseType()
if c.info&hasMappingMask == 0 || ct == cTitle {
return c.copy()
}
if c.info&exceptionBit == 0 {
if ct == cLower {
return c.copyXOR()
}
return c.copy()
}
// Get the exception data.
e := exceptions[c.info>>exceptionShift:]
offset := 2 + e[0]&lengthMask // size of header + fold string
nFirst := (e[1] >> lengthBits) & lengthMask
if nTitle := e[1] & lengthMask; nTitle != noChange {
if nFirst != noChange {
e = e[nFirst:]
}
return c.writeString(e[offset : offset+nTitle])
}
if ct == cLower && nFirst != noChange {
// Use the uppercase version instead.
return c.writeString(e[offset : offset+nFirst])
}
// Already in correct case.
return c.copy()
}
// isTitle reports whether the current rune is in title case.
func isTitle(c *context) bool {
ct := c.caseType()
if c.info&hasMappingMask == 0 || ct == cTitle {
return true
}
if c.info&exceptionBit == 0 {
if ct == cLower {
c.err = transform.ErrEndOfSpan
return false
}
return true
}
// Get the exception data.
e := exceptions[c.info>>exceptionShift:]
if nTitle := e[1] & lengthMask; nTitle != noChange {
c.err = transform.ErrEndOfSpan
return false
}
nFirst := (e[1] >> lengthBits) & lengthMask
if ct == cLower && nFirst != noChange {
c.err = transform.ErrEndOfSpan
return false
}
return true
}
// foldFull writes the foldFull version of the current rune to dst.
func foldFull(c *context) bool {
if c.info&hasMappingMask == 0 {
return c.copy()
}
ct := c.caseType()
if c.info&exceptionBit == 0 {
if ct != cLower || c.info&inverseFoldBit != 0 {
return c.copyXOR()
}
return c.copy()
}
e := exceptions[c.info>>exceptionShift:]
n := e[0] & lengthMask
if n == 0 {
if ct == cLower {
return c.copy()
}
n = (e[1] >> lengthBits) & lengthMask
}
return c.writeString(e[2 : 2+n])
}
// isFoldFull reports whether the current rune is unchanged by full case folding.
func isFoldFull(c *context) bool {
if c.info&hasMappingMask == 0 {
return true
}
ct := c.caseType()
if c.info&exceptionBit == 0 {
if ct != cLower || c.info&inverseFoldBit != 0 {
c.err = transform.ErrEndOfSpan
return false
}
return true
}
e := exceptions[c.info>>exceptionShift:]
n := e[0] & lengthMask
if n == 0 && ct == cLower {
return true
}
c.err = transform.ErrEndOfSpan
return false
}

vendor/golang.org/x/text/cases/fold.go generated vendored Normal file

@ -0,0 +1,34 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cases
import "golang.org/x/text/transform"
type caseFolder struct{ transform.NopResetter }
// caseFolder implements the Transformer interface for doing case folding.
func (t *caseFolder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
c := context{dst: dst, src: src, atEOF: atEOF}
for c.next() {
foldFull(&c)
c.checkpoint()
}
return c.ret()
}
func (t *caseFolder) Span(src []byte, atEOF bool) (n int, err error) {
c := context{src: src, atEOF: atEOF}
for c.next() && isFoldFull(&c) {
c.checkpoint()
}
return c.retSpan()
}
func makeFold(o options) transform.SpanningTransformer {
// TODO: Special case folding, through option Language, Special/Turkic, or
// both.
// TODO: Implement Compact options.
return &caseFolder{}
}

vendor/golang.org/x/text/cases/icu.go generated vendored Normal file

@ -0,0 +1,61 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build icu
package cases
// Ideally these functions would be defined in a test file, but go test doesn't
// allow CGO in tests. The build tag should ensure either way that these
// functions will not end up in the package.
// TODO: Ensure that the correct ICU version is set.
/*
#cgo LDFLAGS: -licui18n.57 -licuuc.57
#include <stdlib.h>
#include <unicode/ustring.h>
#include <unicode/utypes.h>
#include <unicode/localpointer.h>
#include <unicode/ucasemap.h>
*/
import "C"
import "unsafe"
func doICU(tag, caser, input string) string {
err := C.UErrorCode(0)
loc := C.CString(tag)
cm := C.ucasemap_open(loc, C.uint32_t(0), &err)
buf := make([]byte, len(input)*4)
dst := (*C.char)(unsafe.Pointer(&buf[0]))
src := C.CString(input)
cn := C.int32_t(0)
switch caser {
case "fold":
cn = C.ucasemap_utf8FoldCase(cm,
dst, C.int32_t(len(buf)),
src, C.int32_t(len(input)),
&err)
case "lower":
cn = C.ucasemap_utf8ToLower(cm,
dst, C.int32_t(len(buf)),
src, C.int32_t(len(input)),
&err)
case "upper":
cn = C.ucasemap_utf8ToUpper(cm,
dst, C.int32_t(len(buf)),
src, C.int32_t(len(input)),
&err)
case "title":
cn = C.ucasemap_utf8ToTitle(cm,
dst, C.int32_t(len(buf)),
src, C.int32_t(len(input)),
&err)
}
return string(buf[:cn])
}

vendor/golang.org/x/text/cases/info.go generated vendored Normal file

@ -0,0 +1,82 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cases
func (c info) cccVal() info {
if c&exceptionBit != 0 {
return info(exceptions[c>>exceptionShift]) & cccMask
}
return c & cccMask
}
func (c info) cccType() info {
ccc := c.cccVal()
if ccc <= cccZero {
return cccZero
}
return ccc
}
// TODO: Implement full Unicode breaking algorithm:
// 1) Implement breaking in separate package.
// 2) Use the breaker here.
// 3) Compare table size and performance of using the more generic breaker.
//
// Note that we can extend the current algorithm to be much more accurate. This
// only makes sense, though, if the performance and/or space penalty of using
// the generic breaker is big. Extra data will only be needed for non-cased
// runes, which means there are sufficient bits left in the caseType.
// ICU prohibits breaking in such cases as well.
// For the purpose of title casing we use an approximation of the Unicode Word
// Breaking algorithm defined in Annex #29:
// https://www.unicode.org/reports/tr29/#Default_Grapheme_Cluster_Table.
//
// For our approximation, we group the Word Break types into the following
// categories, with associated rules:
//
// 1) Letter:
// ALetter, Hebrew_Letter, Numeric, ExtendNumLet, Extend, Format_FE, ZWJ.
// Rule: Never break between consecutive runes of this category.
//
// 2) Mid:
// MidLetter, MidNumLet, Single_Quote.
// (Cf. case-ignorable: MidLetter, MidNumLet, Single_Quote or cat is Mn,
// Me, Cf, Lm or Sk).
// Rule: Don't break between Letter and Mid, but break between two Mids.
//
// 3) Break:
// Any other category: NewLine, MidNum, CR, LF, Double_Quote, Katakana, and
// Other.
// These categories should always result in a break between two cased letters.
// Rule: Always break.
//
// Note 1: the Katakana and MidNum categories can, in esoteric cases, result in
// preventing a break between two cased letters. For now we will ignore this
// (e.g. [ALetter] [ExtendNumLet] [Katakana] [ExtendNumLet] [ALetter] and
// [ALetter] [Numeric] [MidNum] [Numeric] [ALetter].)
//
// Note 2: the rule for Mid is very approximate, but works in most cases. To
// improve, we could store the categories in the trie value and use a FA to
// manage breaks. See TODO comment above.
//
// Note 3: according to the spec, it is possible for the Extend category to
// introduce breaks between other categories grouped in Letter. However, this
// is undesirable for our purposes. ICU prevents breaks in such cases as well.
// isBreak returns whether this rune should introduce a break.
func (c info) isBreak() bool {
return c.cccVal() == cccBreak
}
// isLetter returns whether the rune is of break type ALetter, Hebrew_Letter,
// Numeric, ExtendNumLet, or Extend.
func (c info) isLetter() bool {
ccc := c.cccVal()
if ccc == cccZero {
return !c.isCaseIgnorable()
}
return ccc != cccBreak
}

vendor/golang.org/x/text/cases/map.go generated vendored Normal file

@ -0,0 +1,816 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cases
// This file contains the definitions of case mappings for all supported
// languages. The rules for the language-specific tailorings were taken and
// modified from the CLDR transform definitions in common/transforms.
import (
"strings"
"unicode"
"unicode/utf8"
"golang.org/x/text/internal"
"golang.org/x/text/language"
"golang.org/x/text/transform"
"golang.org/x/text/unicode/norm"
)
// A mapFunc takes a context set to the current rune and writes the mapped
// version to the same context. It may advance the context to the next rune. It
// returns whether a checkpoint is possible: whether the pDst bytes written to
// dst so far won't need changing as we see more source bytes.
type mapFunc func(*context) bool
// A spanFunc takes a context set to the current rune and returns whether this
// rune would be altered when written to the output. It may advance the context
// to the next rune. It returns whether a checkpoint is possible.
type spanFunc func(*context) bool
// maxIgnorable defines the maximum number of ignorables to consider for
// lookahead operations.
const maxIgnorable = 30
// supported lists the language tags for which we have tailorings.
const supported = "und af az el lt nl tr"
func init() {
tags := []language.Tag{}
for _, s := range strings.Split(supported, " ") {
tags = append(tags, language.MustParse(s))
}
matcher = internal.NewInheritanceMatcher(tags)
Supported = language.NewCoverage(tags)
}
var (
matcher *internal.InheritanceMatcher
Supported language.Coverage
// We keep the following lists separate, instead of having a single per-
// language struct, to give the compiler a chance to remove unused code.
// Some uppercase mappers are stateless, so we can precompute the
// Transformers and save a bit on runtime allocations.
upperFunc = []struct {
upper mapFunc
span spanFunc
}{
{nil, nil}, // und
{nil, nil}, // af
{aztrUpper(upper), isUpper}, // az
{elUpper, noSpan}, // el
{ltUpper(upper), noSpan}, // lt
{nil, nil}, // nl
{aztrUpper(upper), isUpper}, // tr
}
undUpper transform.SpanningTransformer = &undUpperCaser{}
undLower transform.SpanningTransformer = &undLowerCaser{}
undLowerIgnoreSigma transform.SpanningTransformer = &undLowerIgnoreSigmaCaser{}
lowerFunc = []mapFunc{
nil, // und
nil, // af
aztrLower, // az
nil, // el
ltLower, // lt
nil, // nl
aztrLower, // tr
}
titleInfos = []struct {
title mapFunc
lower mapFunc
titleSpan spanFunc
rewrite func(*context)
}{
{title, lower, isTitle, nil}, // und
{title, lower, isTitle, afnlRewrite}, // af
{aztrUpper(title), aztrLower, isTitle, nil}, // az
{title, lower, isTitle, nil}, // el
{ltUpper(title), ltLower, noSpan, nil}, // lt
{nlTitle, lower, nlTitleSpan, afnlRewrite}, // nl
{aztrUpper(title), aztrLower, isTitle, nil}, // tr
}
)
func makeUpper(t language.Tag, o options) transform.SpanningTransformer {
_, i, _ := matcher.Match(t)
f := upperFunc[i].upper
if f == nil {
return undUpper
}
return &simpleCaser{f: f, span: upperFunc[i].span}
}
func makeLower(t language.Tag, o options) transform.SpanningTransformer {
_, i, _ := matcher.Match(t)
f := lowerFunc[i]
if f == nil {
if o.ignoreFinalSigma {
return undLowerIgnoreSigma
}
return undLower
}
if o.ignoreFinalSigma {
return &simpleCaser{f: f, span: isLower}
}
return &lowerCaser{
first: f,
midWord: finalSigma(f),
}
}
func makeTitle(t language.Tag, o options) transform.SpanningTransformer {
_, i, _ := matcher.Match(t)
x := &titleInfos[i]
lower := x.lower
if o.noLower {
lower = (*context).copy
} else if !o.ignoreFinalSigma {
lower = finalSigma(lower)
}
return &titleCaser{
title: x.title,
lower: lower,
titleSpan: x.titleSpan,
rewrite: x.rewrite,
}
}
func noSpan(c *context) bool {
c.err = transform.ErrEndOfSpan
return false
}
// TODO: consider a similar special case for the fast majority lower case. This
// is a bit more involved so will require some more precise benchmarking to
// justify it.
type undUpperCaser struct{ transform.NopResetter }
// undUpperCaser implements the Transformer interface for doing an upper case
// mapping for the root locale (und). It eliminates the need for an allocation
// as it prevents escaping by not using function pointers.
func (t undUpperCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
c := context{dst: dst, src: src, atEOF: atEOF}
for c.next() {
upper(&c)
c.checkpoint()
}
return c.ret()
}
func (t undUpperCaser) Span(src []byte, atEOF bool) (n int, err error) {
c := context{src: src, atEOF: atEOF}
for c.next() && isUpper(&c) {
c.checkpoint()
}
return c.retSpan()
}
// undLowerIgnoreSigmaCaser implements the Transformer interface for doing
// a lower case mapping for the root locale (und) ignoring final sigma
// handling. This casing algorithm is used in some performance-critical packages
// like secure/precis and x/net/http/idna, which warrants its special-casing.
type undLowerIgnoreSigmaCaser struct{ transform.NopResetter }
func (t undLowerIgnoreSigmaCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
c := context{dst: dst, src: src, atEOF: atEOF}
for c.next() && lower(&c) {
c.checkpoint()
}
return c.ret()
}
// Span implements a generic lower-casing. This is possible as isLower works
// for all lowercasing variants. All lowercase variants only vary in how they
// transform a non-lowercase letter. They will never change an already lowercase
// letter. In addition, there is no state.
func (t undLowerIgnoreSigmaCaser) Span(src []byte, atEOF bool) (n int, err error) {
c := context{src: src, atEOF: atEOF}
for c.next() && isLower(&c) {
c.checkpoint()
}
return c.retSpan()
}
type simpleCaser struct {
context
f mapFunc
span spanFunc
}
// simpleCaser implements the Transformer interface for doing a case operation
// on a rune-by-rune basis.
func (t *simpleCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
c := context{dst: dst, src: src, atEOF: atEOF}
for c.next() && t.f(&c) {
c.checkpoint()
}
return c.ret()
}
func (t *simpleCaser) Span(src []byte, atEOF bool) (n int, err error) {
c := context{src: src, atEOF: atEOF}
for c.next() && t.span(&c) {
c.checkpoint()
}
return c.retSpan()
}
// undLowerCaser implements the Transformer interface for doing a lower case
// mapping for the root locale (und), including Greek final sigma handling.
type undLowerCaser struct{ transform.NopResetter }
func (t undLowerCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
c := context{dst: dst, src: src, atEOF: atEOF}
for isInterWord := true; c.next(); {
if isInterWord {
if c.info.isCased() {
if !lower(&c) {
break
}
isInterWord = false
} else if !c.copy() {
break
}
} else {
if c.info.isNotCasedAndNotCaseIgnorable() {
if !c.copy() {
break
}
isInterWord = true
} else if !c.hasPrefix("Σ") {
if !lower(&c) {
break
}
} else if !finalSigmaBody(&c) {
break
}
}
c.checkpoint()
}
return c.ret()
}
func (t undLowerCaser) Span(src []byte, atEOF bool) (n int, err error) {
c := context{src: src, atEOF: atEOF}
for c.next() && isLower(&c) {
c.checkpoint()
}
return c.retSpan()
}
// lowerCaser implements the Transformer interface. The default Unicode lower
// casing requires different treatment for the first and subsequent characters
// of a word, most notably to handle the Greek final Sigma.
type lowerCaser struct {
undLowerIgnoreSigmaCaser
context
first, midWord mapFunc
}
func (t *lowerCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
t.context = context{dst: dst, src: src, atEOF: atEOF}
c := &t.context
for isInterWord := true; c.next(); {
if isInterWord {
if c.info.isCased() {
if !t.first(c) {
break
}
isInterWord = false
} else if !c.copy() {
break
}
} else {
if c.info.isNotCasedAndNotCaseIgnorable() {
if !c.copy() {
break
}
isInterWord = true
} else if !t.midWord(c) {
break
}
}
c.checkpoint()
}
return c.ret()
}
// titleCaser implements the Transformer interface. Title casing algorithms
// distinguish between the first letter of a word and subsequent letters of the
// same word. It uses state to avoid requiring a potentially infinite lookahead.
type titleCaser struct {
context
// rune mappings used by the actual casing algorithms.
title mapFunc
lower mapFunc
titleSpan spanFunc
rewrite func(*context)
}
// Transform implements the standard Unicode title case algorithm as defined in
// Chapter 3 of The Unicode Standard:
// toTitlecase(X): Find the word boundaries in X according to Unicode Standard
// Annex #29, "Unicode Text Segmentation." For each word boundary, find the
// first cased character F following the word boundary. If F exists, map F to
// Titlecase_Mapping(F); then map all characters C between F and the following
// word boundary to Lowercase_Mapping(C).
func (t *titleCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
t.context = context{dst: dst, src: src, atEOF: atEOF, isMidWord: t.isMidWord}
c := &t.context
if !c.next() {
return c.ret()
}
for {
p := c.info
if t.rewrite != nil {
t.rewrite(c)
}
wasMid := p.isMid()
// Break out of this loop on failure to ensure we do not modify the
// state incorrectly.
if p.isCased() {
if !c.isMidWord {
if !t.title(c) {
break
}
c.isMidWord = true
} else if !t.lower(c) {
break
}
} else if !c.copy() {
break
} else if p.isBreak() {
c.isMidWord = false
}
// As we save the state of the transformer, it is safe to call
// checkpoint after any successful write.
if !(c.isMidWord && wasMid) {
c.checkpoint()
}
if !c.next() {
break
}
if wasMid && c.info.isMid() {
c.isMidWord = false
}
}
return c.ret()
}
func (t *titleCaser) Span(src []byte, atEOF bool) (n int, err error) {
t.context = context{src: src, atEOF: atEOF, isMidWord: t.isMidWord}
c := &t.context
if !c.next() {
return c.retSpan()
}
for {
p := c.info
if t.rewrite != nil {
t.rewrite(c)
}
wasMid := p.isMid()
// Break out of this loop on failure to ensure we do not modify the
// state incorrectly.
if p.isCased() {
if !c.isMidWord {
if !t.titleSpan(c) {
break
}
c.isMidWord = true
} else if !isLower(c) {
break
}
} else if p.isBreak() {
c.isMidWord = false
}
// As we save the state of the transformer, it is safe to call
// checkpoint after any successful write.
if !(c.isMidWord && wasMid) {
c.checkpoint()
}
if !c.next() {
break
}
if wasMid && c.info.isMid() {
c.isMidWord = false
}
}
return c.retSpan()
}
// finalSigma adds Greek final Sigma handling to another casing function. It
// determines whether a lowercased sigma should be σ or ς, by looking ahead for
// case ignorables and a cased letter.
func finalSigma(f mapFunc) mapFunc {
return func(c *context) bool {
if !c.hasPrefix("Σ") {
return f(c)
}
return finalSigmaBody(c)
}
}
func finalSigmaBody(c *context) bool {
// Current rune must be Σ.
// ::NFD();
// # 03A3; 03C2; 03A3; 03A3; Final_Sigma; # GREEK CAPITAL LETTER SIGMA
// Σ } [:case-ignorable:]* [:cased:] → σ;
// [:cased:] [:case-ignorable:]* { Σ → ς;
// ::Any-Lower;
// ::NFC();
p := c.pDst
c.writeString("ς")
// TODO: we should do this here, but right now this will never have an
// effect as this is called when the prefix is Sigma, whereas Dutch and
// Afrikaans only test for an apostrophe.
//
// if t.rewrite != nil {
// t.rewrite(c)
// }
// We need to do one more iteration after maxIgnorable, as a cased
// letter is not an ignorable and may modify the result.
wasMid := false
for i := 0; i < maxIgnorable+1; i++ {
if !c.next() {
return false
}
if !c.info.isCaseIgnorable() {
// All Midword runes are also case ignorable, so we are
// guaranteed to have a letter or word break here. As we are
// unreading the rune, there is no need to unset c.isMidWord;
// the title caser will handle this.
if c.info.isCased() {
// p+1 is guaranteed to be in bounds: if writing ς was
// successful, p+1 will contain the second byte of ς. If not,
// this function will have returned after c.next returned false.
c.dst[p+1]++ // ς → σ
}
c.unreadRune()
return true
}
// A case ignorable may also introduce a word break, so we may need
// to continue searching even after detecting a break.
isMid := c.info.isMid()
if (wasMid && isMid) || c.info.isBreak() {
c.isMidWord = false
}
wasMid = isMid
c.copy()
}
return true
}
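The effect of this final-sigma logic is easiest to see through the public API; a short illustrative sketch (assumes the cases and language imports from the earlier example):

func ExampleFinalSigma() {
    c := cases.Lower(language.Und)
    fmt.Println(c.String("ΟΔΟΣ"))  // οδος: the trailing Σ becomes final sigma ς (U+03C2)
    fmt.Println(c.String("ΣΟΦΙΑ")) // σοφια: a word-initial Σ becomes σ, not ς
}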
// finalSigmaSpan would be the same as isLower.
// elUpper implements Greek upper casing, which entails removing a predefined
// set of non-blocked modifiers. Note that these accents should not be removed
// for title casing!
// Example: "Οδός" -> "ΟΔΟΣ".
func elUpper(c *context) bool {
// From CLDR:
// [:Greek:] [^[:ccc=Not_Reordered:][:ccc=Above:]]*? { [\u0313\u0314\u0301\u0300\u0306\u0342\u0308\u0304] → ;
// [:Greek:] [^[:ccc=Not_Reordered:][:ccc=Iota_Subscript:]]*? { \u0345 → ;
r, _ := utf8.DecodeRune(c.src[c.pSrc:])
oldPDst := c.pDst
if !upper(c) {
return false
}
if !unicode.Is(unicode.Greek, r) {
return true
}
i := 0
// Take the properties of the uppercased rune that is already written to the
// destination. This saves us the trouble of having to uppercase the
// decomposed rune again.
if b := norm.NFD.Properties(c.dst[oldPDst:]).Decomposition(); b != nil {
// Restore the destination position and process the decomposed rune.
r, sz := utf8.DecodeRune(b)
if r <= 0xFF { // See A.6.1
return true
}
c.pDst = oldPDst
// Insert the first rune and ignore the modifiers. See A.6.2.
c.writeBytes(b[:sz])
i = len(b[sz:]) / 2 // Greek modifiers are always of length 2.
}
for ; i < maxIgnorable && c.next(); i++ {
switch r, _ := utf8.DecodeRune(c.src[c.pSrc:]); r {
// Above and Iota Subscript
case 0x0300, // U+0300 COMBINING GRAVE ACCENT
0x0301, // U+0301 COMBINING ACUTE ACCENT
0x0304, // U+0304 COMBINING MACRON
0x0306, // U+0306 COMBINING BREVE
0x0308, // U+0308 COMBINING DIAERESIS
0x0313, // U+0313 COMBINING COMMA ABOVE
0x0314, // U+0314 COMBINING REVERSED COMMA ABOVE
0x0342, // U+0342 COMBINING GREEK PERISPOMENI
0x0345: // U+0345 COMBINING GREEK YPOGEGRAMMENI
// No-op. Gobble the modifier.
default:
switch v, _ := trie.lookup(c.src[c.pSrc:]); info(v).cccType() {
case cccZero:
c.unreadRune()
return true
// We don't need to test for IotaSubscript as the only rune that
// qualifies (U+0345) was already excluded in the switch statement
// above. See A.4.
case cccAbove:
return c.copy()
default:
// Some other modifier. We're still allowed to gobble Greek
// modifiers after this.
c.copy()
}
}
}
return i == maxIgnorable
}
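A sketch of the behaviour described above, via the public API (output comments are expectations based on the rules quoted here, not authoritative):

func ExampleGreekUpper() {
    fmt.Println(cases.Upper(language.Greek).String("Οδός")) // ΟΔΟΣ: accents removed when upper casing
    fmt.Println(cases.Title(language.Greek).String("οδός")) // Οδός: accents preserved when title casing
}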
// TODO: implement elUpperSpan (low-priority: complex and infrequent).
func ltLower(c *context) bool {
// From CLDR:
// # Introduce an explicit dot above when lowercasing capital I's and J's
// # whenever there are more accents above.
// # (of the accents used in Lithuanian: grave, acute, tilde above, and ogonek)
// # 0049; 0069 0307; 0049; 0049; lt More_Above; # LATIN CAPITAL LETTER I
// # 004A; 006A 0307; 004A; 004A; lt More_Above; # LATIN CAPITAL LETTER J
// # 012E; 012F 0307; 012E; 012E; lt More_Above; # LATIN CAPITAL LETTER I WITH OGONEK
// # 00CC; 0069 0307 0300; 00CC; 00CC; lt; # LATIN CAPITAL LETTER I WITH GRAVE
// # 00CD; 0069 0307 0301; 00CD; 00CD; lt; # LATIN CAPITAL LETTER I WITH ACUTE
// # 0128; 0069 0307 0303; 0128; 0128; lt; # LATIN CAPITAL LETTER I WITH TILDE
// ::NFD();
// I } [^[:ccc=Not_Reordered:][:ccc=Above:]]* [:ccc=Above:] → i \u0307;
// J } [^[:ccc=Not_Reordered:][:ccc=Above:]]* [:ccc=Above:] → j \u0307;
// I \u0328 (Į) } [^[:ccc=Not_Reordered:][:ccc=Above:]]* [:ccc=Above:] → i \u0328 \u0307;
// I \u0300 (Ì) → i \u0307 \u0300;
// I \u0301 (Í) → i \u0307 \u0301;
// I \u0303 (Ĩ) → i \u0307 \u0303;
// ::Any-Lower();
// ::NFC();
i := 0
if r := c.src[c.pSrc]; r < utf8.RuneSelf {
lower(c)
if r != 'I' && r != 'J' {
return true
}
} else {
p := norm.NFD.Properties(c.src[c.pSrc:])
if d := p.Decomposition(); len(d) >= 3 && (d[0] == 'I' || d[0] == 'J') {
// UTF-8 optimization: the decomposition will only have an above
// modifier if the last rune of the decomposition is in [U+300-U+311].
// In all other cases, a decomposition starting with I is always
// an I followed by modifiers that are not cased themselves. See A.2.
if d[1] == 0xCC && d[2] <= 0x91 { // A.2.4.
if !c.writeBytes(d[:1]) {
return false
}
c.dst[c.pDst-1] += 'a' - 'A' // lower
// Assumption: modifier never changes on lowercase. See A.1.
// Assumption: all modifiers added have CCC = Above. See A.2.3.
return c.writeString("\u0307") && c.writeBytes(d[1:])
}
// In all other cases the additional modifiers will have a CCC
// that is less than 230 (Above). We will insert the U+0307, if
// needed, after these modifiers so that a string in FCD form
// will remain so. See A.2.2.
lower(c)
i = 1
} else {
return lower(c)
}
}
for ; i < maxIgnorable && c.next(); i++ {
switch c.info.cccType() {
case cccZero:
c.unreadRune()
return true
case cccAbove:
return c.writeString("\u0307") && c.copy() // See A.1.
default:
c.copy() // See A.1.
}
}
return i == maxIgnorable
}
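A sketch of one of the CLDR rules quoted above (00CC; 0069 0307 0300), again via the public API:

func ExampleLithuanianLower() {
    lt := cases.Lower(language.Lithuanian)
    // U+00CC (Ì) becomes i + U+0307 (combining dot above) + U+0300 (combining grave).
    fmt.Printf("%+q\n", lt.String("\u00CC")) // "i\u0307\u0300"
}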
// ltLowerSpan would be the same as isLower.
func ltUpper(f mapFunc) mapFunc {
return func(c *context) bool {
// Unicode:
// 0307; 0307; ; ; lt After_Soft_Dotted; # COMBINING DOT ABOVE
//
// From CLDR:
// # Remove \u0307 following soft-dotteds (i, j, and the like), with possible
// # intervening non-230 marks.
// ::NFD();
// [:Soft_Dotted:] [^[:ccc=Not_Reordered:][:ccc=Above:]]* { \u0307 → ;
// ::Any-Upper();
// ::NFC();
// TODO: See A.5. A soft-dotted rune never has an exception. This would
// allow us to overload the exception bit and encode this property in
// info. Need to measure performance impact of this.
r, _ := utf8.DecodeRune(c.src[c.pSrc:])
oldPDst := c.pDst
if !f(c) {
return false
}
if !unicode.Is(unicode.Soft_Dotted, r) {
return true
}
// We don't need to do an NFD normalization, as a soft-dotted rune never
// contains U+0307. See A.3.
i := 0
for ; i < maxIgnorable && c.next(); i++ {
switch c.info.cccType() {
case cccZero:
c.unreadRune()
return true
case cccAbove:
if c.hasPrefix("\u0307") {
// We don't do a full NFC, but rather combine runes for
// some of the common cases. (Returning NFC or
// preserving normal form is neither a requirement nor
// a possibility anyway).
if !c.next() {
return false
}
if c.dst[oldPDst] == 'I' && c.pDst == oldPDst+1 && c.src[c.pSrc] == 0xcc {
s := ""
switch c.src[c.pSrc+1] {
case 0x80: // U+0300 COMBINING GRAVE ACCENT
s = "\u00cc" // U+00CC LATIN CAPITAL LETTER I WITH GRAVE
case 0x81: // U+0301 COMBINING ACUTE ACCENT
s = "\u00cd" // U+00CD LATIN CAPITAL LETTER I WITH ACUTE
case 0x83: // U+0303 COMBINING TILDE
s = "\u0128" // U+0128 LATIN CAPITAL LETTER I WITH TILDE
case 0x88: // U+0308 COMBINING DIAERESIS
s = "\u00cf" // U+00CF LATIN CAPITAL LETTER I WITH DIAERESIS
default:
}
if s != "" {
c.pDst = oldPDst
return c.writeString(s)
}
}
}
return c.copy()
default:
c.copy()
}
}
return i == maxIgnorable
}
}
// TODO: implement ltUpperSpan (low priority: complex and infrequent).
func aztrUpper(f mapFunc) mapFunc {
return func(c *context) bool {
// i→İ;
if c.src[c.pSrc] == 'i' {
return c.writeString("İ")
}
return f(c)
}
}
func aztrLower(c *context) (done bool) {
// From CLDR:
// # I and i-dotless; I-dot and i are case pairs in Turkish and Azeri
// # 0130; 0069; 0130; 0130; tr; # LATIN CAPITAL LETTER I WITH DOT ABOVE
// İ→i;
// # When lowercasing, remove dot_above in the sequence I + dot_above, which will turn into i.
// # This matches the behavior of the canonically equivalent I-dot_above
// # 0307; ; 0307; 0307; tr After_I; # COMBINING DOT ABOVE
// # When lowercasing, unless an I is before a dot_above, it turns into a dotless i.
// # 0049; 0131; 0049; 0049; tr Not_Before_Dot; # LATIN CAPITAL LETTER I
// I([^[:ccc=Not_Reordered:][:ccc=Above:]]*)\u0307 → i$1 ;
// I→ı ;
// ::Any-Lower();
if c.hasPrefix("\u0130") { // İ
return c.writeString("i")
}
if c.src[c.pSrc] != 'I' {
return lower(c)
}
// We ignore the lower-case I for now, but insert it later when we know
// which form we need.
start := c.pSrc + c.sz
i := 0
Loop:
// We check for up to n ignorables before \u0307. As \u0307 is an
// ignorable as well, n is maxIgnorable-1.
for ; i < maxIgnorable && c.next(); i++ {
switch c.info.cccType() {
case cccAbove:
if c.hasPrefix("\u0307") {
return c.writeString("i") && c.writeBytes(c.src[start:c.pSrc]) // ignore U+0307
}
done = true
break Loop
case cccZero:
c.unreadRune()
done = true
break Loop
default:
// We'll write this rune after we know which starter to use.
}
}
if i == maxIgnorable {
done = true
}
return c.writeString("ı") && c.writeBytes(c.src[start:c.pSrc+c.sz]) && done
}
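An illustrative sketch of the Turkish/Azeri I rules implemented above (public API; expected output only):

func ExampleTurkishCasing() {
    fmt.Println(cases.Lower(language.Turkish).String("İI"))       // iı: dotted İ maps to i, dotless I to ı
    fmt.Println(cases.Upper(language.Turkish).String("i"))        // İ
    fmt.Println(cases.Title(language.Turkish).String("istanbul")) // İstanbul
}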
// aztrLowerSpan would be the same as isLower.
func nlTitle(c *context) bool {
// From CLDR:
// # Special titlecasing for Dutch initial "ij".
// ::Any-Title();
// # Fix up Ij at the beginning of a "word" (per Any-Title, not UAX #29)
// [:^WB=ALetter:] [:WB=Extend:]* [[:WB=MidLetter:][:WB=MidNumLet:]]? { Ij } → IJ ;
if c.src[c.pSrc] != 'I' && c.src[c.pSrc] != 'i' {
return title(c)
}
if !c.writeString("I") || !c.next() {
return false
}
if c.src[c.pSrc] == 'j' || c.src[c.pSrc] == 'J' {
return c.writeString("J")
}
c.unreadRune()
return true
}
func nlTitleSpan(c *context) bool {
// From CLDR:
// # Special titlecasing for Dutch initial "ij".
// ::Any-Title();
// # Fix up Ij at the beginning of a "word" (per Any-Title, not UAX #29)
// [:^WB=ALetter:] [:WB=Extend:]* [[:WB=MidLetter:][:WB=MidNumLet:]]? { Ij } → IJ ;
if c.src[c.pSrc] != 'I' {
return isTitle(c)
}
if !c.next() || c.src[c.pSrc] == 'j' {
return false
}
if c.src[c.pSrc] != 'J' {
c.unreadRune()
}
return true
}
// Not part of CLDR, but see https://unicode.org/cldr/trac/ticket/7078.
func afnlRewrite(c *context) {
if c.hasPrefix("'") || c.hasPrefix("’") {
c.isMidWord = true
}
}
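A sketch of the Dutch special case above through the public API (expected output only):

func ExampleDutchTitle() {
    fmt.Println(cases.Title(language.Dutch).String("ijsland")) // IJsland: the initial "ij" is capitalized as a unit
}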

vendor/golang.org/x/text/cases/tables10.0.0.go (generated, vendored, new file; 2255 lines added)
File diff suppressed because it is too large.

vendor/golang.org/x/text/cases/tables11.0.0.go (generated, vendored, new file; 2316 lines added)
File diff suppressed because it is too large.

vendor/golang.org/x/text/cases/tables12.0.0.go (generated, vendored, new file; 2359 lines added)
File diff suppressed because it is too large.

vendor/golang.org/x/text/cases/tables13.0.0.go (generated, vendored, new file; 2399 lines added)
File diff suppressed because it is too large.

vendor/golang.org/x/text/cases/tables15.0.0.go (generated, vendored, new file; 2527 lines added)
File diff suppressed because it is too large.

vendor/golang.org/x/text/cases/tables9.0.0.go (generated, vendored, new file; 2215 lines added)
File diff suppressed because it is too large.

vendor/golang.org/x/text/cases/trieval.go (generated, vendored, new file)

@@ -0,0 +1,217 @@
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
package cases
// This file contains definitions for interpreting the trie value of the case
// trie generated by "go run gen*.go". It is shared by both the generator
// program and the resultant package. Sharing is achieved by the generator
// copying gen_trieval.go to trieval.go and changing what's above this comment.
// info holds case information for a single rune. It is the value returned
// by a trie lookup. Most mapping information can be stored in a single 16-bit
// value. If not, for example when a rune is mapped to multiple runes, the value
// stores some basic case data and an index into an array with additional data.
//
// The per-rune values have the following format:
//
// if (exception) {
// 15..4 unsigned exception index
// } else {
// 15..8 XOR pattern or index to XOR pattern for case mapping
// Only 13..8 are used for XOR patterns.
// 7 inverseFold (fold to upper, not to lower)
// 6 index: interpret the XOR pattern as an index
// or isMid if case mode is cIgnorableUncased.
// 5..4 CCC: zero (normal or break), above or other
// }
// 3 exception: interpret this value as an exception index
// (TODO: is this bit necessary? Probably implied from case mode.)
// 2..0 case mode
//
// For the non-exceptional cases, a rune must be either uncased, lowercase or
// uppercase. If the rune is cased, the XOR pattern maps either a lowercase
// rune to uppercase or an uppercase rune to lowercase (applied to the 10
// least-significant bits of the rune).
//
// See the definitions below for a more detailed description of the various
// bits.
type info uint16
const (
casedMask = 0x0003
fullCasedMask = 0x0007
ignorableMask = 0x0006
ignorableValue = 0x0004
inverseFoldBit = 1 << 7
isMidBit = 1 << 6
exceptionBit = 1 << 3
exceptionShift = 4
numExceptionBits = 12
xorIndexBit = 1 << 6
xorShift = 8
// There is no mapping if all xor bits and the exception bit are zero.
hasMappingMask = 0xff80 | exceptionBit
)
// The case mode bits encode the case type of a rune. This includes uncased,
// title, upper and lower case and case ignorable. (For a definition of these
// terms see Chapter 3 of The Unicode Standard Core Specification.) In some rare
// cases, a rune can be both cased and case-ignorable. This is encoded by
// cIgnorableCased. A rune of this type is always lower case. Some runes are
// cased while not having a mapping.
//
// A common pattern for scripts in the Unicode standard is for upper and lower
// case runes to alternate for increasing rune values (e.g. the accented Latin
// ranges starting from U+0100 and U+1E00 among others and some Cyrillic
// characters). We use this property by defining a cXORCase mode, where the case
// mode (always upper or lower case) is derived from the rune value. As the XOR
// pattern for case mappings is often identical for successive runes, using
// cXORCase can result in large series of identical trie values. This, in turn,
// allows us to better compress the trie blocks.
const (
cUncased info = iota // 000
cTitle // 001
cLower // 010
cUpper // 011
cIgnorableUncased // 100
cIgnorableCased // 101 // lower case if mappings exist
cXORCase // 11x // case is cLower | ((rune&1) ^ x)
maxCaseMode = cUpper
)
func (c info) isCased() bool {
return c&casedMask != 0
}
func (c info) isCaseIgnorable() bool {
return c&ignorableMask == ignorableValue
}
func (c info) isNotCasedAndNotCaseIgnorable() bool {
return c&fullCasedMask == 0
}
func (c info) isCaseIgnorableAndNotCased() bool {
return c&fullCasedMask == cIgnorableUncased
}
func (c info) isMid() bool {
return c&(fullCasedMask|isMidBit) == isMidBit|cIgnorableUncased
}
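The predicates above operate on values read from the generated case trie declared later in this file. A minimal in-package sketch of how such a value is obtained and classified; caseInfo is a hypothetical helper name used only for illustration:

func caseInfo(s []byte) info {
    v, _ := trie.lookup(s) // raw 16-bit trie value for the first rune of s
    return info(v)
}

// caseInfo([]byte("A")).isCased() == true
// caseInfo([]byte(" ")).isCased() == false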
// The case mapping implementation will need to know about various Canonical
// Combining Class (CCC) values. We encode two of these in the trie value:
// cccZero (0) and cccAbove (230). If the value is cccOther, it means that
// CCC(r) > 0, but not 230. A value of cccBreak means that CCC(r) == 0 and that
// the rune also has the break category Break (see below).
const (
cccBreak info = iota << 4
cccZero
cccAbove
cccOther
cccMask = cccBreak | cccZero | cccAbove | cccOther
)
const (
starter = 0
above = 230
iotaSubscript = 240
)
// The exceptions slice holds data that does not fit in a normal info entry.
// The entry is pointed to by the exception index in an entry. It has the
// following format:
//
// Header:
//
// byte 0:
// 7..6 unused
// 5..4 CCC type (same bits as entry)
// 3 unused
// 2..0 length of fold
//
// byte 1:
// 7..6 unused
// 5..3 length of 1st mapping of case type
// 2..0 length of 2nd mapping of case type
//
// case 1st 2nd
// lower -> upper, title
// upper -> lower, title
// title -> lower, upper
//
// Lengths with the value 0x7 indicate no value and implies no change.
// A length of 0 indicates a mapping to zero-length string.
//
// Body bytes:
//
// case folding bytes
// lowercase mapping bytes
// uppercase mapping bytes
// titlecase mapping bytes
// closure mapping bytes (for NFKC_Casefold). (TODO)
//
// Fallbacks:
//
// missing fold -> lower
// missing title -> upper
// all missing -> original rune
//
// exceptions starts with a dummy byte to enforce that there is no zero index
// value.
const (
lengthMask = 0x07
lengthBits = 3
noChange = 0
)
// References to generated trie.
var trie = newCaseTrie(0)
var sparse = sparseBlocks{
values: sparseValues[:],
offsets: sparseOffsets[:],
}
// Sparse block lookup code.
// valueRange is an entry in a sparse block.
type valueRange struct {
value uint16
lo, hi byte
}
type sparseBlocks struct {
values []valueRange
offsets []uint16
}
// lookup returns the value from values block n for byte b using binary search.
func (s *sparseBlocks) lookup(n uint32, b byte) uint16 {
lo := s.offsets[n]
hi := s.offsets[n+1]
for lo < hi {
m := lo + (hi-lo)/2
r := s.values[m]
if r.lo <= b && b <= r.hi {
return r.value
}
if b < r.lo {
hi = m
} else {
lo = m + 1
}
}
return 0
}
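A tiny, made-up data set makes the binary search above concrete (the real values and offsets slices are generated; these numbers are purely illustrative):

var exampleSparse = sparseBlocks{
    values: []valueRange{
        {value: 7, lo: 0x80, hi: 0x8f},
        {value: 9, lo: 0xa0, hi: 0xbf},
    },
    offsets: []uint16{0, 2}, // block 0 covers values[0:2]
}

// exampleSparse.lookup(0, 0x85) == 7
// exampleSparse.lookup(0, 0x90) == 0 (no range matches)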
// lastRuneForTesting is the last rune used for testing. Everything after this
// is boring.
const lastRuneForTesting = rune(0x1FFFF)

vendor/golang.org/x/text/internal/internal.go (generated, vendored, new file)

@@ -0,0 +1,49 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package internal contains non-exported functionality that is used by
// packages in the text repository.
package internal // import "golang.org/x/text/internal"
import (
"sort"
"golang.org/x/text/language"
)
// SortTags sorts tags in place.
func SortTags(tags []language.Tag) {
sort.Sort(sorter(tags))
}
type sorter []language.Tag
func (s sorter) Len() int {
return len(s)
}
func (s sorter) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
func (s sorter) Less(i, j int) bool {
return s[i].String() < s[j].String()
}
// UniqueTags sorts and filters duplicate tags in place and returns a slice with
// only unique tags.
func UniqueTags(tags []language.Tag) []language.Tag {
if len(tags) <= 1 {
return tags
}
SortTags(tags)
k := 0
for i := 1; i < len(tags); i++ {
if tags[k].String() < tags[i].String() {
k++
tags[k] = tags[i]
}
}
return tags[:k+1]
}
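An illustrative use from inside the x/text module (this internal package cannot be imported from outside it):

tags := []language.Tag{
    language.MustParse("en-US"),
    language.MustParse("de"),
    language.MustParse("en-US"),
}
tags = internal.UniqueTags(tags) // sorted and deduplicated: [de en-US]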

vendor/golang.org/x/text/internal/language/common.go (generated, vendored, new file)

@@ -0,0 +1,16 @@
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
package language
// This file contains code common to the maketables.go and the package code.
// AliasType is the type of an alias in AliasMap.
type AliasType int8
const (
Deprecated AliasType = iota
Macro
Legacy
AliasTypeUnknown AliasType = -1
)

vendor/golang.org/x/text/internal/language/compact.go (generated, vendored, new file)

@@ -0,0 +1,29 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
// CompactCoreInfo is a compact integer with the three core tags encoded.
type CompactCoreInfo uint32
// GetCompactCore generates a uint32 value that is guaranteed to be unique for
// different language, region, and script values.
func GetCompactCore(t Tag) (cci CompactCoreInfo, ok bool) {
if t.LangID > langNoIndexOffset {
return 0, false
}
cci |= CompactCoreInfo(t.LangID) << (8 + 12)
cci |= CompactCoreInfo(t.ScriptID) << 12
cci |= CompactCoreInfo(t.RegionID)
return cci, true
}
// Tag generates a tag from c.
func (c CompactCoreInfo) Tag() Tag {
return Tag{
LangID: Language(c >> 20),
RegionID: Region(c & 0x3ff),
ScriptID: Script(c>>12) & 0xff,
}
}
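GetCompactCore packs the language index into bits 20 and up, the script into bits 12 through 19, and the region into the low bits, so a successful round trip through Tag restores the three core subtags. An in-package sketch (sr-Latn-RS is just an example tag):

t := Make("sr-Latn-RS")
if cci, ok := GetCompactCore(t); ok {
    back := cci.Tag() // LangID, ScriptID and RegionID match those of t
    _ = back
}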


@@ -0,0 +1,61 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package compact defines a compact representation of language tags.
//
// Common language tags (at least all for which locale information is defined
// in CLDR) are assigned a unique index. Each Tag is associated with such an
// ID for selecting language-related resources (such as translations) as well
// as one for selecting regional defaults (currency, number formatting, etc.)
//
// We may want to export this functionality at some point, but for now it is
// only available for use within x/text.
package compact // import "golang.org/x/text/internal/language/compact"
import (
"sort"
"strings"
"golang.org/x/text/internal/language"
)
// ID is an integer identifying a single tag.
type ID uint16
func getCoreIndex(t language.Tag) (id ID, ok bool) {
cci, ok := language.GetCompactCore(t)
if !ok {
return 0, false
}
i := sort.Search(len(coreTags), func(i int) bool {
return cci <= coreTags[i]
})
if i == len(coreTags) || coreTags[i] != cci {
return 0, false
}
return ID(i), true
}
// Parent returns the ID of the parent or the root ID if id is already the root.
func (id ID) Parent() ID {
return parents[id]
}
// Tag converts id to an internal language Tag.
func (id ID) Tag() language.Tag {
if int(id) >= len(coreTags) {
return specialTags[int(id)-len(coreTags)]
}
return coreTags[id].Tag()
}
var specialTags []language.Tag
func init() {
tags := strings.Split(specialTagsStr, " ")
specialTags = make([]language.Tag, len(tags))
for i, t := range tags {
specialTags[i] = language.MustParse(t)
}
}


@@ -0,0 +1,260 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run gen.go gen_index.go -output tables.go
//go:generate go run gen_parents.go
package compact
// TODO: Remove above NOTE after:
// - verifying that tables are dropped correctly (most notably matcher tables).
import (
"strings"
"golang.org/x/text/internal/language"
)
// Tag represents a BCP 47 language tag. It is used to specify an instance of a
// specific language or locale. All language tag values are guaranteed to be
// well-formed.
type Tag struct {
// NOTE: exported tags will become part of the public API.
language ID
locale ID
full fullTag // always a language.Tag for now.
}
const _und = 0
type fullTag interface {
IsRoot() bool
Parent() language.Tag
}
// Make a compact Tag from a fully specified internal language Tag.
func Make(t language.Tag) (tag Tag) {
if region := t.TypeForKey("rg"); len(region) == 6 && region[2:] == "zzzz" {
if r, err := language.ParseRegion(region[:2]); err == nil {
tFull := t
t, _ = t.SetTypeForKey("rg", "")
// TODO: should we not consider "va" for the language tag?
var exact1, exact2 bool
tag.language, exact1 = FromTag(t)
t.RegionID = r
tag.locale, exact2 = FromTag(t)
if !exact1 || !exact2 {
tag.full = tFull
}
return tag
}
}
lang, ok := FromTag(t)
tag.language = lang
tag.locale = lang
if !ok {
tag.full = t
}
return tag
}
// Tag returns an internal language Tag version of this tag.
func (t Tag) Tag() language.Tag {
if t.full != nil {
return t.full.(language.Tag)
}
tag := t.language.Tag()
if t.language != t.locale {
loc := t.locale.Tag()
tag, _ = tag.SetTypeForKey("rg", strings.ToLower(loc.RegionID.String())+"zzzz")
}
return tag
}
// IsCompact reports whether this tag is fully defined in terms of ID.
func (t *Tag) IsCompact() bool {
return t.full == nil
}
// MayHaveVariants reports whether a tag may have variants. If it returns false
// it is guaranteed the tag does not have variants.
func (t Tag) MayHaveVariants() bool {
return t.full != nil || int(t.language) >= len(coreTags)
}
// MayHaveExtensions reports whether a tag may have extensions. If it returns
// false it is guaranteed the tag does not have them.
func (t Tag) MayHaveExtensions() bool {
return t.full != nil ||
int(t.language) >= len(coreTags) ||
t.language != t.locale
}
// IsRoot returns true if t is equal to language "und".
func (t Tag) IsRoot() bool {
if t.full != nil {
return t.full.IsRoot()
}
return t.language == _und
}
// Parent returns the CLDR parent of t. In CLDR, missing fields in data for a
// specific language are substituted with fields from the parent language.
// The parent for a language may change for newer versions of CLDR.
func (t Tag) Parent() Tag {
if t.full != nil {
return Make(t.full.Parent())
}
if t.language != t.locale {
// Simulate stripping -u-rg-xxxxxx
return Tag{language: t.language, locale: t.language}
}
// TODO: use parent lookup table once cycle from internal package is
// removed. Probably by internalizing the table and declaring this fast
// enough.
// lang := compactID(internal.Parent(uint16(t.language)))
lang, _ := FromTag(t.language.Tag().Parent())
return Tag{language: lang, locale: lang}
}
// nextToken returns token t and the rest of the string.
func nextToken(s string) (t, tail string) {
p := strings.Index(s[1:], "-")
if p == -1 {
return s[1:], ""
}
p++
return s[1:p], s[p:]
}
// LanguageID returns an index, where 0 <= index < NumCompactTags, for tags
// for which data exists in the text repository. The index will change over time
// and should not be stored in persistent storage. If t does not match a compact
// index, exact will be false and the compact index will be returned for the
// first match after repeatedly taking the Parent of t.
func LanguageID(t Tag) (id ID, exact bool) {
return t.language, t.full == nil
}
// RegionalID returns the ID for the regional variant of this tag. This index is
// used to indicate region-specific overrides, such as default currency, default
// calendar and week data, default time cycle, and default measurement system
// and unit preferences.
//
// For instance, the tag en-GB-u-rg-uszzzz specifies British English with US
// settings for currency, number formatting, etc. The CompactIndex for this tag
// will be that for en-GB, while the RegionalID will be the one corresponding to
// en-US.
func RegionalID(t Tag) (id ID, exact bool) {
return t.locale, t.full == nil
}
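Seen from the public golang.org/x/text/language package, the regional override is simply the "rg" key of the 'u' extension; a short hedged sketch:

t := language.MustParse("en-GB-u-rg-uszzzz")
fmt.Println(t.TypeForKey("rg")) // uszzzz: British English with US regional defaults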
// LanguageTag returns t stripped of regional variant indicators.
//
// At the moment this means it is stripped of a regional and variant subtag "rg"
// and "va" in the "u" extension.
func (t Tag) LanguageTag() Tag {
if t.full == nil {
return Tag{language: t.language, locale: t.language}
}
tt := t.Tag()
tt.SetTypeForKey("rg", "")
tt.SetTypeForKey("va", "")
return Make(tt)
}
// RegionalTag returns the regional variant of the tag.
//
// At the moment this means that the region is set from the regional subtag
// "rg" in the "u" extension.
func (t Tag) RegionalTag() Tag {
rt := Tag{language: t.locale, locale: t.locale}
if t.full == nil {
return rt
}
b := language.Builder{}
tag := t.Tag()
// tag, _ = tag.SetTypeForKey("rg", "")
b.SetTag(t.locale.Tag())
if v := tag.Variants(); v != "" {
for _, v := range strings.Split(v, "-") {
b.AddVariant(v)
}
}
for _, e := range tag.Extensions() {
b.AddExt(e)
}
return t
}
// FromTag reports closest matching ID for an internal language Tag.
func FromTag(t language.Tag) (id ID, exact bool) {
// TODO: perhaps give more frequent tags a lower index.
// TODO: we could make the indexes stable. This would exclude some
// possibilities for optimization, so don't do this quite yet.
exact = true
b, s, r := t.Raw()
if t.HasString() {
if t.IsPrivateUse() {
// We have no entries for user-defined tags.
return 0, false
}
hasExtra := false
if t.HasVariants() {
if t.HasExtensions() {
build := language.Builder{}
build.SetTag(language.Tag{LangID: b, ScriptID: s, RegionID: r})
build.AddVariant(t.Variants())
exact = false
t = build.Make()
}
hasExtra = true
} else if _, ok := t.Extension('u'); ok {
// TODO: va may mean something else. Consider not considering it.
// Strip all but the 'va' entry.
old := t
variant := t.TypeForKey("va")
t = language.Tag{LangID: b, ScriptID: s, RegionID: r}
if variant != "" {
t, _ = t.SetTypeForKey("va", variant)
hasExtra = true
}
exact = old == t
} else {
exact = false
}
if hasExtra {
// We have some variants.
for i, s := range specialTags {
if s == t {
return ID(i + len(coreTags)), exact
}
}
exact = false
}
}
if x, ok := getCoreIndex(t); ok {
return x, exact
}
exact = false
if r != 0 && s == 0 {
// Deal with cases where an extra script is inserted for the region.
t, _ := t.Maximize()
if x, ok := getCoreIndex(t); ok {
return x, exact
}
}
for t = t.Parent(); t != root; t = t.Parent() {
// No variants specified: just compare core components.
// The key has the form lllssrrr, where l, s, and r are nibbles for
// respectively the langID, scriptID, and regionID.
if x, ok := getCoreIndex(t); ok {
return x, exact
}
}
return 0, exact
}
var root = language.Tag{}


@@ -0,0 +1,120 @@
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
package compact
// parents maps a compact index of a tag to the compact index of the parent of
// this tag.
var parents = []ID{ // 775 elements
// Entry 0 - 3F
0x0000, 0x0000, 0x0001, 0x0001, 0x0000, 0x0004, 0x0000, 0x0006,
0x0000, 0x0008, 0x0000, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a,
0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a,
0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a,
0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x0000,
0x0000, 0x0028, 0x0000, 0x002a, 0x0000, 0x002c, 0x0000, 0x0000,
0x002f, 0x002e, 0x002e, 0x0000, 0x0033, 0x0000, 0x0035, 0x0000,
0x0037, 0x0000, 0x0039, 0x0000, 0x003b, 0x0000, 0x0000, 0x003e,
// Entry 40 - 7F
0x0000, 0x0040, 0x0040, 0x0000, 0x0043, 0x0043, 0x0000, 0x0046,
0x0000, 0x0048, 0x0000, 0x0000, 0x004b, 0x004a, 0x004a, 0x0000,
0x004f, 0x004f, 0x004f, 0x004f, 0x0000, 0x0054, 0x0054, 0x0000,
0x0057, 0x0000, 0x0059, 0x0000, 0x005b, 0x0000, 0x005d, 0x005d,
0x0000, 0x0060, 0x0000, 0x0062, 0x0000, 0x0064, 0x0000, 0x0066,
0x0066, 0x0000, 0x0069, 0x0000, 0x006b, 0x006b, 0x006b, 0x006b,
0x006b, 0x006b, 0x006b, 0x0000, 0x0073, 0x0000, 0x0075, 0x0000,
0x0077, 0x0000, 0x0000, 0x007a, 0x0000, 0x007c, 0x0000, 0x007e,
// Entry 80 - BF
0x0000, 0x0080, 0x0080, 0x0000, 0x0083, 0x0083, 0x0000, 0x0086,
0x0087, 0x0087, 0x0087, 0x0086, 0x0088, 0x0087, 0x0087, 0x0087,
0x0086, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0088,
0x0087, 0x0087, 0x0087, 0x0087, 0x0088, 0x0087, 0x0088, 0x0087,
0x0087, 0x0088, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
0x0087, 0x0087, 0x0087, 0x0086, 0x0087, 0x0087, 0x0087, 0x0087,
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0086, 0x0087, 0x0086,
// Entry C0 - FF
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
0x0088, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
0x0086, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0088, 0x0087,
0x0087, 0x0088, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0086, 0x0086, 0x0087,
0x0087, 0x0086, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0000,
0x00ef, 0x0000, 0x00f1, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f2,
0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f1, 0x00f2, 0x00f1, 0x00f1,
// Entry 100 - 13F
0x00f2, 0x00f2, 0x00f1, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f1,
0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x0000, 0x010e,
0x0000, 0x0110, 0x0000, 0x0112, 0x0000, 0x0114, 0x0114, 0x0000,
0x0117, 0x0117, 0x0117, 0x0117, 0x0000, 0x011c, 0x0000, 0x011e,
0x0000, 0x0120, 0x0120, 0x0000, 0x0123, 0x0123, 0x0123, 0x0123,
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
// Entry 140 - 17F
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
0x0123, 0x0123, 0x0000, 0x0152, 0x0000, 0x0154, 0x0000, 0x0156,
0x0000, 0x0158, 0x0000, 0x015a, 0x0000, 0x015c, 0x015c, 0x015c,
0x0000, 0x0160, 0x0000, 0x0000, 0x0163, 0x0000, 0x0165, 0x0000,
0x0167, 0x0167, 0x0167, 0x0000, 0x016b, 0x0000, 0x016d, 0x0000,
0x016f, 0x0000, 0x0171, 0x0171, 0x0000, 0x0174, 0x0000, 0x0176,
0x0000, 0x0178, 0x0000, 0x017a, 0x0000, 0x017c, 0x0000, 0x017e,
// Entry 180 - 1BF
0x0000, 0x0000, 0x0000, 0x0182, 0x0000, 0x0184, 0x0184, 0x0184,
0x0184, 0x0000, 0x0000, 0x0000, 0x018b, 0x0000, 0x0000, 0x018e,
0x0000, 0x0000, 0x0191, 0x0000, 0x0000, 0x0000, 0x0195, 0x0000,
0x0197, 0x0000, 0x0000, 0x019a, 0x0000, 0x0000, 0x019d, 0x0000,
0x019f, 0x0000, 0x01a1, 0x0000, 0x01a3, 0x0000, 0x01a5, 0x0000,
0x01a7, 0x0000, 0x01a9, 0x0000, 0x01ab, 0x0000, 0x01ad, 0x0000,
0x01af, 0x0000, 0x01b1, 0x01b1, 0x0000, 0x01b4, 0x0000, 0x01b6,
0x0000, 0x01b8, 0x0000, 0x01ba, 0x0000, 0x01bc, 0x0000, 0x0000,
// Entry 1C0 - 1FF
0x01bf, 0x0000, 0x01c1, 0x0000, 0x01c3, 0x0000, 0x01c5, 0x0000,
0x01c7, 0x0000, 0x01c9, 0x0000, 0x01cb, 0x01cb, 0x01cb, 0x01cb,
0x0000, 0x01d0, 0x0000, 0x01d2, 0x01d2, 0x0000, 0x01d5, 0x0000,
0x01d7, 0x0000, 0x01d9, 0x0000, 0x01db, 0x0000, 0x01dd, 0x0000,
0x01df, 0x01df, 0x0000, 0x01e2, 0x0000, 0x01e4, 0x0000, 0x01e6,
0x0000, 0x01e8, 0x0000, 0x01ea, 0x0000, 0x01ec, 0x0000, 0x01ee,
0x0000, 0x01f0, 0x0000, 0x0000, 0x01f3, 0x0000, 0x01f5, 0x01f5,
0x01f5, 0x0000, 0x01f9, 0x0000, 0x01fb, 0x0000, 0x01fd, 0x0000,
// Entry 200 - 23F
0x01ff, 0x0000, 0x0000, 0x0202, 0x0000, 0x0204, 0x0204, 0x0000,
0x0207, 0x0000, 0x0209, 0x0209, 0x0000, 0x020c, 0x020c, 0x0000,
0x020f, 0x020f, 0x020f, 0x020f, 0x020f, 0x020f, 0x020f, 0x0000,
0x0217, 0x0000, 0x0219, 0x0000, 0x021b, 0x0000, 0x0000, 0x0000,
0x0000, 0x0000, 0x0221, 0x0000, 0x0000, 0x0224, 0x0000, 0x0226,
0x0226, 0x0000, 0x0229, 0x0000, 0x022b, 0x022b, 0x0000, 0x0000,
0x022f, 0x022e, 0x022e, 0x0000, 0x0000, 0x0234, 0x0000, 0x0236,
0x0000, 0x0238, 0x0000, 0x0244, 0x023a, 0x0244, 0x0244, 0x0244,
// Entry 240 - 27F
0x0244, 0x0244, 0x0244, 0x0244, 0x023a, 0x0244, 0x0244, 0x0000,
0x0247, 0x0247, 0x0247, 0x0000, 0x024b, 0x0000, 0x024d, 0x0000,
0x024f, 0x024f, 0x0000, 0x0252, 0x0000, 0x0254, 0x0254, 0x0254,
0x0254, 0x0254, 0x0254, 0x0000, 0x025b, 0x0000, 0x025d, 0x0000,
0x025f, 0x0000, 0x0261, 0x0000, 0x0263, 0x0000, 0x0265, 0x0000,
0x0000, 0x0268, 0x0268, 0x0268, 0x0000, 0x026c, 0x0000, 0x026e,
0x0000, 0x0270, 0x0000, 0x0000, 0x0000, 0x0274, 0x0273, 0x0273,
0x0000, 0x0278, 0x0000, 0x027a, 0x0000, 0x027c, 0x0000, 0x0000,
// Entry 280 - 2BF
0x0000, 0x0000, 0x0281, 0x0000, 0x0000, 0x0284, 0x0000, 0x0286,
0x0286, 0x0286, 0x0286, 0x0000, 0x028b, 0x028b, 0x028b, 0x0000,
0x028f, 0x028f, 0x028f, 0x028f, 0x028f, 0x0000, 0x0295, 0x0295,
0x0295, 0x0295, 0x0000, 0x0000, 0x0000, 0x0000, 0x029d, 0x029d,
0x029d, 0x0000, 0x02a1, 0x02a1, 0x02a1, 0x02a1, 0x0000, 0x0000,
0x02a7, 0x02a7, 0x02a7, 0x02a7, 0x0000, 0x02ac, 0x0000, 0x02ae,
0x02ae, 0x0000, 0x02b1, 0x0000, 0x02b3, 0x0000, 0x02b5, 0x02b5,
0x0000, 0x0000, 0x02b9, 0x0000, 0x0000, 0x0000, 0x02bd, 0x0000,
// Entry 2C0 - 2FF
0x02bf, 0x02bf, 0x0000, 0x0000, 0x02c3, 0x0000, 0x02c5, 0x0000,
0x02c7, 0x0000, 0x02c9, 0x0000, 0x02cb, 0x0000, 0x02cd, 0x02cd,
0x0000, 0x0000, 0x02d1, 0x0000, 0x02d3, 0x02d0, 0x02d0, 0x0000,
0x0000, 0x02d8, 0x02d7, 0x02d7, 0x0000, 0x0000, 0x02dd, 0x0000,
0x02df, 0x0000, 0x02e1, 0x0000, 0x0000, 0x02e4, 0x0000, 0x02e6,
0x0000, 0x0000, 0x02e9, 0x0000, 0x02eb, 0x0000, 0x02ed, 0x0000,
0x02ef, 0x02ef, 0x0000, 0x0000, 0x02f3, 0x02f2, 0x02f2, 0x0000,
0x02f7, 0x0000, 0x02f9, 0x02f9, 0x02f9, 0x02f9, 0x02f9, 0x0000,
// Entry 300 - 33F
0x02ff, 0x0300, 0x02ff, 0x0000, 0x0303, 0x0051, 0x00e6,
} // Size: 1574 bytes
// Total table size 1574 bytes (1KiB); checksum: 895AAF0B

File diff suppressed because it is too large.

@@ -0,0 +1,91 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package compact
var (
und = Tag{}
Und Tag = Tag{}
Afrikaans Tag = Tag{language: afIndex, locale: afIndex}
Amharic Tag = Tag{language: amIndex, locale: amIndex}
Arabic Tag = Tag{language: arIndex, locale: arIndex}
ModernStandardArabic Tag = Tag{language: ar001Index, locale: ar001Index}
Azerbaijani Tag = Tag{language: azIndex, locale: azIndex}
Bulgarian Tag = Tag{language: bgIndex, locale: bgIndex}
Bengali Tag = Tag{language: bnIndex, locale: bnIndex}
Catalan Tag = Tag{language: caIndex, locale: caIndex}
Czech Tag = Tag{language: csIndex, locale: csIndex}
Danish Tag = Tag{language: daIndex, locale: daIndex}
German Tag = Tag{language: deIndex, locale: deIndex}
Greek Tag = Tag{language: elIndex, locale: elIndex}
English Tag = Tag{language: enIndex, locale: enIndex}
AmericanEnglish Tag = Tag{language: enUSIndex, locale: enUSIndex}
BritishEnglish Tag = Tag{language: enGBIndex, locale: enGBIndex}
Spanish Tag = Tag{language: esIndex, locale: esIndex}
EuropeanSpanish Tag = Tag{language: esESIndex, locale: esESIndex}
LatinAmericanSpanish Tag = Tag{language: es419Index, locale: es419Index}
Estonian Tag = Tag{language: etIndex, locale: etIndex}
Persian Tag = Tag{language: faIndex, locale: faIndex}
Finnish Tag = Tag{language: fiIndex, locale: fiIndex}
Filipino Tag = Tag{language: filIndex, locale: filIndex}
French Tag = Tag{language: frIndex, locale: frIndex}
CanadianFrench Tag = Tag{language: frCAIndex, locale: frCAIndex}
Gujarati Tag = Tag{language: guIndex, locale: guIndex}
Hebrew Tag = Tag{language: heIndex, locale: heIndex}
Hindi Tag = Tag{language: hiIndex, locale: hiIndex}
Croatian Tag = Tag{language: hrIndex, locale: hrIndex}
Hungarian Tag = Tag{language: huIndex, locale: huIndex}
Armenian Tag = Tag{language: hyIndex, locale: hyIndex}
Indonesian Tag = Tag{language: idIndex, locale: idIndex}
Icelandic Tag = Tag{language: isIndex, locale: isIndex}
Italian Tag = Tag{language: itIndex, locale: itIndex}
Japanese Tag = Tag{language: jaIndex, locale: jaIndex}
Georgian Tag = Tag{language: kaIndex, locale: kaIndex}
Kazakh Tag = Tag{language: kkIndex, locale: kkIndex}
Khmer Tag = Tag{language: kmIndex, locale: kmIndex}
Kannada Tag = Tag{language: knIndex, locale: knIndex}
Korean Tag = Tag{language: koIndex, locale: koIndex}
Kirghiz Tag = Tag{language: kyIndex, locale: kyIndex}
Lao Tag = Tag{language: loIndex, locale: loIndex}
Lithuanian Tag = Tag{language: ltIndex, locale: ltIndex}
Latvian Tag = Tag{language: lvIndex, locale: lvIndex}
Macedonian Tag = Tag{language: mkIndex, locale: mkIndex}
Malayalam Tag = Tag{language: mlIndex, locale: mlIndex}
Mongolian Tag = Tag{language: mnIndex, locale: mnIndex}
Marathi Tag = Tag{language: mrIndex, locale: mrIndex}
Malay Tag = Tag{language: msIndex, locale: msIndex}
Burmese Tag = Tag{language: myIndex, locale: myIndex}
Nepali Tag = Tag{language: neIndex, locale: neIndex}
Dutch Tag = Tag{language: nlIndex, locale: nlIndex}
Norwegian Tag = Tag{language: noIndex, locale: noIndex}
Punjabi Tag = Tag{language: paIndex, locale: paIndex}
Polish Tag = Tag{language: plIndex, locale: plIndex}
Portuguese Tag = Tag{language: ptIndex, locale: ptIndex}
BrazilianPortuguese Tag = Tag{language: ptBRIndex, locale: ptBRIndex}
EuropeanPortuguese Tag = Tag{language: ptPTIndex, locale: ptPTIndex}
Romanian Tag = Tag{language: roIndex, locale: roIndex}
Russian Tag = Tag{language: ruIndex, locale: ruIndex}
Sinhala Tag = Tag{language: siIndex, locale: siIndex}
Slovak Tag = Tag{language: skIndex, locale: skIndex}
Slovenian Tag = Tag{language: slIndex, locale: slIndex}
Albanian Tag = Tag{language: sqIndex, locale: sqIndex}
Serbian Tag = Tag{language: srIndex, locale: srIndex}
SerbianLatin Tag = Tag{language: srLatnIndex, locale: srLatnIndex}
Swedish Tag = Tag{language: svIndex, locale: svIndex}
Swahili Tag = Tag{language: swIndex, locale: swIndex}
Tamil Tag = Tag{language: taIndex, locale: taIndex}
Telugu Tag = Tag{language: teIndex, locale: teIndex}
Thai Tag = Tag{language: thIndex, locale: thIndex}
Turkish Tag = Tag{language: trIndex, locale: trIndex}
Ukrainian Tag = Tag{language: ukIndex, locale: ukIndex}
Urdu Tag = Tag{language: urIndex, locale: urIndex}
Uzbek Tag = Tag{language: uzIndex, locale: uzIndex}
Vietnamese Tag = Tag{language: viIndex, locale: viIndex}
Chinese Tag = Tag{language: zhIndex, locale: zhIndex}
SimplifiedChinese Tag = Tag{language: zhHansIndex, locale: zhHansIndex}
TraditionalChinese Tag = Tag{language: zhHantIndex, locale: zhHantIndex}
Zulu Tag = Tag{language: zuIndex, locale: zuIndex}
)

vendor/golang.org/x/text/internal/language/compose.go (generated, vendored, new file)

@@ -0,0 +1,167 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"sort"
"strings"
)
// A Builder allows constructing a Tag from individual components.
// Its main user is Compose in the top-level language package.
type Builder struct {
Tag Tag
private string // the x extension
variants []string
extensions []string
}
// Make returns a new Tag from the current settings.
func (b *Builder) Make() Tag {
t := b.Tag
if len(b.extensions) > 0 || len(b.variants) > 0 {
sort.Sort(sortVariants(b.variants))
sort.Strings(b.extensions)
if b.private != "" {
b.extensions = append(b.extensions, b.private)
}
n := maxCoreSize + tokenLen(b.variants...) + tokenLen(b.extensions...)
buf := make([]byte, n)
p := t.genCoreBytes(buf)
t.pVariant = byte(p)
p += appendTokens(buf[p:], b.variants...)
t.pExt = uint16(p)
p += appendTokens(buf[p:], b.extensions...)
t.str = string(buf[:p])
// We may not always need to remake the string, but when or when not
// to do so is rather tricky.
scan := makeScanner(buf[:p])
t, _ = parse(&scan, "")
return t
} else if b.private != "" {
t.str = b.private
t.RemakeString()
}
return t
}
// SetTag copies all the settings from a given Tag. Any previously set values
// are discarded.
func (b *Builder) SetTag(t Tag) {
b.Tag.LangID = t.LangID
b.Tag.RegionID = t.RegionID
b.Tag.ScriptID = t.ScriptID
// TODO: optimize
b.variants = b.variants[:0]
if variants := t.Variants(); variants != "" {
for _, vr := range strings.Split(variants[1:], "-") {
b.variants = append(b.variants, vr)
}
}
b.extensions, b.private = b.extensions[:0], ""
for _, e := range t.Extensions() {
b.AddExt(e)
}
}
// AddExt adds extension e to the tag. e must be a valid extension as returned
// by Tag.Extension. If the extension already exists, it will be discarded,
// except for a -u extension, where non-existing key-type pairs will be added.
func (b *Builder) AddExt(e string) {
if e[0] == 'x' {
if b.private == "" {
b.private = e
}
return
}
for i, s := range b.extensions {
if s[0] == e[0] {
if e[0] == 'u' {
b.extensions[i] += e[1:]
}
return
}
}
b.extensions = append(b.extensions, e)
}
// SetExt sets the extension e to the tag. e must be a valid extension as
// returned by Tag.Extension. If the extension already exists, it will be
// overwritten, except for a -u extension, where the individual key-type pairs
// will be set.
func (b *Builder) SetExt(e string) {
if e[0] == 'x' {
b.private = e
return
}
for i, s := range b.extensions {
if s[0] == e[0] {
if e[0] == 'u' {
b.extensions[i] = e + s[1:]
} else {
b.extensions[i] = e
}
return
}
}
b.extensions = append(b.extensions, e)
}
// AddVariant adds any number of variants.
func (b *Builder) AddVariant(v ...string) {
for _, v := range v {
if v != "" {
b.variants = append(b.variants, v)
}
}
}
// ClearVariants removes any variants previously added, including those
// copied from a Tag in SetTag.
func (b *Builder) ClearVariants() {
b.variants = b.variants[:0]
}
// ClearExtensions removes any extensions previously added, including those
// copied from a Tag in SetTag.
func (b *Builder) ClearExtensions() {
b.private = ""
b.extensions = b.extensions[:0]
}
func tokenLen(token ...string) (n int) {
for _, t := range token {
n += len(t) + 1
}
return
}
func appendTokens(b []byte, token ...string) int {
p := 0
for _, t := range token {
b[p] = '-'
copy(b[p+1:], t)
p += 1 + len(t)
}
return p
}
type sortVariants []string
func (s sortVariants) Len() int {
return len(s)
}
func (s sortVariants) Swap(i, j int) {
s[j], s[i] = s[i], s[j]
}
func (s sortVariants) Less(i, j int) bool {
return variantIndex[s[i]] < variantIndex[s[j]]
}
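As the comment at the top of this file notes, the main user of Builder is Compose in the public golang.org/x/text/language package; a typical call looks roughly like this (sketch, error handling elided):

t, _ := language.Compose(
    language.MustParseBase("sr"),
    language.MustParseScript("Latn"),
    language.MustParseRegion("RS"),
)
fmt.Println(t) // sr-Latn-RS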

vendor/golang.org/x/text/internal/language/coverage.go (generated, vendored, new file)

@@ -0,0 +1,28 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
// BaseLanguages returns the list of all supported base languages. It generates
// the list by traversing the internal structures.
func BaseLanguages() []Language {
base := make([]Language, 0, NumLanguages)
for i := 0; i < langNoIndexOffset; i++ {
// We included "und" already for the value 0.
if i != nonCanonicalUnd {
base = append(base, Language(i))
}
}
i := langNoIndexOffset
for _, v := range langNoIndex {
for k := 0; k < 8; k++ {
if v&1 == 1 {
base = append(base, Language(i))
}
v >>= 1
i++
}
}
return base
}

vendor/golang.org/x/text/internal/language/language.go (generated, vendored, new file)

@@ -0,0 +1,627 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run gen.go gen_common.go -output tables.go
package language // import "golang.org/x/text/internal/language"
// TODO: Remove above NOTE after:
// - verifying that tables are dropped correctly (most notably matcher tables).
import (
"errors"
"fmt"
"strings"
)
const (
// maxCoreSize is the maximum size of a BCP 47 tag without variants and
// extensions. Equals max lang (3) + script (4) + max reg (3) + 2 dashes.
maxCoreSize = 12
// max99thPercentileSize is a somewhat arbitrary buffer size that presumably
// is large enough to hold at least 99% of the BCP 47 tags.
max99thPercentileSize = 32
// maxSimpleUExtensionSize is the maximum size of a -u extension with one
// key-type pair. Equals len("-u-") + key (2) + dash + max value (8).
maxSimpleUExtensionSize = 14
)
// Tag represents a BCP 47 language tag. It is used to specify an instance of a
// specific language or locale. All language tag values are guaranteed to be
// well-formed. The zero value of Tag is Und.
type Tag struct {
// TODO: the following fields have the form TagTypeID. This name is chosen
// to allow refactoring the public package without conflicting with its
// Base, Script, and Region methods. Once the transition is fully completed
// the ID can be stripped from the name.
LangID Language
RegionID Region
// TODO: we will soon run out of positions for ScriptID. Idea: instead of
// storing lang, region, and ScriptID codes, store only the compact index and
// have a lookup table from this code to its expansion. This greatly speeds
// up table lookup, speed up common variant cases.
// This will also immediately free up 3 extra bytes. Also, the pVariant
// field can now be moved to the lookup table, as the compact index uniquely
// determines the offset of a possible variant.
ScriptID Script
pVariant byte // offset in str, includes preceding '-'
pExt uint16 // offset of first extension, includes preceding '-'
// str is the string representation of the Tag. It will only be used if the
// tag has variants or extensions.
str string
}
// Make is a convenience wrapper for Parse that omits the error.
// In case of an error, a sensible default is returned.
func Make(s string) Tag {
t, _ := Parse(s)
return t
}
// Raw returns the raw base language, script and region, without making an
// attempt to infer their values.
// TODO: consider removing
func (t Tag) Raw() (b Language, s Script, r Region) {
return t.LangID, t.ScriptID, t.RegionID
}
// equalTags compares language, script and region subtags only.
func (t Tag) equalTags(a Tag) bool {
return t.LangID == a.LangID && t.ScriptID == a.ScriptID && t.RegionID == a.RegionID
}
// IsRoot returns true if t is equal to language "und".
func (t Tag) IsRoot() bool {
if int(t.pVariant) < len(t.str) {
return false
}
return t.equalTags(Und)
}
// IsPrivateUse reports whether the Tag consists solely of a private use tag.
func (t Tag) IsPrivateUse() bool {
return t.str != "" && t.pVariant == 0
}
// RemakeString is used to update t.str in case lang, script or region changed.
// It is assumed that pExt and pVariant still point to the start of the
// respective parts.
func (t *Tag) RemakeString() {
if t.str == "" {
return
}
extra := t.str[t.pVariant:]
if t.pVariant > 0 {
extra = extra[1:]
}
if t.equalTags(Und) && strings.HasPrefix(extra, "x-") {
t.str = extra
t.pVariant = 0
t.pExt = 0
return
}
var buf [max99thPercentileSize]byte // avoid extra memory allocation in most cases.
b := buf[:t.genCoreBytes(buf[:])]
if extra != "" {
diff := len(b) - int(t.pVariant)
b = append(b, '-')
b = append(b, extra...)
t.pVariant = uint8(int(t.pVariant) + diff)
t.pExt = uint16(int(t.pExt) + diff)
} else {
t.pVariant = uint8(len(b))
t.pExt = uint16(len(b))
}
t.str = string(b)
}
// genCoreBytes writes a string for the base languages, script and region tags
// to the given buffer and returns the number of bytes written. It will never
// write more than maxCoreSize bytes.
func (t *Tag) genCoreBytes(buf []byte) int {
n := t.LangID.StringToBuf(buf[:])
if t.ScriptID != 0 {
n += copy(buf[n:], "-")
n += copy(buf[n:], t.ScriptID.String())
}
if t.RegionID != 0 {
n += copy(buf[n:], "-")
n += copy(buf[n:], t.RegionID.String())
}
return n
}
// String returns the canonical string representation of the language tag.
func (t Tag) String() string {
if t.str != "" {
return t.str
}
if t.ScriptID == 0 && t.RegionID == 0 {
return t.LangID.String()
}
buf := [maxCoreSize]byte{}
return string(buf[:t.genCoreBytes(buf[:])])
}
// MarshalText implements encoding.TextMarshaler.
func (t Tag) MarshalText() (text []byte, err error) {
if t.str != "" {
text = append(text, t.str...)
} else if t.ScriptID == 0 && t.RegionID == 0 {
text = append(text, t.LangID.String()...)
} else {
buf := [maxCoreSize]byte{}
text = buf[:t.genCoreBytes(buf[:])]
}
return text, nil
}
// UnmarshalText implements encoding.TextUnmarshaler.
func (t *Tag) UnmarshalText(text []byte) error {
tag, err := Parse(string(text))
*t = tag
return err
}
// Variants returns the part of the tag holding all variants or the empty string
// if there are no variants defined.
func (t Tag) Variants() string {
if t.pVariant == 0 {
return ""
}
return t.str[t.pVariant:t.pExt]
}
// VariantOrPrivateUseTags returns variants or private use tags.
func (t Tag) VariantOrPrivateUseTags() string {
if t.pExt > 0 {
return t.str[t.pVariant:t.pExt]
}
return t.str[t.pVariant:]
}
// HasString reports whether this tag defines more than just the raw
// components.
func (t Tag) HasString() bool {
return t.str != ""
}
// Parent returns the CLDR parent of t. In CLDR, missing fields in data for a
// specific language are substituted with fields from the parent language.
// The parent for a language may change for newer versions of CLDR.
func (t Tag) Parent() Tag {
if t.str != "" {
// Strip the variants and extensions.
b, s, r := t.Raw()
t = Tag{LangID: b, ScriptID: s, RegionID: r}
if t.RegionID == 0 && t.ScriptID != 0 && t.LangID != 0 {
base, _ := addTags(Tag{LangID: t.LangID})
if base.ScriptID == t.ScriptID {
return Tag{LangID: t.LangID}
}
}
return t
}
if t.LangID != 0 {
if t.RegionID != 0 {
maxScript := t.ScriptID
if maxScript == 0 {
max, _ := addTags(t)
maxScript = max.ScriptID
}
for i := range parents {
if Language(parents[i].lang) == t.LangID && Script(parents[i].maxScript) == maxScript {
for _, r := range parents[i].fromRegion {
if Region(r) == t.RegionID {
return Tag{
LangID: t.LangID,
ScriptID: Script(parents[i].script),
RegionID: Region(parents[i].toRegion),
}
}
}
}
}
// Strip the script if it is the default one.
base, _ := addTags(Tag{LangID: t.LangID})
if base.ScriptID != maxScript {
return Tag{LangID: t.LangID, ScriptID: maxScript}
}
return Tag{LangID: t.LangID}
} else if t.ScriptID != 0 {
// The parent for an base-script pair with a non-default script is
// "und" instead of the base language.
base, _ := addTags(Tag{LangID: t.LangID})
if base.ScriptID != t.ScriptID {
return Und
}
return Tag{LangID: t.LangID}
}
}
return Und
}
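// A few concrete cases of the fallback chain implemented above:
//
//	MustParse("en-Latn").Parent() // "en": Latn is the default script for en
//	MustParse("sr-Latn").Parent() // "und": Latn is not the default script for sr
//	MustParse("en").Parent()      // "und"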
// ParseExtension parses s as an extension and returns it on success.
func ParseExtension(s string) (ext string, err error) {
defer func() {
if recover() != nil {
ext = ""
err = ErrSyntax
}
}()
scan := makeScannerString(s)
var end int
if n := len(scan.token); n != 1 {
return "", ErrSyntax
}
scan.toLower(0, len(scan.b))
end = parseExtension(&scan)
if end != len(s) {
return "", ErrSyntax
}
return string(scan.b), nil
}
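// For illustration:
//
//	ext, _ := ParseExtension("u-co-phonebk") // "u-co-phonebk"
//	_, err := ParseExtension("co-phonebk")   // ErrSyntax: no single-letter introducer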
// HasVariants reports whether t has variants.
func (t Tag) HasVariants() bool {
return uint16(t.pVariant) < t.pExt
}
// HasExtensions reports whether t has extensions.
func (t Tag) HasExtensions() bool {
return int(t.pExt) < len(t.str)
}
// Extension returns the extension of type x for tag t. It will return
// false for ok if t does not have the requested extension. The returned
// extension will be invalid in this case.
func (t Tag) Extension(x byte) (ext string, ok bool) {
for i := int(t.pExt); i < len(t.str)-1; {
var ext string
i, ext = getExtension(t.str, i)
if ext[0] == x {
return ext, true
}
}
return "", false
}
// Extensions returns all extensions of t.
func (t Tag) Extensions() []string {
e := []string{}
for i := int(t.pExt); i < len(t.str)-1; {
var ext string
i, ext = getExtension(t.str, i)
e = append(e, ext)
}
return e
}
// TypeForKey returns the type associated with the given key, where key and type
// are of the allowed values defined for the Unicode locale extension ('u') in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
// TypeForKey will traverse the inheritance chain to get the correct value.
//
// If there are multiple types associated with a key, only the first will be
// returned. If there is no type associated with a key, it returns the empty
// string.
func (t Tag) TypeForKey(key string) string {
if _, start, end, _ := t.findTypeForKey(key); end != start {
s := t.str[start:end]
if p := strings.IndexByte(s, '-'); p >= 0 {
s = s[:p]
}
return s
}
return ""
}
var (
errPrivateUse = errors.New("cannot set a key on a private use tag")
errInvalidArguments = errors.New("invalid key or type")
)
// SetTypeForKey returns a new Tag with the key set to type, where key and type
// are of the allowed values defined for the Unicode locale extension ('u') in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
// An empty value removes an existing pair with the same key.
func (t Tag) SetTypeForKey(key, value string) (Tag, error) {
if t.IsPrivateUse() {
return t, errPrivateUse
}
if len(key) != 2 {
return t, errInvalidArguments
}
// Remove the setting if value is "".
if value == "" {
start, sep, end, _ := t.findTypeForKey(key)
if start != sep {
// Remove a possible empty extension.
switch {
case t.str[start-2] != '-': // has previous elements.
case end == len(t.str), // end of string
end+2 < len(t.str) && t.str[end+2] == '-': // end of extension
start -= 2
}
if start == int(t.pVariant) && end == len(t.str) {
t.str = ""
t.pVariant, t.pExt = 0, 0
} else {
t.str = fmt.Sprintf("%s%s", t.str[:start], t.str[end:])
}
}
return t, nil
}
if len(value) < 3 || len(value) > 8 {
return t, errInvalidArguments
}
var (
buf [maxCoreSize + maxSimpleUExtensionSize]byte
uStart int // start of the -u extension.
)
// Generate the tag string if needed.
if t.str == "" {
uStart = t.genCoreBytes(buf[:])
buf[uStart] = '-'
uStart++
}
// Create new key-type pair and parse it to verify.
b := buf[uStart:]
copy(b, "u-")
copy(b[2:], key)
b[4] = '-'
b = b[:5+copy(b[5:], value)]
scan := makeScanner(b)
if parseExtensions(&scan); scan.err != nil {
return t, scan.err
}
// Assemble the replacement string.
if t.str == "" {
t.pVariant, t.pExt = byte(uStart-1), uint16(uStart-1)
t.str = string(buf[:uStart+len(b)])
} else {
s := t.str
start, sep, end, hasExt := t.findTypeForKey(key)
if start == sep {
if hasExt {
b = b[2:]
}
t.str = fmt.Sprintf("%s-%s%s", s[:sep], b, s[end:])
} else {
t.str = fmt.Sprintf("%s-%s%s", s[:start+3], value, s[end:])
}
}
return t, nil
}
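// A small sketch of how TypeForKey and SetTypeForKey cooperate on the -u
// extension (values as produced by the implementations above):
//
//	t := MustParse("nl")
//	t, _ = t.SetTypeForKey("co", "phonebk") // "nl-u-co-phonebk"
//	t.TypeForKey("co")                      // "phonebk"
//	t, _ = t.SetTypeForKey("co", "")        // key removed; back to "nl"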
// findTypeForKey returns the start and end position for the type corresponding
// to key or the point at which to insert the key-value pair if the type
// wasn't found. The hasExt return value reports whether an -u extension was present.
// Note: the extensions are typically very small and are likely to contain
// only one key-type pair.
func (t Tag) findTypeForKey(key string) (start, sep, end int, hasExt bool) {
p := int(t.pExt)
if len(key) != 2 || p == len(t.str) || p == 0 {
return p, p, p, false
}
s := t.str
// Find the correct extension.
for p++; s[p] != 'u'; p++ {
if s[p] > 'u' {
p--
return p, p, p, false
}
if p = nextExtension(s, p); p == len(s) {
return len(s), len(s), len(s), false
}
}
// Proceed to the hyphen following the extension name.
p++
// curKey is the key currently being processed.
curKey := ""
// Iterate over keys until we get the end of a section.
for {
end = p
for p++; p < len(s) && s[p] != '-'; p++ {
}
n := p - end - 1
if n <= 2 && curKey == key {
if sep < end {
sep++
}
return start, sep, end, true
}
switch n {
case 0, // invalid string
1: // next extension
return end, end, end, true
case 2:
// next key
curKey = s[end+1 : p]
if curKey > key {
return end, end, end, true
}
start = end
sep = p
}
}
}
// ParseBase parses a 2- or 3-letter ISO 639 code.
// It returns a ValueError if s is a well-formed but unknown language identifier,
// or another error if parsing failed.
func ParseBase(s string) (l Language, err error) {
defer func() {
if recover() != nil {
l = 0
err = ErrSyntax
}
}()
if n := len(s); n < 2 || 3 < n {
return 0, ErrSyntax
}
var buf [3]byte
return getLangID(buf[:copy(buf[:], s)])
}
// ParseScript parses a 4-letter ISO 15924 code.
// It returns a ValueError if s is a well-formed but unknown script identifier,
// or another error if parsing failed.
func ParseScript(s string) (scr Script, err error) {
defer func() {
if recover() != nil {
scr = 0
err = ErrSyntax
}
}()
if len(s) != 4 {
return 0, ErrSyntax
}
var buf [4]byte
return getScriptID(script, buf[:copy(buf[:], s)])
}
// EncodeM49 returns the Region for the given UN M.49 code.
// It returns an error if r is not a valid code.
func EncodeM49(r int) (Region, error) {
return getRegionM49(r)
}
// ParseRegion parses a 2- or 3-letter ISO 3166-1 or a UN M.49 code.
// It returns a ValueError if s is a well-formed but unknown region identifier,
// or another error if parsing failed.
func ParseRegion(s string) (r Region, err error) {
defer func() {
if recover() != nil {
r = 0
err = ErrSyntax
}
}()
if n := len(s); n < 2 || 3 < n {
return 0, ErrSyntax
}
var buf [3]byte
return getRegionID(buf[:copy(buf[:], s)])
}
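// For illustration, the parsers above accept the following inputs:
//
//	b, _ := ParseBase("en")     // 2-letter ISO 639 code
//	s, _ := ParseScript("latn") // case is normalized; s.String() == "Latn"
//	r, _ := ParseRegion("419")  // UN M.49 code; r.String() == "419"
//	_, err := ParseBase("q")    // ErrSyntax: too short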
// IsCountry returns whether this region is a country or autonomous area. This
// includes non-standard definitions from CLDR.
func (r Region) IsCountry() bool {
if r == 0 || r.IsGroup() || r.IsPrivateUse() && r != _XK {
return false
}
return true
}
// IsGroup returns whether this region defines a collection of regions. This
// includes non-standard definitions from CLDR.
func (r Region) IsGroup() bool {
if r == 0 {
return false
}
return int(regionInclusion[r]) < len(regionContainment)
}
// Contains returns whether Region c is contained by Region r. It returns true
// if c == r.
func (r Region) Contains(c Region) bool {
if r == c {
return true
}
g := regionInclusion[r]
if g >= nRegionGroups {
return false
}
m := regionContainment[g]
d := regionInclusion[c]
b := regionInclusionBits[d]
// A contained country may belong to multiple disjoint groups. Matching any
// of these indicates containment. If the contained region is a group, it
// must strictly be a subset.
if d >= nRegionGroups {
return b&m != 0
}
return b&^m == 0
}
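// Sketch of the containment relation (001 is the UN M.49 code for World):
//
//	world, _ := EncodeM49(1)
//	us, _ := ParseRegion("US")
//	world.Contains(us) // true
//	us.Contains(world) // false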
var errNoTLD = errors.New("language: region is not a valid ccTLD")
// TLD returns the country code top-level domain (ccTLD). UK is returned for GB.
// In all other cases it returns either the region itself or an error.
//
// This method may return an error for a region for which there exists a
// canonical form with a ccTLD. To get that ccTLD canonicalize r first. The
// region will already be canonicalized if it was obtained from a Tag that was
// obtained using any of the default methods.
func (r Region) TLD() (Region, error) {
// See http://en.wikipedia.org/wiki/Country_code_top-level_domain for the
// difference between ISO 3166-1 and IANA ccTLD.
if r == _GB {
r = _UK
}
if (r.typ() & ccTLD) == 0 {
return 0, errNoTLD
}
return r, nil
}
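// For example:
//
//	gb, _ := ParseRegion("GB")
//	tld, _ := gb.TLD() // tld.String() == "UK"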
// Canonicalize returns the region or a possible replacement if the region is
// deprecated. It will not return a replacement for deprecated regions that
// are split into multiple regions.
func (r Region) Canonicalize() Region {
if cr := normRegion(r); cr != 0 {
return cr
}
return r
}
// Variant represents a registered variant of a language as defined by BCP 47.
type Variant struct {
ID uint8
str string
}
// ParseVariant parses and returns a Variant. An error is returned if s is not
// a valid variant.
func ParseVariant(s string) (v Variant, err error) {
defer func() {
if recover() != nil {
v = Variant{}
err = ErrSyntax
}
}()
s = strings.ToLower(s)
if id, ok := variantIndex[s]; ok {
return Variant{id, s}, nil
}
return Variant{}, NewValueError([]byte(s))
}
// String returns the string representation of the variant.
func (v Variant) String() string {
return v.str
}
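// For illustration:
//
//	v, _ := ParseVariant("Oxendict") // case is normalized
//	v.String()                       // "oxendict"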

vendor/golang.org/x/text/internal/language/lookup.go generated vendored Normal file

@ -0,0 +1,412 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"bytes"
"fmt"
"sort"
"strconv"
"golang.org/x/text/internal/tag"
)
// findIndex tries to find the given tag in idx and returns a standardized error
// if it could not be found.
func findIndex(idx tag.Index, key []byte, form string) (index int, err error) {
if !tag.FixCase(form, key) {
return 0, ErrSyntax
}
i := idx.Index(key)
if i == -1 {
return 0, NewValueError(key)
}
return i, nil
}
func searchUint(imap []uint16, key uint16) int {
return sort.Search(len(imap), func(i int) bool {
return imap[i] >= key
})
}
type Language uint16
// getLangID returns the langID of s if s is a canonical subtag
// or langUnknown if s is not a canonical subtag.
func getLangID(s []byte) (Language, error) {
if len(s) == 2 {
return getLangISO2(s)
}
return getLangISO3(s)
}
// TODO language normalization as well as the AliasMaps could be moved to the
// higher level package, but it is a bit tricky to separate the generation.
func (id Language) Canonicalize() (Language, AliasType) {
return normLang(id)
}
// normLang returns the mapped langID of id according to mapping m.
func normLang(id Language) (Language, AliasType) {
k := sort.Search(len(AliasMap), func(i int) bool {
return AliasMap[i].From >= uint16(id)
})
if k < len(AliasMap) && AliasMap[k].From == uint16(id) {
return Language(AliasMap[k].To), AliasTypes[k]
}
return id, AliasTypeUnknown
}
// getLangISO2 returns the langID for the given 2-letter ISO language code
// or unknownLang if this does not exist.
func getLangISO2(s []byte) (Language, error) {
if !tag.FixCase("zz", s) {
return 0, ErrSyntax
}
if i := lang.Index(s); i != -1 && lang.Elem(i)[3] != 0 {
return Language(i), nil
}
return 0, NewValueError(s)
}
const base = 'z' - 'a' + 1
func strToInt(s []byte) uint {
v := uint(0)
for i := 0; i < len(s); i++ {
v *= base
v += uint(s[i] - 'a')
}
return v
}
// intToStr converts the given integer back to the original ASCII string
// passed to strToInt. len(s) must match the number of characters originally encoded.
func intToStr(v uint, s []byte) {
for i := len(s) - 1; i >= 0; i-- {
s[i] = byte(v%base) + 'a'
v /= base
}
}
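// For example, with base == 26, strToInt([]byte("aab")) == 0*26*26 + 0*26 + 1 == 1,
// and intToStr(1, b[:3]) writes "aab" back into b.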
// getLangISO3 returns the langID for the given 3-letter ISO language code
// or unknownLang if this does not exist.
func getLangISO3(s []byte) (Language, error) {
if tag.FixCase("und", s) {
// first try to match canonical 3-letter entries
for i := lang.Index(s[:2]); i != -1; i = lang.Next(s[:2], i) {
if e := lang.Elem(i); e[3] == 0 && e[2] == s[2] {
// We treat "und" as special and always translate it to "unspecified".
// Note that ZZ and Zzzz are private use and are not treated as
// unspecified by default.
id := Language(i)
if id == nonCanonicalUnd {
return 0, nil
}
return id, nil
}
}
if i := altLangISO3.Index(s); i != -1 {
return Language(altLangIndex[altLangISO3.Elem(i)[3]]), nil
}
n := strToInt(s)
if langNoIndex[n/8]&(1<<(n%8)) != 0 {
return Language(n) + langNoIndexOffset, nil
}
// Check for non-canonical uses of ISO3.
for i := lang.Index(s[:1]); i != -1; i = lang.Next(s[:1], i) {
if e := lang.Elem(i); e[2] == s[1] && e[3] == s[2] {
return Language(i), nil
}
}
return 0, NewValueError(s)
}
return 0, ErrSyntax
}
// StringToBuf writes the string to b and returns the number of bytes
// written. cap(b) must be >= 3.
func (id Language) StringToBuf(b []byte) int {
if id >= langNoIndexOffset {
intToStr(uint(id)-langNoIndexOffset, b[:3])
return 3
} else if id == 0 {
return copy(b, "und")
}
l := lang[id<<2:]
if l[3] == 0 {
return copy(b, l[:3])
}
return copy(b, l[:2])
}
// String returns the BCP 47 representation of the langID.
// Use b as variable name, instead of id, to ensure the variable
// used is consistent with that of Base in which this type is embedded.
func (b Language) String() string {
if b == 0 {
return "und"
} else if b >= langNoIndexOffset {
b -= langNoIndexOffset
buf := [3]byte{}
intToStr(uint(b), buf[:])
return string(buf[:])
}
l := lang.Elem(int(b))
if l[3] == 0 {
return l[:3]
}
return l[:2]
}
// ISO3 returns the ISO 639-3 language code.
func (b Language) ISO3() string {
if b == 0 || b >= langNoIndexOffset {
return b.String()
}
l := lang.Elem(int(b))
if l[3] == 0 {
return l[:3]
} else if l[2] == 0 {
return altLangISO3.Elem(int(l[3]))[:3]
}
// This allocation will only happen for 3-letter ISO codes
// that are non-canonical BCP 47 language identifiers.
return l[0:1] + l[2:4]
}
// IsPrivateUse reports whether this language code is reserved for private use.
func (b Language) IsPrivateUse() bool {
return langPrivateStart <= b && b <= langPrivateEnd
}
// SuppressScript returns the script marked as SuppressScript in the IANA
// language tag repository, or 0 if there is no such script.
func (b Language) SuppressScript() Script {
if b < langNoIndexOffset {
return Script(suppressScript[b])
}
return 0
}
type Region uint16
// getRegionID returns the region id for s if s is a valid 2- or 3-letter
// ISO 3166-1 code or a 3-digit UN M.49 code, or an error otherwise.
func getRegionID(s []byte) (Region, error) {
if len(s) == 3 {
if isAlpha(s[0]) {
return getRegionISO3(s)
}
if i, err := strconv.ParseUint(string(s), 10, 10); err == nil {
return getRegionM49(int(i))
}
}
return getRegionISO2(s)
}
// getRegionISO2 returns the regionID for the given 2-letter ISO country code
// or unknownRegion if this does not exist.
func getRegionISO2(s []byte) (Region, error) {
i, err := findIndex(regionISO, s, "ZZ")
if err != nil {
return 0, err
}
return Region(i) + isoRegionOffset, nil
}
// getRegionISO3 returns the regionID for the given 3-letter ISO country code
// or unknownRegion if this does not exist.
func getRegionISO3(s []byte) (Region, error) {
if tag.FixCase("ZZZ", s) {
for i := regionISO.Index(s[:1]); i != -1; i = regionISO.Next(s[:1], i) {
if e := regionISO.Elem(i); e[2] == s[1] && e[3] == s[2] {
return Region(i) + isoRegionOffset, nil
}
}
for i := 0; i < len(altRegionISO3); i += 3 {
if tag.Compare(altRegionISO3[i:i+3], s) == 0 {
return Region(altRegionIDs[i/3]), nil
}
}
return 0, NewValueError(s)
}
return 0, ErrSyntax
}
func getRegionM49(n int) (Region, error) {
if 0 < n && n <= 999 {
const (
searchBits = 7
regionBits = 9
regionMask = 1<<regionBits - 1
)
idx := n >> searchBits
buf := fromM49[m49Index[idx]:m49Index[idx+1]]
val := uint16(n) << regionBits // we rely on bits shifting out
i := sort.Search(len(buf), func(i int) bool {
return buf[i] >= val
})
if r := fromM49[int(m49Index[idx])+i]; r&^regionMask == val {
return Region(r & regionMask), nil
}
}
var e ValueError
fmt.Fprint(bytes.NewBuffer([]byte(e.v[:])), n)
return 0, e
}
// normRegion returns a region if r is deprecated or 0 otherwise.
// TODO: consider supporting BYS (-> BLR), CSK (-> 200 or CZ), PHI (-> PHL) and AFI (-> DJ).
// TODO: consider mapping split up regions to new most populous one (like CLDR).
func normRegion(r Region) Region {
m := regionOldMap
k := sort.Search(len(m), func(i int) bool {
return m[i].From >= uint16(r)
})
if k < len(m) && m[k].From == uint16(r) {
return Region(m[k].To)
}
return 0
}
const (
iso3166UserAssigned = 1 << iota
ccTLD
bcp47Region
)
func (r Region) typ() byte {
return regionTypes[r]
}
// String returns the BCP 47 representation for the region.
// It returns "ZZ" for an unspecified region.
func (r Region) String() string {
if r < isoRegionOffset {
if r == 0 {
return "ZZ"
}
return fmt.Sprintf("%03d", r.M49())
}
r -= isoRegionOffset
return regionISO.Elem(int(r))[:2]
}
// ISO3 returns the 3-letter ISO code of r.
// Note that not all regions have a 3-letter ISO code.
// In such cases this method returns "ZZZ".
func (r Region) ISO3() string {
if r < isoRegionOffset {
return "ZZZ"
}
r -= isoRegionOffset
reg := regionISO.Elem(int(r))
switch reg[2] {
case 0:
return altRegionISO3[reg[3]:][:3]
case ' ':
return "ZZZ"
}
return reg[0:1] + reg[2:4]
}
// M49 returns the UN M.49 encoding of r, or 0 if this encoding
// is not defined for r.
func (r Region) M49() int {
return int(m49[r])
}
// IsPrivateUse reports whether r has the ISO 3166 User-assigned status. This
// may include private-use tags that are assigned by CLDR and used in this
// implementation. So IsPrivateUse and IsCountry can be simultaneously true.
func (r Region) IsPrivateUse() bool {
return r.typ()&iso3166UserAssigned != 0
}
type Script uint16
// getScriptID returns the script id for string s. It assumes that s
// is of the format [A-Z][a-z]{3}.
func getScriptID(idx tag.Index, s []byte) (Script, error) {
i, err := findIndex(idx, s, "Zzzz")
return Script(i), err
}
// String returns the script code in title case.
// It returns "Zzzz" for an unspecified script.
func (s Script) String() string {
if s == 0 {
return "Zzzz"
}
return script.Elem(int(s))
}
// IsPrivateUse reports whether this script code is reserved for private use.
func (s Script) IsPrivateUse() bool {
return _Qaaa <= s && s <= _Qabx
}
const (
maxAltTaglen = len("en-US-POSIX")
maxLen = maxAltTaglen
)
var (
// grandfatheredMap holds a mapping from legacy and grandfathered tags to
// their base language or to a negative index selecting a more elaborate
// replacement tag from altTags.
grandfatheredMap = map[[maxLen]byte]int16{
[maxLen]byte{'a', 'r', 't', '-', 'l', 'o', 'j', 'b', 'a', 'n'}: _jbo, // art-lojban
[maxLen]byte{'i', '-', 'a', 'm', 'i'}: _ami, // i-ami
[maxLen]byte{'i', '-', 'b', 'n', 'n'}: _bnn, // i-bnn
[maxLen]byte{'i', '-', 'h', 'a', 'k'}: _hak, // i-hak
[maxLen]byte{'i', '-', 'k', 'l', 'i', 'n', 'g', 'o', 'n'}: _tlh, // i-klingon
[maxLen]byte{'i', '-', 'l', 'u', 'x'}: _lb, // i-lux
[maxLen]byte{'i', '-', 'n', 'a', 'v', 'a', 'j', 'o'}: _nv, // i-navajo
[maxLen]byte{'i', '-', 'p', 'w', 'n'}: _pwn, // i-pwn
[maxLen]byte{'i', '-', 't', 'a', 'o'}: _tao, // i-tao
[maxLen]byte{'i', '-', 't', 'a', 'y'}: _tay, // i-tay
[maxLen]byte{'i', '-', 't', 's', 'u'}: _tsu, // i-tsu
[maxLen]byte{'n', 'o', '-', 'b', 'o', 'k'}: _nb, // no-bok
[maxLen]byte{'n', 'o', '-', 'n', 'y', 'n'}: _nn, // no-nyn
[maxLen]byte{'s', 'g', 'n', '-', 'b', 'e', '-', 'f', 'r'}: _sfb, // sgn-BE-FR
[maxLen]byte{'s', 'g', 'n', '-', 'b', 'e', '-', 'n', 'l'}: _vgt, // sgn-BE-NL
[maxLen]byte{'s', 'g', 'n', '-', 'c', 'h', '-', 'd', 'e'}: _sgg, // sgn-CH-DE
[maxLen]byte{'z', 'h', '-', 'g', 'u', 'o', 'y', 'u'}: _cmn, // zh-guoyu
[maxLen]byte{'z', 'h', '-', 'h', 'a', 'k', 'k', 'a'}: _hak, // zh-hakka
[maxLen]byte{'z', 'h', '-', 'm', 'i', 'n', '-', 'n', 'a', 'n'}: _nan, // zh-min-nan
[maxLen]byte{'z', 'h', '-', 'x', 'i', 'a', 'n', 'g'}: _hsn, // zh-xiang
// Grandfathered tags with no modern replacement will be converted as
// follows:
[maxLen]byte{'c', 'e', 'l', '-', 'g', 'a', 'u', 'l', 'i', 's', 'h'}: -1, // cel-gaulish
[maxLen]byte{'e', 'n', '-', 'g', 'b', '-', 'o', 'e', 'd'}: -2, // en-GB-oed
[maxLen]byte{'i', '-', 'd', 'e', 'f', 'a', 'u', 'l', 't'}: -3, // i-default
[maxLen]byte{'i', '-', 'e', 'n', 'o', 'c', 'h', 'i', 'a', 'n'}: -4, // i-enochian
[maxLen]byte{'i', '-', 'm', 'i', 'n', 'g', 'o'}: -5, // i-mingo
[maxLen]byte{'z', 'h', '-', 'm', 'i', 'n'}: -6, // zh-min
// CLDR-specific tag.
[maxLen]byte{'r', 'o', 'o', 't'}: 0, // root
[maxLen]byte{'e', 'n', '-', 'u', 's', '-', 'p', 'o', 's', 'i', 'x'}: -7, // en_US_POSIX
}
altTagIndex = [...]uint8{0, 17, 31, 45, 61, 74, 86, 102}
altTags = "xtg-x-cel-gaulishen-GB-oxendicten-x-i-defaultund-x-i-enochiansee-x-i-mingonan-x-zh-minen-US-u-va-posix"
)
func grandfathered(s [maxAltTaglen]byte) (t Tag, ok bool) {
if v, ok := grandfatheredMap[s]; ok {
if v < 0 {
return Make(altTags[altTagIndex[-v-1]:altTagIndex[-v]]), true
}
t.LangID = Language(v)
return t, true
}
return t, false
}
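// Sketch of how grandfathered tags are resolved by Parse via the map above:
//
//	t, _ := Parse("i-klingon") // t.String() == "tlh"
//	t, _ = Parse("art-lojban") // t.String() == "jbo"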

vendor/golang.org/x/text/internal/language/match.go generated vendored Normal file

@ -0,0 +1,226 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import "errors"
type scriptRegionFlags uint8
const (
isList = 1 << iota
scriptInFrom
regionInFrom
)
func (t *Tag) setUndefinedLang(id Language) {
if t.LangID == 0 {
t.LangID = id
}
}
func (t *Tag) setUndefinedScript(id Script) {
if t.ScriptID == 0 {
t.ScriptID = id
}
}
func (t *Tag) setUndefinedRegion(id Region) {
if t.RegionID == 0 || t.RegionID.Contains(id) {
t.RegionID = id
}
}
// ErrMissingLikelyTagsData indicates no information was available
// to compute likely values of missing tags.
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
// addLikelySubtags sets subtags to their most likely value, given the locale.
// In most cases this means setting fields for unknown values, but in some
// cases it may alter a value. It returns an ErrMissingLikelyTagsData error
// if the given locale cannot be expanded.
func (t Tag) addLikelySubtags() (Tag, error) {
id, err := addTags(t)
if err != nil {
return t, err
} else if id.equalTags(t) {
return t, nil
}
id.RemakeString()
return id, nil
}
// specializeRegion attempts to specialize a group region.
func specializeRegion(t *Tag) bool {
if i := regionInclusion[t.RegionID]; i < nRegionGroups {
x := likelyRegionGroup[i]
if Language(x.lang) == t.LangID && Script(x.script) == t.ScriptID {
t.RegionID = Region(x.region)
}
return true
}
return false
}
// Maximize returns a new tag with missing tags filled in.
func (t Tag) Maximize() (Tag, error) {
return addTags(t)
}
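// For illustration, assuming the standard CLDR likely-subtags data compiled
// into this package:
//
//	t, _ := MustParse("en").Maximize()    // t.String() == "en-Latn-US"
//	t, _ = MustParse("und-NL").Maximize() // t.String() == "nl-Latn-NL"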
func addTags(t Tag) (Tag, error) {
// We leave private use identifiers alone.
if t.IsPrivateUse() {
return t, nil
}
if t.ScriptID != 0 && t.RegionID != 0 {
if t.LangID != 0 {
// already fully specified
specializeRegion(&t)
return t, nil
}
// Search matches for und-script-region. Note that for these cases
// region will never be a group so there is no need to check for this.
list := likelyRegion[t.RegionID : t.RegionID+1]
if x := list[0]; x.flags&isList != 0 {
list = likelyRegionList[x.lang : x.lang+uint16(x.script)]
}
for _, x := range list {
// Deviating from the spec. See match_test.go for details.
if Script(x.script) == t.ScriptID {
t.setUndefinedLang(Language(x.lang))
return t, nil
}
}
}
if t.LangID != 0 {
// Search matches for lang-script and lang-region, where lang != und.
if t.LangID < langNoIndexOffset {
x := likelyLang[t.LangID]
if x.flags&isList != 0 {
list := likelyLangList[x.region : x.region+uint16(x.script)]
if t.ScriptID != 0 {
for _, x := range list {
if Script(x.script) == t.ScriptID && x.flags&scriptInFrom != 0 {
t.setUndefinedRegion(Region(x.region))
return t, nil
}
}
} else if t.RegionID != 0 {
count := 0
goodScript := true
tt := t
for _, x := range list {
// We visit all entries for which the script was not
// defined, including the ones where the region was not
// defined. This allows for proper disambiguation within
// regions.
if x.flags&scriptInFrom == 0 && t.RegionID.Contains(Region(x.region)) {
tt.RegionID = Region(x.region)
tt.setUndefinedScript(Script(x.script))
goodScript = goodScript && tt.ScriptID == Script(x.script)
count++
}
}
if count == 1 {
return tt, nil
}
// Even if we fail to find a unique Region, we might have
// an unambiguous script.
if goodScript {
t.ScriptID = tt.ScriptID
}
}
}
}
} else {
// Search matches for und-script.
if t.ScriptID != 0 {
x := likelyScript[t.ScriptID]
if x.region != 0 {
t.setUndefinedRegion(Region(x.region))
t.setUndefinedLang(Language(x.lang))
return t, nil
}
}
// Search matches for und-region. If und-script-region exists, it would
// have been found earlier.
if t.RegionID != 0 {
if i := regionInclusion[t.RegionID]; i < nRegionGroups {
x := likelyRegionGroup[i]
if x.region != 0 {
t.setUndefinedLang(Language(x.lang))
t.setUndefinedScript(Script(x.script))
t.RegionID = Region(x.region)
}
} else {
x := likelyRegion[t.RegionID]
if x.flags&isList != 0 {
x = likelyRegionList[x.lang]
}
if x.script != 0 && x.flags != scriptInFrom {
t.setUndefinedLang(Language(x.lang))
t.setUndefinedScript(Script(x.script))
return t, nil
}
}
}
}
// Search matches for lang.
if t.LangID < langNoIndexOffset {
x := likelyLang[t.LangID]
if x.flags&isList != 0 {
x = likelyLangList[x.region]
}
if x.region != 0 {
t.setUndefinedScript(Script(x.script))
t.setUndefinedRegion(Region(x.region))
}
specializeRegion(&t)
if t.LangID == 0 {
t.LangID = _en // default language
}
return t, nil
}
return t, ErrMissingLikelyTagsData
}
func (t *Tag) setTagsFrom(id Tag) {
t.LangID = id.LangID
t.ScriptID = id.ScriptID
t.RegionID = id.RegionID
}
// minimize removes the region or script subtags from t such that
// t.addLikelySubtags() == t.minimize().addLikelySubtags().
func (t Tag) minimize() (Tag, error) {
t, err := minimizeTags(t)
if err != nil {
return t, err
}
t.RemakeString()
return t, nil
}
// minimizeTags mimics the behavior of the ICU 51 C implementation.
func minimizeTags(t Tag) (Tag, error) {
if t.equalTags(Und) {
return t, nil
}
max, err := addTags(t)
if err != nil {
return t, err
}
for _, id := range [...]Tag{
{LangID: t.LangID},
{LangID: t.LangID, RegionID: t.RegionID},
{LangID: t.LangID, ScriptID: t.ScriptID},
} {
if x, err := addTags(id); err == nil && max.equalTags(x) {
t.setTagsFrom(id)
break
}
}
return t, nil
}
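// A minimal sketch of the minimization above (assuming CLDR likely-subtags
// data): subtags that addTags would add back are removed.
//
//	t, _ := minimizeTags(MustParse("en-Latn-US")) // t.String() == "en"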

vendor/golang.org/x/text/internal/language/parse.go generated vendored Normal file

@ -0,0 +1,608 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"bytes"
"errors"
"fmt"
"sort"
"golang.org/x/text/internal/tag"
)
// isAlpha returns true if the byte is not a digit.
// b must be an ASCII letter or digit.
func isAlpha(b byte) bool {
return b > '9'
}
// isAlphaNum returns true if the string contains only ASCII letters or digits.
func isAlphaNum(s []byte) bool {
for _, c := range s {
if !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9') {
return false
}
}
return true
}
// ErrSyntax is returned by any of the parsing functions when the
// input is not well-formed, according to BCP 47.
// TODO: return the position at which the syntax error occurred?
var ErrSyntax = errors.New("language: tag is not well-formed")
// ErrDuplicateKey is returned when a tag contains the same key twice with
// different values in the -u section.
var ErrDuplicateKey = errors.New("language: different values for same key in -u extension")
// ValueError is returned by any of the parsing functions when the
// input is well-formed but the respective subtag is not recognized
// as a valid value.
type ValueError struct {
v [8]byte
}
// NewValueError creates a new ValueError.
func NewValueError(tag []byte) ValueError {
var e ValueError
copy(e.v[:], tag)
return e
}
func (e ValueError) tag() []byte {
n := bytes.IndexByte(e.v[:], 0)
if n == -1 {
n = 8
}
return e.v[:n]
}
// Error implements the error interface.
func (e ValueError) Error() string {
return fmt.Sprintf("language: subtag %q is well-formed but unknown", e.tag())
}
// Subtag returns the subtag for which the error occurred.
func (e ValueError) Subtag() string {
return string(e.tag())
}
// scanner is used to scan BCP 47 tokens, which are separated by _ or -.
type scanner struct {
b []byte
bytes [max99thPercentileSize]byte
token []byte
start int // start position of the current token
end int // end position of the current token
next int // next point for scan
err error
done bool
}
func makeScannerString(s string) scanner {
scan := scanner{}
if len(s) <= len(scan.bytes) {
scan.b = scan.bytes[:copy(scan.bytes[:], s)]
} else {
scan.b = []byte(s)
}
scan.init()
return scan
}
// makeScanner returns a scanner using b as the input buffer.
// b is not copied and may be modified by the scanner routines.
func makeScanner(b []byte) scanner {
scan := scanner{b: b}
scan.init()
return scan
}
func (s *scanner) init() {
for i, c := range s.b {
if c == '_' {
s.b[i] = '-'
}
}
s.scan()
}
// toLower converts the string between start and end to lower case.
func (s *scanner) toLower(start, end int) {
for i := start; i < end; i++ {
c := s.b[i]
if 'A' <= c && c <= 'Z' {
s.b[i] += 'a' - 'A'
}
}
}
func (s *scanner) setError(e error) {
if s.err == nil || (e == ErrSyntax && s.err != ErrSyntax) {
s.err = e
}
}
// resizeRange shrinks or grows the array at position oldStart such that
// a new string of size newSize can fit between oldStart and oldEnd.
// Sets the scan point to after the resized range.
func (s *scanner) resizeRange(oldStart, oldEnd, newSize int) {
s.start = oldStart
if end := oldStart + newSize; end != oldEnd {
diff := end - oldEnd
var b []byte
if n := len(s.b) + diff; n > cap(s.b) {
b = make([]byte, n)
copy(b, s.b[:oldStart])
} else {
b = s.b[:n]
}
copy(b[end:], s.b[oldEnd:])
s.b = b
s.next = end + (s.next - s.end)
s.end = end
}
}
// replace replaces the current token with repl.
func (s *scanner) replace(repl string) {
s.resizeRange(s.start, s.end, len(repl))
copy(s.b[s.start:], repl)
}
// gobble removes the current token from the input.
// Caller must call scan after calling gobble.
func (s *scanner) gobble(e error) {
s.setError(e)
if s.start == 0 {
s.b = s.b[:+copy(s.b, s.b[s.next:])]
s.end = 0
} else {
s.b = s.b[:s.start-1+copy(s.b[s.start-1:], s.b[s.end:])]
s.end = s.start - 1
}
s.next = s.start
}
// deleteRange removes the given range from s.b before the current token.
func (s *scanner) deleteRange(start, end int) {
s.b = s.b[:start+copy(s.b[start:], s.b[end:])]
diff := end - start
s.next -= diff
s.start -= diff
s.end -= diff
}
// scan parses the next token of a BCP 47 string. Tokens that are larger
// than 8 characters or include non-alphanumeric characters result in an error
// and are gobbled and removed from the output.
// It returns the end position of the last token consumed.
func (s *scanner) scan() (end int) {
end = s.end
s.token = nil
for s.start = s.next; s.next < len(s.b); {
i := bytes.IndexByte(s.b[s.next:], '-')
if i == -1 {
s.end = len(s.b)
s.next = len(s.b)
i = s.end - s.start
} else {
s.end = s.next + i
s.next = s.end + 1
}
token := s.b[s.start:s.end]
if i < 1 || i > 8 || !isAlphaNum(token) {
s.gobble(ErrSyntax)
continue
}
s.token = token
return end
}
if n := len(s.b); n > 0 && s.b[n-1] == '-' {
s.setError(ErrSyntax)
s.b = s.b[:len(s.b)-1]
}
s.done = true
return end
}
// acceptMinSize parses multiple tokens of the given size or greater.
// It returns the end position of the last token consumed.
func (s *scanner) acceptMinSize(min int) (end int) {
end = s.end
s.scan()
for ; len(s.token) >= min; s.scan() {
end = s.end
}
return end
}
// Parse parses the given BCP 47 string and returns a valid Tag. If parsing
// failed it returns an error and any part of the tag that could be parsed.
// If parsing succeeded but an unknown value was found, it returns
// ValueError. The Tag returned in this case is just stripped of the unknown
// value. All other values are preserved. It accepts tags in the BCP 47 format
// and extensions to this standard defined in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
func Parse(s string) (t Tag, err error) {
// TODO: consider supporting old-style locale key-value pairs.
if s == "" {
return Und, ErrSyntax
}
defer func() {
if recover() != nil {
t = Und
err = ErrSyntax
return
}
}()
if len(s) <= maxAltTaglen {
b := [maxAltTaglen]byte{}
for i, c := range s {
// Generating invalid UTF-8 is okay as it won't match.
if 'A' <= c && c <= 'Z' {
c += 'a' - 'A'
} else if c == '_' {
c = '-'
}
b[i] = byte(c)
}
if t, ok := grandfathered(b); ok {
return t, nil
}
}
scan := makeScannerString(s)
return parse(&scan, s)
}
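// For illustration of the normalization performed by Parse:
//
//	t, err := Parse("EN_latn_us") // '_' becomes '-', case is normalized
//	// t.String() == "en-Latn-US", err == nil
//	_, err = Parse("")            // ErrSyntax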
func parse(scan *scanner, s string) (t Tag, err error) {
t = Und
var end int
if n := len(scan.token); n <= 1 {
scan.toLower(0, len(scan.b))
if n == 0 || scan.token[0] != 'x' {
return t, ErrSyntax
}
end = parseExtensions(scan)
} else if n >= 4 {
return Und, ErrSyntax
} else { // the usual case
t, end = parseTag(scan, true)
if n := len(scan.token); n == 1 {
t.pExt = uint16(end)
end = parseExtensions(scan)
} else if end < len(scan.b) {
scan.setError(ErrSyntax)
scan.b = scan.b[:end]
}
}
if int(t.pVariant) < len(scan.b) {
if end < len(s) {
s = s[:end]
}
if len(s) > 0 && tag.Compare(s, scan.b) == 0 {
t.str = s
} else {
t.str = string(scan.b)
}
} else {
t.pVariant, t.pExt = 0, 0
}
return t, scan.err
}
// parseTag parses language, script, region and variants.
// It returns a Tag and the end position in the input that was parsed.
// If doNorm is true, then <lang>-<extlang> will be normalized to <extlang>.
func parseTag(scan *scanner, doNorm bool) (t Tag, end int) {
var e error
// TODO: set an error if an unknown lang, script or region is encountered.
t.LangID, e = getLangID(scan.token)
scan.setError(e)
scan.replace(t.LangID.String())
langStart := scan.start
end = scan.scan()
for len(scan.token) == 3 && isAlpha(scan.token[0]) {
// From http://tools.ietf.org/html/bcp47, <lang>-<extlang> tags are equivalent
// to a tag of the form <extlang>.
if doNorm {
lang, e := getLangID(scan.token)
if lang != 0 {
t.LangID = lang
langStr := lang.String()
copy(scan.b[langStart:], langStr)
scan.b[langStart+len(langStr)] = '-'
scan.start = langStart + len(langStr) + 1
}
scan.gobble(e)
}
end = scan.scan()
}
if len(scan.token) == 4 && isAlpha(scan.token[0]) {
t.ScriptID, e = getScriptID(script, scan.token)
if t.ScriptID == 0 {
scan.gobble(e)
}
end = scan.scan()
}
if n := len(scan.token); n >= 2 && n <= 3 {
t.RegionID, e = getRegionID(scan.token)
if t.RegionID == 0 {
scan.gobble(e)
} else {
scan.replace(t.RegionID.String())
}
end = scan.scan()
}
scan.toLower(scan.start, len(scan.b))
t.pVariant = byte(end)
end = parseVariants(scan, end, t)
t.pExt = uint16(end)
return t, end
}
var separator = []byte{'-'}
// parseVariants scans tokens as long as each token is a valid variant string.
// Duplicate variants are removed.
func parseVariants(scan *scanner, end int, t Tag) int {
start := scan.start
varIDBuf := [4]uint8{}
variantBuf := [4][]byte{}
varID := varIDBuf[:0]
variant := variantBuf[:0]
last := -1
needSort := false
for ; len(scan.token) >= 4; scan.scan() {
// TODO: measure the impact of needing this conversion and redesign
// the data structure if there is an issue.
v, ok := variantIndex[string(scan.token)]
if !ok {
// unknown variant
// TODO: allow user-defined variants?
scan.gobble(NewValueError(scan.token))
continue
}
varID = append(varID, v)
variant = append(variant, scan.token)
if !needSort {
if last < int(v) {
last = int(v)
} else {
needSort = true
// There are no legal combinations of more than 7 variants
// (and this is by no means a useful sequence).
const maxVariants = 8
if len(varID) > maxVariants {
break
}
}
}
end = scan.end
}
if needSort {
sort.Sort(variantsSort{varID, variant})
k, l := 0, -1
for i, v := range varID {
w := int(v)
if l == w {
// Remove duplicates.
continue
}
varID[k] = varID[i]
variant[k] = variant[i]
k++
l = w
}
if str := bytes.Join(variant[:k], separator); len(str) == 0 {
end = start - 1
} else {
scan.resizeRange(start, end, len(str))
copy(scan.b[scan.start:], str)
end = scan.end
}
}
return end
}
type variantsSort struct {
i []uint8
v [][]byte
}
func (s variantsSort) Len() int {
return len(s.i)
}
func (s variantsSort) Swap(i, j int) {
s.i[i], s.i[j] = s.i[j], s.i[i]
s.v[i], s.v[j] = s.v[j], s.v[i]
}
func (s variantsSort) Less(i, j int) bool {
return s.i[i] < s.i[j]
}
type bytesSort struct {
b [][]byte
n int // first n bytes to compare
}
func (b bytesSort) Len() int {
return len(b.b)
}
func (b bytesSort) Swap(i, j int) {
b.b[i], b.b[j] = b.b[j], b.b[i]
}
func (b bytesSort) Less(i, j int) bool {
for k := 0; k < b.n; k++ {
if b.b[i][k] == b.b[j][k] {
continue
}
return b.b[i][k] < b.b[j][k]
}
return false
}
// parseExtensions parses and normalizes the extensions in the buffer.
// It returns the last position of scan.b that is part of any extension.
// It also trims scan.b to remove excess parts accordingly.
func parseExtensions(scan *scanner) int {
start := scan.start
exts := [][]byte{}
private := []byte{}
end := scan.end
for len(scan.token) == 1 {
extStart := scan.start
ext := scan.token[0]
end = parseExtension(scan)
extension := scan.b[extStart:end]
if len(extension) < 3 || (ext != 'x' && len(extension) < 4) {
scan.setError(ErrSyntax)
end = extStart
continue
} else if start == extStart && (ext == 'x' || scan.start == len(scan.b)) {
scan.b = scan.b[:end]
return end
} else if ext == 'x' {
private = extension
break
}
exts = append(exts, extension)
}
sort.Sort(bytesSort{exts, 1})
if len(private) > 0 {
exts = append(exts, private)
}
scan.b = scan.b[:start]
if len(exts) > 0 {
scan.b = append(scan.b, bytes.Join(exts, separator)...)
} else if start > 0 {
// Strip trailing '-'.
scan.b = scan.b[:start-1]
}
return end
}
// parseExtension parses a single extension and returns the position of
// the extension end.
func parseExtension(scan *scanner) int {
start, end := scan.start, scan.end
switch scan.token[0] {
case 'u': // https://www.ietf.org/rfc/rfc6067.txt
attrStart := end
scan.scan()
for last := []byte{}; len(scan.token) > 2; scan.scan() {
if bytes.Compare(scan.token, last) != -1 {
// Attributes are unsorted. Start over from scratch.
p := attrStart + 1
scan.next = p
attrs := [][]byte{}
for scan.scan(); len(scan.token) > 2; scan.scan() {
attrs = append(attrs, scan.token)
end = scan.end
}
sort.Sort(bytesSort{attrs, 3})
copy(scan.b[p:], bytes.Join(attrs, separator))
break
}
last = scan.token
end = scan.end
}
// Scan key-type sequences. A key is of length 2 and may be followed
// by 0 or more "type" subtags from 3 to the maximum of 8 letters.
var last, key []byte
for attrEnd := end; len(scan.token) == 2; last = key {
key = scan.token
end = scan.end
for scan.scan(); end < scan.end && len(scan.token) > 2; scan.scan() {
end = scan.end
}
// TODO: check key value validity
if bytes.Compare(key, last) != 1 || scan.err != nil {
// We have an invalid key or the keys are not sorted.
// Start scanning keys from scratch and reorder.
p := attrEnd + 1
scan.next = p
keys := [][]byte{}
for scan.scan(); len(scan.token) == 2; {
keyStart := scan.start
end = scan.end
for scan.scan(); end < scan.end && len(scan.token) > 2; scan.scan() {
end = scan.end
}
keys = append(keys, scan.b[keyStart:end])
}
sort.Stable(bytesSort{keys, 2})
if n := len(keys); n > 0 {
k := 0
for i := 1; i < n; i++ {
if !bytes.Equal(keys[k][:2], keys[i][:2]) {
k++
keys[k] = keys[i]
} else if !bytes.Equal(keys[k], keys[i]) {
scan.setError(ErrDuplicateKey)
}
}
keys = keys[:k+1]
}
reordered := bytes.Join(keys, separator)
if e := p + len(reordered); e < end {
scan.deleteRange(e, end)
end = e
}
copy(scan.b[p:], reordered)
break
}
}
case 't': // https://www.ietf.org/rfc/rfc6497.txt
scan.scan()
if n := len(scan.token); n >= 2 && n <= 3 && isAlpha(scan.token[1]) {
_, end = parseTag(scan, false)
scan.toLower(start, end)
}
for len(scan.token) == 2 && !isAlpha(scan.token[1]) {
end = scan.acceptMinSize(3)
}
case 'x':
end = scan.acceptMinSize(1)
default:
end = scan.acceptMinSize(2)
}
return end
}
// getExtension returns the name, body and end position of the extension.
func getExtension(s string, p int) (end int, ext string) {
if s[p] == '-' {
p++
}
if s[p] == 'x' {
return len(s), s[p:]
}
end = nextExtension(s, p)
return end, s[p:end]
}
// nextExtension finds the next extension within the string, searching
// for the -<char>- pattern from position p.
// In the vast majority of cases, language tags will have at most
// one extension and extensions tend to be small.
func nextExtension(s string, p int) int {
for n := len(s) - 3; p < n; {
if s[p] == '-' {
if s[p+2] == '-' {
return p
}
p += 3
} else {
p++
}
}
return len(s)
}

vendor/golang.org/x/text/internal/language/tables.go generated vendored Normal file
File diff suppressed because it is too large

vendor/golang.org/x/text/internal/language/tags.go generated vendored Normal file

@ -0,0 +1,48 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
// MustParse is like Parse, but panics if the given BCP 47 tag cannot be parsed.
// It simplifies safe initialization of Tag values.
func MustParse(s string) Tag {
t, err := Parse(s)
if err != nil {
panic(err)
}
return t
}
// MustParseBase is like ParseBase, but panics if the given base cannot be parsed.
// It simplifies safe initialization of Base values.
func MustParseBase(s string) Language {
b, err := ParseBase(s)
if err != nil {
panic(err)
}
return b
}
// MustParseScript is like ParseScript, but panics if the given script cannot be
// parsed. It simplifies safe initialization of Script values.
func MustParseScript(s string) Script {
scr, err := ParseScript(s)
if err != nil {
panic(err)
}
return scr
}
// MustParseRegion is like ParseRegion, but panics if the given region cannot be
// parsed. It simplifies safe initialization of Region values.
func MustParseRegion(s string) Region {
r, err := ParseRegion(s)
if err != nil {
panic(err)
}
return r
}
// Und is the root language.
var Und Tag

vendor/golang.org/x/text/internal/match.go generated vendored Normal file

@ -0,0 +1,67 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package internal
// This file contains matchers that implement CLDR inheritance.
//
// See https://unicode.org/reports/tr35/#Locale_Inheritance.
//
// Some of the inheritance described in this document is already handled by
// the cldr package.
import (
"golang.org/x/text/language"
)
// TODO: consider if (some of the) matching algorithm needs to be public after
// getting some feel about what is generic and what is specific.
// NewInheritanceMatcher returns a matcher that matches based on the inheritance
// chain.
//
// The matcher uses canonicalization and the parent relationship to find a
// match. The resulting match will always be either Und or a language with the
// same language and script as the requested language. It will not match
// languages for which there is understood to be mutual or one-directional
// intelligibility.
//
// A Match will indicate an Exact match if the language matches after
// canonicalization and High if the matched tag is a parent.
func NewInheritanceMatcher(t []language.Tag) *InheritanceMatcher {
tags := &InheritanceMatcher{make(map[language.Tag]int)}
for i, tag := range t {
ct, err := language.All.Canonicalize(tag)
if err != nil {
ct = tag
}
tags.index[ct] = i
}
return tags
}
type InheritanceMatcher struct {
index map[language.Tag]int
}
func (m InheritanceMatcher) Match(want ...language.Tag) (language.Tag, int, language.Confidence) {
for _, t := range want {
ct, err := language.All.Canonicalize(t)
if err != nil {
ct = t
}
conf := language.Exact
for {
if index, ok := m.index[ct]; ok {
return ct, index, conf
}
if ct == language.Und {
break
}
ct = ct.Parent()
conf = language.High
}
}
return language.Und, 0, language.No
}
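// A minimal sketch of matching via the parent chain (assuming standard CLDR
// parent data):
//
//	m := NewInheritanceMatcher([]language.Tag{
//		language.English,            // index 0
//		language.MustParse("en-GB"), // index 1
//	})
//	tag, index, conf := m.Match(language.MustParse("en-GB-oxendict"))
//	// tag == en-GB, index == 1, conf == language.High (matched via Parent)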

vendor/golang.org/x/text/internal/tag/tag.go generated vendored Normal file

@ -0,0 +1,100 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package tag contains functionality handling tags and related data.
package tag // import "golang.org/x/text/internal/tag"
import "sort"
// An Index converts tags to a compact numeric value.
//
// All elements are of size 4. Tags may be up to 4 bytes long. Excess bytes can
// be used to store additional information about the tag.
type Index string
// Elem returns the element data at the given index.
func (s Index) Elem(x int) string {
return string(s[x*4 : x*4+4])
}
// Index reports the index of the given key or -1 if it could not be found.
// Only the first len(key) bytes from the start of the 4-byte entries will be
// considered for the search and the first match in Index will be returned.
func (s Index) Index(key []byte) int {
n := len(key)
// search the index of the first entry with an equal or higher value than
// key in s.
index := sort.Search(len(s)/4, func(i int) bool {
return cmp(s[i*4:i*4+n], key) != -1
})
i := index * 4
if cmp(s[i:i+len(key)], key) != 0 {
return -1
}
return index
}
// Next finds the next occurrence of key after index x, which must have been
// obtained from a call to Index using the same key. It returns x+1 or -1.
func (s Index) Next(key []byte, x int) int {
if x++; x*4 < len(s) && cmp(s[x*4:x*4+len(key)], key) == 0 {
return x
}
return -1
}
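// A toy example of the Index layout (real indexes are generated and sorted;
// each entry occupies 4 bytes):
//
//	idx := Index("aaaabbbbbbbb")
//	i := idx.Index([]byte("bb")) // 1: first entry starting with "bb"
//	idx.Elem(i)                  // "bbbb"
//	idx.Next([]byte("bb"), i)    // 2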
// cmp returns an integer comparing a and b lexicographically.
func cmp(a Index, b []byte) int {
n := len(a)
if len(b) < n {
n = len(b)
}
for i, c := range b[:n] {
switch {
case a[i] > c:
return 1
case a[i] < c:
return -1
}
}
switch {
case len(a) < len(b):
return -1
case len(a) > len(b):
return 1
}
return 0
}
// Compare returns an integer comparing a and b lexicographically.
func Compare(a string, b []byte) int {
return cmp(Index(a), b)
}
// FixCase reformats b to the same pattern of cases as form.
// It returns false if string b is malformed.
func FixCase(form string, b []byte) bool {
if len(form) != len(b) {
return false
}
for i, c := range b {
if form[i] <= 'Z' {
if c >= 'a' {
c -= 'z' - 'Z'
}
if c < 'A' || 'Z' < c {
return false
}
} else {
if c <= 'Z' {
c += 'z' - 'Z'
}
if c < 'a' || 'z' < c {
return false
}
}
b[i] = c
}
return true
}
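// For illustration:
//
//	b := []byte("latn")
//	FixCase("Zzzz", b)          // true; b now holds "Latn"
//	FixCase("ZZ", []byte("g1")) // false: '1' is not a letter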

vendor/golang.org/x/text/language/coverage.go generated vendored Normal file

@ -0,0 +1,187 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"fmt"
"sort"
"golang.org/x/text/internal/language"
)
// The Coverage interface is used to define the level of coverage of an
// internationalization service. Note that not all types are supported by all
// services. As lists may be generated on the fly, it is recommended that users
// of a Coverage cache the results.
type Coverage interface {
// Tags returns the list of supported tags.
Tags() []Tag
// BaseLanguages returns the list of supported base languages.
BaseLanguages() []Base
// Scripts returns the list of supported scripts.
Scripts() []Script
// Regions returns the list of supported regions.
Regions() []Region
}
var (
// Supported defines a Coverage that lists all supported subtags. Tags
// always returns nil.
Supported Coverage = allSubtags{}
)
// TODO:
// - Support Variants, numbering systems.
// - CLDR coverage levels.
// - Set of common tags defined in this package.
type allSubtags struct{}
// Regions returns the list of supported regions. As all regions are in a
// consecutive range, it simply returns a slice of numbers in increasing order.
// The "undefined" region is not returned.
func (s allSubtags) Regions() []Region {
reg := make([]Region, language.NumRegions)
for i := range reg {
reg[i] = Region{language.Region(i + 1)}
}
return reg
}
// Scripts returns the list of supported scripts. As all scripts are in a
// consecutive range, it simply returns a slice of numbers in increasing order.
// The "undefined" script is not returned.
func (s allSubtags) Scripts() []Script {
scr := make([]Script, language.NumScripts)
for i := range scr {
scr[i] = Script{language.Script(i + 1)}
}
return scr
}
// BaseLanguages returns the list of all supported base languages. It generates
// the list by traversing the internal structures.
func (s allSubtags) BaseLanguages() []Base {
bs := language.BaseLanguages()
base := make([]Base, len(bs))
for i, b := range bs {
base[i] = Base{b}
}
return base
}
// Tags always returns nil.
func (s allSubtags) Tags() []Tag {
return nil
}
// coverage is used by NewCoverage which is used as a convenient way for
// creating Coverage implementations for partially defined data. Very often a
// package will only need to define a subset of slices. coverage provides a
// convenient way to do this. Moreover, packages using NewCoverage, instead of
// their own implementation, will not break if later new slice types are added.
type coverage struct {
tags func() []Tag
bases func() []Base
scripts func() []Script
regions func() []Region
}
func (s *coverage) Tags() []Tag {
if s.tags == nil {
return nil
}
return s.tags()
}
// bases implements sort.Interface and is used to sort base languages.
type bases []Base
func (b bases) Len() int {
return len(b)
}
func (b bases) Swap(i, j int) {
b[i], b[j] = b[j], b[i]
}
func (b bases) Less(i, j int) bool {
return b[i].langID < b[j].langID
}
// BaseLanguages returns the result from calling s.bases if it is specified or
// otherwise derives the set of supported base languages from tags.
func (s *coverage) BaseLanguages() []Base {
if s.bases == nil {
tags := s.Tags()
if len(tags) == 0 {
return nil
}
a := make([]Base, len(tags))
for i, t := range tags {
a[i] = Base{language.Language(t.lang())}
}
sort.Sort(bases(a))
k := 0
for i := 1; i < len(a); i++ {
if a[k] != a[i] {
k++
a[k] = a[i]
}
}
return a[:k+1]
}
return s.bases()
}
func (s *coverage) Scripts() []Script {
if s.scripts == nil {
return nil
}
return s.scripts()
}
func (s *coverage) Regions() []Region {
if s.regions == nil {
return nil
}
return s.regions()
}
// NewCoverage returns a Coverage for the given lists. It is typically used by
// packages providing internationalization services to define their level of
// coverage. A list may be of type []T or func() []T, where T is either Tag,
// Base, Script or Region. The returned Coverage derives the value for Bases
// from Tags if no func or slice for []Base is specified. For other unspecified
// types the returned Coverage will return nil for the respective methods.
func NewCoverage(list ...interface{}) Coverage {
s := &coverage{}
for _, x := range list {
switch v := x.(type) {
case func() []Base:
s.bases = v
case func() []Script:
s.scripts = v
case func() []Region:
s.regions = v
case func() []Tag:
s.tags = v
case []Base:
s.bases = func() []Base { return v }
case []Script:
s.scripts = func() []Script { return v }
case []Region:
s.regions = func() []Region { return v }
case []Tag:
s.tags = func() []Tag { return v }
default:
panic(fmt.Sprintf("language: unsupported set type %T", v))
}
}
return s
}
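// A minimal sketch of NewCoverage; BaseLanguages is derived from the tag list
// because no []Base or func() []Base was supplied:
//
//	c := NewCoverage([]Tag{English, Dutch}, func() []Region {
//		return []Region{MustParseRegion("NL")}
//	})
//	c.Tags()          // [en nl]
//	c.BaseLanguages() // Base values for en and nl, derived from Tags
//	c.Scripts()       // nil: no script list was supplied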

vendor/golang.org/x/text/language/doc.go generated vendored Normal file

@ -0,0 +1,98 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package language implements BCP 47 language tags and related functionality.
//
// The most important function of package language is to match a list of
// user-preferred languages to a list of supported languages.
// It relieves the developer of dealing with the complexity of this process
// and provides the user with the best experience
// (see https://blog.golang.org/matchlang).
//
// # Matching preferred against supported languages
//
// A Matcher for an application that supports English, Australian English,
// Danish, and standard Mandarin can be created as follows:
//
// var matcher = language.NewMatcher([]language.Tag{
// language.English, // The first language is used as fallback.
// language.MustParse("en-AU"),
// language.Danish,
// language.Chinese,
// })
//
// This list of supported languages is typically implied by the languages for
// which there exists translations of the user interface.
//
// User-preferred languages usually come as a comma-separated list of BCP 47
// language tags.
// MatchStrings finds the best match for such strings:
//
// func handler(w http.ResponseWriter, r *http.Request) {
// lang, _ := r.Cookie("lang")
// accept := r.Header.Get("Accept-Language")
// tag, _ := language.MatchStrings(matcher, lang.String(), accept)
//
// // tag should now be used for the initialization of any
// // locale-specific service.
// }
//
// The Matcher's Match method can be used to match Tags directly.
//
// Matchers are aware of the intricacies of equivalence between languages, such
// as deprecated subtags, legacy tags, macro languages, mutual
// intelligibility between scripts and languages, and transparently passing
// BCP 47 user configuration.
// For instance, it will know that a reader of Bokmål Danish can read Norwegian
// and will know that Cantonese ("yue") is a good match for "zh-HK".
//
// # Using match results
//
// To guarantee a consistent user experience to the user it is important to
// use the same language tag for the selection of any locale-specific services.
// For example, it is utterly confusing to substitute spelled-out numbers
// or dates in one language in text of another language.
// More subtly confusing is using the wrong sorting order or casing
// algorithm for a certain language.
//
// All the packages in x/text that provide locale-specific services
// (e.g. collate, cases) should be initialized with the tag that was
// obtained at the start of an interaction with the user.
//
// Note that the Tag returned by Match and MatchString may differ from any
// of the supported languages, as it may contain carried over settings from
// the user tags.
// This may be inconvenient when your application has some additional
// locale-specific data for your supported languages.
// Match and MatchString both return the index of the matched supported tag
// to simplify associating such data with the matched tag.
//
// # Canonicalization
//
// If one uses the Matcher to compare languages one does not need to
// worry about canonicalization.
//
// The meaning of a Tag varies per application. The language package
// therefore delays canonicalization and preserves information as much
// as possible. The Matcher, however, will always take into account that
// two different tags may represent the same language.
//
// By default, only legacy and deprecated tags are converted into their
// canonical equivalent. All other information is preserved. This approach makes
// the confidence scores more accurate and allows matchers to distinguish
// between variants that are otherwise lost.
//
// As a consequence, two tags that should be treated as identical according to
// BCP 47 or CLDR, like "en-Latn" and "en", will be represented differently. The
// Matcher handles such distinctions, though, and is aware of the
// equivalence relations. The CanonType type can be used to alter the
// canonicalization form.
//
// # References
//
// BCP 47 - Tags for Identifying Languages http://tools.ietf.org/html/bcp47
package language // import "golang.org/x/text/language"
// TODO: explanation on how to match languages for your own locale-specific
// service.

vendor/golang.org/x/text/language/language.go generated vendored Normal file

@ -0,0 +1,605 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run gen.go -output tables.go
package language
// TODO: Remove above NOTE after:
// - verifying that tables are dropped correctly (most notably matcher tables).
import (
"strings"
"golang.org/x/text/internal/language"
"golang.org/x/text/internal/language/compact"
)
// Tag represents a BCP 47 language tag. It is used to specify an instance of a
// specific language or locale. All language tag values are guaranteed to be
// well-formed.
type Tag compact.Tag
func makeTag(t language.Tag) (tag Tag) {
return Tag(compact.Make(t))
}
func (t *Tag) tag() language.Tag {
return (*compact.Tag)(t).Tag()
}
func (t *Tag) isCompact() bool {
return (*compact.Tag)(t).IsCompact()
}
// TODO: improve performance.
func (t *Tag) lang() language.Language { return t.tag().LangID }
func (t *Tag) region() language.Region { return t.tag().RegionID }
func (t *Tag) script() language.Script { return t.tag().ScriptID }
// Make is a convenience wrapper for Parse that omits the error.
// In case of an error, a sensible default is returned.
func Make(s string) Tag {
return Default.Make(s)
}
// Make is a convenience wrapper for c.Parse that omits the error.
// In case of an error, a sensible default is returned.
func (c CanonType) Make(s string) Tag {
t, _ := c.Parse(s)
return t
}
// Raw returns the raw base language, script and region, without making an
// attempt to infer their values.
func (t Tag) Raw() (b Base, s Script, r Region) {
tt := t.tag()
return Base{tt.LangID}, Script{tt.ScriptID}, Region{tt.RegionID}
}
// IsRoot returns true if t is equal to language "und".
func (t Tag) IsRoot() bool {
return compact.Tag(t).IsRoot()
}
// CanonType can be used to enable or disable various types of canonicalization.
type CanonType int
const (
// Replace deprecated base languages with their preferred replacements.
DeprecatedBase CanonType = 1 << iota
// Replace deprecated scripts with their preferred replacements.
DeprecatedScript
// Replace deprecated regions with their preferred replacements.
DeprecatedRegion
// Remove redundant scripts.
SuppressScript
// Normalize legacy encodings. This includes legacy languages defined in
// CLDR as well as bibliographic codes defined in ISO-639.
Legacy
// Map the dominant language of a macro language group to the macro language
// subtag. For example cmn -> zh.
Macro
// The CLDR flag should be used if full compatibility with CLDR is required.
// There are a few cases where language.Tag may differ from CLDR. To follow all
// of CLDR's suggestions, use All|CLDR.
CLDR
// Raw can be used to Compose or Parse without Canonicalization.
Raw CanonType = 0
// Replace all deprecated tags with their preferred replacements.
Deprecated = DeprecatedBase | DeprecatedScript | DeprecatedRegion
// All canonicalizations recommended by BCP 47.
BCP47 = Deprecated | SuppressScript
// All canonicalizations.
All = BCP47 | Legacy | Macro
// Default is the canonicalization used by Parse, Make and Compose. To
// preserve as much information as possible, canonicalizations that remove
// potentially valuable information are not included. The Matcher is
// designed to recognize similar tags that would be the same if
// they were canonicalized using All.
Default = Deprecated | Legacy
canonLang = DeprecatedBase | Legacy | Macro
// TODO: LikelyScript, LikelyRegion: suppress similar to ICU.
)
// canonicalize returns the canonicalized equivalent of the tag and
// whether there was any change.
func canonicalize(c CanonType, t language.Tag) (language.Tag, bool) {
if c == Raw {
return t, false
}
changed := false
if c&SuppressScript != 0 {
if t.LangID.SuppressScript() == t.ScriptID {
t.ScriptID = 0
changed = true
}
}
if c&canonLang != 0 {
for {
if l, aliasType := t.LangID.Canonicalize(); l != t.LangID {
switch aliasType {
case language.Legacy:
if c&Legacy != 0 {
if t.LangID == _sh && t.ScriptID == 0 {
t.ScriptID = _Latn
}
t.LangID = l
changed = true
}
case language.Macro:
if c&Macro != 0 {
// We deviate here from CLDR. The mapping "nb" -> "no"
// qualifies as a typical Macro language mapping. However,
// for legacy reasons, CLDR maps "no", the macro language
// code for Norwegian, to the dominant variant "nb". This
// change is currently under consideration for CLDR as well.
// See https://unicode.org/cldr/trac/ticket/2698 and also
// https://unicode.org/cldr/trac/ticket/1790 for some of the
// practical implications. TODO: this check could be removed
// if CLDR adopts this change.
if c&CLDR == 0 || t.LangID != _nb {
changed = true
t.LangID = l
}
}
case language.Deprecated:
if c&DeprecatedBase != 0 {
if t.LangID == _mo && t.RegionID == 0 {
t.RegionID = _MD
}
t.LangID = l
changed = true
// Other canonicalization types may still apply.
continue
}
}
} else if c&Legacy != 0 && t.LangID == _no && c&CLDR != 0 {
t.LangID = _nb
changed = true
}
break
}
}
if c&DeprecatedScript != 0 {
if t.ScriptID == _Qaai {
changed = true
t.ScriptID = _Zinh
}
}
if c&DeprecatedRegion != 0 {
if r := t.RegionID.Canonicalize(); r != t.RegionID {
changed = true
t.RegionID = r
}
}
return t, changed
}
// Canonicalize returns the canonicalized equivalent of the tag.
func (c CanonType) Canonicalize(t Tag) (Tag, error) {
// First try fast path.
if t.isCompact() {
if _, changed := canonicalize(c, compact.Tag(t).Tag()); !changed {
return t, nil
}
}
// It is unlikely that one will canonicalize a tag after matching. So do
// a slow but simple approach here.
if tag, changed := canonicalize(c, t.tag()); changed {
tag.RemakeString()
return makeTag(tag), nil
}
return t, nil
}
// Confidence indicates the level of certainty for a given return value.
// For example, Serbian may be written in Cyrillic or Latin script.
// The confidence level indicates whether a value was explicitly specified,
// whether it is typically the only possible value, or whether there is
// an ambiguity.
type Confidence int
const (
No Confidence = iota // full confidence that there was no match
Low // most likely value picked out of a set of alternatives
High // value is generally assumed to be the correct match
Exact // exact match or explicitly specified value
)
var confName = []string{"No", "Low", "High", "Exact"}
func (c Confidence) String() string {
return confName[c]
}
// String returns the canonical string representation of the language tag.
func (t Tag) String() string {
return t.tag().String()
}
// MarshalText implements encoding.TextMarshaler.
func (t Tag) MarshalText() (text []byte, err error) {
return t.tag().MarshalText()
}
// UnmarshalText implements encoding.TextUnmarshaler.
func (t *Tag) UnmarshalText(text []byte) error {
var tag language.Tag
err := tag.UnmarshalText(text)
*t = makeTag(tag)
return err
}
// Base returns the base language of the language tag. If the base language is
// unspecified, an attempt will be made to infer it from the context.
// It uses a variant of CLDR's Add Likely Subtags algorithm. This is subject to change.
func (t Tag) Base() (Base, Confidence) {
if b := t.lang(); b != 0 {
return Base{b}, Exact
}
tt := t.tag()
c := High
if tt.ScriptID == 0 && !tt.RegionID.IsCountry() {
c = Low
}
if tag, err := tt.Maximize(); err == nil && tag.LangID != 0 {
return Base{tag.LangID}, c
}
return Base{0}, No
}
// Script infers the script for the language tag. If it was not explicitly given, it will infer
// a most likely candidate.
// If more than one script is commonly used for a language, the most likely one
// is returned with a low confidence indication. For example, it returns (Cyrl, Low)
// for Serbian.
// If a script cannot be inferred, (Zzzz, No) is returned. We do not use Zyyy (undetermined)
// as one would suspect from the IANA registry for BCP 47. In a Unicode context Zyyy marks
// common characters (like 1, 2, 3, '.', etc.) and is therefore more like multiple scripts.
// See https://www.unicode.org/reports/tr24/#Values for more details. Zzzz is also used for
// unknown value in CLDR. (Zzzz, Exact) is returned if Zzzz was explicitly specified.
// Note that an inferred script is never guaranteed to be the correct one. Latin is
// almost exclusively used for Afrikaans, but Arabic has been used for some texts
// in the past. Also, the script that is commonly used may change over time.
// It uses a variant of CLDR's Add Likely Subtags algorithm. This is subject to change.
func (t Tag) Script() (Script, Confidence) {
if scr := t.script(); scr != 0 {
return Script{scr}, Exact
}
tt := t.tag()
sc, c := language.Script(_Zzzz), No
if scr := tt.LangID.SuppressScript(); scr != 0 {
// Note: it is not always the case that a language with a suppress
// script value is only written in one script (e.g. kk, ms, pa).
if tt.RegionID == 0 {
return Script{scr}, High
}
sc, c = scr, High
}
if tag, err := tt.Maximize(); err == nil {
if tag.ScriptID != sc {
sc, c = tag.ScriptID, Low
}
} else {
tt, _ = canonicalize(Deprecated|Macro, tt)
if tag, err := tt.Maximize(); err == nil && tag.ScriptID != sc {
sc, c = tag.ScriptID, Low
}
}
return Script{sc}, c
}
// Region returns the region for the language tag. If it was not explicitly given, it will
// infer a most likely candidate from the context.
// It uses a variant of CLDR's Add Likely Subtags algorithm. This is subject to change.
func (t Tag) Region() (Region, Confidence) {
if r := t.region(); r != 0 {
return Region{r}, Exact
}
tt := t.tag()
if tt, err := tt.Maximize(); err == nil {
return Region{tt.RegionID}, Low // TODO: differentiate between high and low.
}
tt, _ = canonicalize(Deprecated|Macro, tt)
if tag, err := tt.Maximize(); err == nil {
return Region{tag.RegionID}, Low
}
return Region{_ZZ}, No // TODO: return world instead of undetermined?
}
// Variants returns the variants specified explicitly for this language tag,
// or nil if no variant was specified.
func (t Tag) Variants() []Variant {
if !compact.Tag(t).MayHaveVariants() {
return nil
}
v := []Variant{}
x, str := "", t.tag().Variants()
for str != "" {
x, str = nextToken(str)
v = append(v, Variant{x})
}
return v
}
// Parent returns the CLDR parent of t. In CLDR, missing fields in data for a
// specific language are substituted with fields from the parent language.
// The parent for a language may change for newer versions of CLDR.
//
// Parent returns a tag for a less specific language that is mutually
// intelligible or Und if there is no such language. This may not be the same as
// simply stripping the last BCP 47 subtag. For instance, the parent of "zh-TW"
// is "zh-Hant", and the parent of "zh-Hant" is "und".
func (t Tag) Parent() Tag {
return Tag(compact.Tag(t).Parent())
}
// nextToken returns token t and the rest of the string.
func nextToken(s string) (t, tail string) {
p := strings.Index(s[1:], "-")
if p == -1 {
return s[1:], ""
}
p++
return s[1:p], s[p:]
}
// Extension is a single BCP 47 extension.
type Extension struct {
s string
}
// String returns the string representation of the extension, including the
// type tag.
func (e Extension) String() string {
return e.s
}
// ParseExtension parses s as an extension and returns it on success.
func ParseExtension(s string) (e Extension, err error) {
ext, err := language.ParseExtension(s)
return Extension{ext}, err
}
// Type returns the one-byte extension type of e. It returns 0 for the zero
// extension.
func (e Extension) Type() byte {
if e.s == "" {
return 0
}
return e.s[0]
}
// Tokens returns the list of tokens of e.
func (e Extension) Tokens() []string {
return strings.Split(e.s, "-")
}
// Extension returns the extension of type x for tag t. It will return
// false for ok if t does not have the requested extension. The returned
// extension will be invalid in this case.
func (t Tag) Extension(x byte) (ext Extension, ok bool) {
if !compact.Tag(t).MayHaveExtensions() {
return Extension{}, false
}
e, ok := t.tag().Extension(x)
return Extension{e}, ok
}
// Extensions returns all extensions of t.
func (t Tag) Extensions() []Extension {
if !compact.Tag(t).MayHaveExtensions() {
return nil
}
e := []Extension{}
for _, ext := range t.tag().Extensions() {
e = append(e, Extension{ext})
}
return e
}
// TypeForKey returns the type associated with the given key, where key and type
// are of the allowed values defined for the Unicode locale extension ('u') in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
// TypeForKey will traverse the inheritance chain to get the correct value.
//
// If there are multiple types associated with a key, only the first will be
// returned. If there is no type associated with a key, it returns the empty
// string.
func (t Tag) TypeForKey(key string) string {
if !compact.Tag(t).MayHaveExtensions() {
if key != "rg" && key != "va" {
return ""
}
}
return t.tag().TypeForKey(key)
}
// SetTypeForKey returns a new Tag with the key set to type, where key and type
// are of the allowed values defined for the Unicode locale extension ('u') in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
// An empty value removes an existing pair with the same key.
func (t Tag) SetTypeForKey(key, value string) (Tag, error) {
tt, err := t.tag().SetTypeForKey(key, value)
return makeTag(tt), err
}
// NumCompactTags is the number of compact tags. The maximum tag is
// NumCompactTags-1.
const NumCompactTags = compact.NumCompactTags
// CompactIndex returns an index, where 0 <= index < NumCompactTags, for tags
// for which data exists in the text repository. The index will change over time
// and should not be stored in persistent storage. If t does not match a compact
// index, exact will be false and the compact index will be returned for the
// first match after repeatedly taking the Parent of t.
func CompactIndex(t Tag) (index int, exact bool) {
id, exact := compact.LanguageID(compact.Tag(t))
return int(id), exact
}
var root = language.Tag{}
// Base is an ISO 639 language code, used for encoding the base language
// of a language tag.
type Base struct {
langID language.Language
}
// ParseBase parses a 2- or 3-letter ISO 639 code.
// It returns a ValueError if s is a well-formed but unknown language identifier
// or another error if another error occurred.
func ParseBase(s string) (Base, error) {
l, err := language.ParseBase(s)
return Base{l}, err
}
// String returns the BCP 47 representation of the base language.
func (b Base) String() string {
return b.langID.String()
}
// ISO3 returns the ISO 639-3 language code.
func (b Base) ISO3() string {
return b.langID.ISO3()
}
// IsPrivateUse reports whether this language code is reserved for private use.
func (b Base) IsPrivateUse() bool {
return b.langID.IsPrivateUse()
}
// Script is a 4-letter ISO 15924 code for representing scripts.
// It is idiomatically represented in title case.
type Script struct {
scriptID language.Script
}
// ParseScript parses a 4-letter ISO 15924 code.
// It returns a ValueError if s is a well-formed but unknown script identifier
// or another error if another error occurred.
func ParseScript(s string) (Script, error) {
sc, err := language.ParseScript(s)
return Script{sc}, err
}
// String returns the script code in title case.
// It returns "Zzzz" for an unspecified script.
func (s Script) String() string {
return s.scriptID.String()
}
// IsPrivateUse reports whether this script code is reserved for private use.
func (s Script) IsPrivateUse() bool {
return s.scriptID.IsPrivateUse()
}
// Region is an ISO 3166-1 or UN M.49 code for representing countries and regions.
type Region struct {
regionID language.Region
}
// EncodeM49 returns the Region for the given UN M.49 code.
// It returns an error if r is not a valid code.
func EncodeM49(r int) (Region, error) {
rid, err := language.EncodeM49(r)
return Region{rid}, err
}
// ParseRegion parses a 2- or 3-letter ISO 3166-1 or a UN M.49 code.
// It returns a ValueError if s is a well-formed but unknown region identifier
// or another error if another error occurred.
func ParseRegion(s string) (Region, error) {
r, err := language.ParseRegion(s)
return Region{r}, err
}
// String returns the BCP 47 representation for the region.
// It returns "ZZ" for an unspecified region.
func (r Region) String() string {
return r.regionID.String()
}
// ISO3 returns the 3-letter ISO code of r.
// Note that not all regions have a 3-letter ISO code.
// In such cases this method returns "ZZZ".
func (r Region) ISO3() string {
return r.regionID.ISO3()
}
// M49 returns the UN M.49 encoding of r, or 0 if this encoding
// is not defined for r.
func (r Region) M49() int {
return r.regionID.M49()
}
// IsPrivateUse reports whether r has the ISO 3166 User-assigned status. This
// may include private-use tags that are assigned by CLDR and used in this
// implementation. So IsPrivateUse and IsCountry can be simultaneously true.
func (r Region) IsPrivateUse() bool {
return r.regionID.IsPrivateUse()
}
// IsCountry returns whether this region is a country or autonomous area. This
// includes non-standard definitions from CLDR.
func (r Region) IsCountry() bool {
return r.regionID.IsCountry()
}
// IsGroup returns whether this region defines a collection of regions. This
// includes non-standard definitions from CLDR.
func (r Region) IsGroup() bool {
return r.regionID.IsGroup()
}
// Contains returns whether Region c is contained by Region r. It returns true
// if c == r.
func (r Region) Contains(c Region) bool {
return r.regionID.Contains(c.regionID)
}
// TLD returns the country code top-level domain (ccTLD). UK is returned for GB.
// In all other cases it returns either the region itself or an error.
//
// This method may return an error for a region for which there exists a
// canonical form with a ccTLD. To get that ccTLD canonicalize r first. The
// region will already be canonicalized if it was obtained from a Tag that was
// obtained using any of the default methods.
func (r Region) TLD() (Region, error) {
tld, err := r.regionID.TLD()
return Region{tld}, err
}
// Canonicalize returns the region or a possible replacement if the region is
// deprecated. It will not return a replacement for deprecated regions that
// are split into multiple regions.
func (r Region) Canonicalize() Region {
return Region{r.regionID.Canonicalize()}
}
// Variant represents a registered variant of a language as defined by BCP 47.
type Variant struct {
variant string
}
// ParseVariant parses and returns a Variant. An error is returned if s is not
// a valid variant.
func ParseVariant(s string) (Variant, error) {
v, err := language.ParseVariant(s)
return Variant{v.String()}, err
}
// String returns the string representation of the variant.
func (v Variant) String() string {
return v.variant
}
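
To make the CanonType behavior defined above concrete, here is a small, hedged sketch. The inputs are made up for illustration; the expected results follow from the documented defaults (Default replaces legacy and deprecated codes but keeps script information, Raw performs no canonicalization, and All includes Macro mappings such as cmn -> zh).

package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	// Default canonicalization replaces legacy codes, so "iw" becomes "he".
	fmt.Println(language.Make("iw")) // he

	// Raw preserves the tag exactly as given.
	raw, _ := language.Raw.Parse("iw")
	fmt.Println(raw) // iw

	// Script information is preserved by default, so "en-Latn" stays
	// distinct from "en"; the Matcher still treats them as equivalent.
	fmt.Println(language.Make("en-Latn")) // en-Latn

	// All applies Macro mappings as well, e.g. cmn -> zh.
	zh, _ := language.All.Canonicalize(language.Make("cmn"))
	fmt.Println(zh) // zh
}
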

vendor/golang.org/x/text/language/match.go generated vendored Normal file

@ -0,0 +1,735 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"errors"
"strings"
"golang.org/x/text/internal/language"
)
// A MatchOption configures a Matcher.
type MatchOption func(*matcher)
// PreferSameScript will, in the absence of a match, cause the first
// preferred tag with the same script as a supported tag to match that supported
// tag. The default is currently true, but this may change in the future.
func PreferSameScript(preferSame bool) MatchOption {
return func(m *matcher) { m.preferSameScript = preferSame }
}
// TODO(v1.0.0): consider making Matcher a concrete type, instead of interface.
// There doesn't seem to be too much need for multiple types.
// Making it a concrete type allows MatchStrings to be a method, which will
// improve its discoverability.
// MatchStrings parses and matches the given strings until one of them matches
// the language in the Matcher. A string may be an Accept-Language header as
// handled by ParseAcceptLanguage. The default language is returned if no
// other language matched.
func MatchStrings(m Matcher, lang ...string) (tag Tag, index int) {
for _, accept := range lang {
desired, _, err := ParseAcceptLanguage(accept)
if err != nil {
continue
}
if tag, index, conf := m.Match(desired...); conf != No {
return tag, index
}
}
tag, index, _ = m.Match()
return
}
// Matcher is the interface that wraps the Match method.
//
// Match returns the best match for any of the given tags, along with
// a unique index associated with the returned tag and a confidence
// score.
type Matcher interface {
Match(t ...Tag) (tag Tag, index int, c Confidence)
}
// Comprehends reports the confidence score for a speaker of a given language
// being able to comprehend the written form of an alternative language.
func Comprehends(speaker, alternative Tag) Confidence {
_, _, c := NewMatcher([]Tag{alternative}).Match(speaker)
return c
}
// NewMatcher returns a Matcher that matches an ordered list of preferred tags
// against a list of supported tags based on written intelligibility, closeness
// of dialect, equivalence of subtags and various other rules. It is initialized
// with the list of supported tags. The first element is used as the default
// value in case no match is found.
//
// Its Match method matches the first of the given Tags to reach a certain
// confidence threshold. The tags passed to Match should therefore be specified
// in order of preference. Extensions are ignored for matching.
//
// The index returned by the Match method corresponds to the index of the
// matched tag in t, but is augmented with the Unicode extension ('u') of the
// corresponding preferred tag. This allows user locale options to be passed
// transparently.
func NewMatcher(t []Tag, options ...MatchOption) Matcher {
return newMatcher(t, options)
}
func (m *matcher) Match(want ...Tag) (t Tag, index int, c Confidence) {
var tt language.Tag
match, w, c := m.getBest(want...)
if match != nil {
tt, index = match.tag, match.index
} else {
// TODO: this should be an option
tt = m.default_.tag
if m.preferSameScript {
outer:
for _, w := range want {
script, _ := w.Script()
if script.scriptID == 0 {
// Don't do anything if there is no script, such as with
// private subtags.
continue
}
for i, h := range m.supported {
if script.scriptID == h.maxScript {
tt, index = h.tag, i
break outer
}
}
}
}
// TODO: select first language tag based on script.
}
if w.RegionID != tt.RegionID && w.RegionID != 0 {
if w.RegionID != 0 && tt.RegionID != 0 && tt.RegionID.Contains(w.RegionID) {
tt.RegionID = w.RegionID
tt.RemakeString()
} else if r := w.RegionID.String(); len(r) == 2 {
// TODO: also filter macro and deprecated.
tt, _ = tt.SetTypeForKey("rg", strings.ToLower(r)+"zzzz")
}
}
// Copy options from the user-provided tag into the result tag. This is hard
// to do after the fact, so we do it here.
// TODO: add in alternative variants to -u-va-.
// TODO: add preferred region to -u-rg-.
if e := w.Extensions(); len(e) > 0 {
b := language.Builder{}
b.SetTag(tt)
for _, e := range e {
b.AddExt(e)
}
tt = b.Make()
}
return makeTag(tt), index, c
}
// ErrMissingLikelyTagsData indicates no information was available
// to compute likely values of missing tags.
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
// func (t *Tag) setTagsFrom(id Tag) {
// t.LangID = id.LangID
// t.ScriptID = id.ScriptID
// t.RegionID = id.RegionID
// }
// Tag Matching
// CLDR defines an algorithm for finding the best match between two sets of language
// tags. The basic algorithm defines how to score a possible match and then find
// the match with the best score
// (see https://www.unicode.org/reports/tr35/#LanguageMatching).
// Using scoring has several disadvantages. The scoring obfuscates the importance of
// the various factors considered, making the algorithm harder to understand. Using
// scoring also requires the full score to be computed for each pair of tags.
//
// We will use a different algorithm which aims to have the following properties:
// - clarity on the precedence of the various selection factors, and
// - improved performance by allowing early termination of a comparison.
//
// Matching algorithm (overview)
// Input:
// - supported: a set of supported tags
// - default: the default tag to return in case there is no match
// - desired: list of desired tags, ordered by preference, starting with
// the most-preferred.
//
// Algorithm:
// 1) Set the best match to the lowest confidence level
// 2) For each tag in "desired":
// a) For each tag in "supported":
// 1) compute the match between the two tags.
// 2) if the match is better than the previous best match, replace it
// with the new match. (see next section)
// b) if the current best match is Exact and pin is true, the result will be
// frozen to the language found thus far, although better matches may
// still be found for the same language.
// 3) If the best match so far is below a certain threshold, return "default".
//
// Ranking:
// We use two phases to determine whether one pair of tags are a better match
// than another pair of tags. First, we determine a rough confidence level. If the
// levels are different, the one with the highest confidence wins.
// Second, if the rough confidence levels are identical, we use a set of tie-breaker
// rules.
//
// The confidence level of matching a pair of tags is determined by finding the
// lowest confidence level of any matches of the corresponding subtags (the
// result is deemed as good as its weakest link).
// We define the following levels:
// Exact - An exact match of a subtag, before adding likely subtags.
// MaxExact - An exact match of a subtag, after adding likely subtags.
// [See Note 2].
// High - High level of mutual intelligibility between different subtag
// variants.
// Low - Low level of mutual intelligibility between different subtag
// variants.
// No - No mutual intelligibility.
//
// The following levels can occur for each type of subtag:
// Base: Exact, MaxExact, High, Low, No
// Script: Exact, MaxExact [see Note 3], Low, No
// Region: Exact, MaxExact, High
// Variant: Exact, High
// Private: Exact, No
//
// Any result with a confidence level of Low or higher is deemed a possible match.
// Once a desired tag matches any of the supported tags with a level of MaxExact
// or higher, the next desired tag is not considered (see Step 2.b).
// Note that CLDR provides languageMatching data that defines close equivalence
// classes for base languages, scripts and regions.
//
// Tie-breaking
// If we get the same confidence level for two matches, we apply a sequence of
// tie-breaking rules. The first that succeeds defines the result. The rules are
// applied in the following order.
// 1) Original language was defined and was identical.
// 2) Original region was defined and was identical.
// 3) Distance between two maximized regions was the smallest.
// 4) Original script was defined and was identical.
// 5) Distance from want tag to have tag using the parent relation [see Note 5].
// If there is still no winner after these rules are applied, the first match
// found wins.
//
// Notes:
// [2] In practice, as matching of Exact is done in a separate phase from
// matching the other levels, we reuse the Exact level to mean MaxExact in
// the second phase. As a consequence, we only need the levels defined by
// the Confidence type. The MaxExact confidence level is mapped to High in
// the public API.
// [3] We do not differentiate between maximized script values that were derived
// from suppressScript versus most likely tag data. We determined that in
// ranking the two, one ranks just after the other. Moreover, the two cannot
// occur concurrently. As a consequence, they are identical for practical
// purposes.
// [4] In case of deprecated, macro-equivalents and legacy mappings, we assign
// the MaxExact level to allow iw vs he to still be a closer match than
// en-AU vs en-US, for example.
// [5] In CLDR a locale inherits fields that are unspecified for this locale
// from its parent. Therefore, if a locale is a parent of another locale,
// it is a strong measure for closeness, especially when no other tie
// breaker rule applies. One could also argue it is inconsistent, for
// example, when pt-AO matches pt (which CLDR equates with pt-BR), even
// though its parent is pt-PT according to the inheritance rules.
//
// Implementation Details:
// There are several performance considerations worth pointing out. Most notably,
// we preprocess as much as possible (within reason) at the time of creation of a
// matcher. This includes:
// - creating a per-language map, which includes data for the raw base language
// and its canonicalized variant (if applicable),
// - expanding entries for the equivalence classes defined in CLDR's
// languageMatch data.
// The per-language map ensures that typically only a very small number of tags
// need to be considered. The pre-expansion of canonicalized subtags and
// equivalence classes reduces the amount of map lookups that need to be done at
// runtime.
// matcher keeps a set of supported language tags, indexed by language.
type matcher struct {
default_ *haveTag
supported []*haveTag
index map[language.Language]*matchHeader
passSettings bool
preferSameScript bool
}
// matchHeader has the lists of tags for exact matches and matches based on
// maximized and canonicalized tags for a given language.
type matchHeader struct {
haveTags []*haveTag
original bool
}
// haveTag holds a supported Tag and its maximized script and region. The maximized
// or canonicalized language is not stored as it is not needed during matching.
type haveTag struct {
tag language.Tag
// index of this tag in the original list of supported tags.
index int
// conf is the maximum confidence that can result from matching this haveTag.
// When conf < Exact this means it was inserted after applying a CLDR equivalence rule.
conf Confidence
// Maximized region and script.
maxRegion language.Region
maxScript language.Script
// altScript may be checked as an alternative match to maxScript. If altScript
// matches, the confidence level for this match is Low. Theoretically there
// could be multiple alternative scripts. This does not occur in practice.
altScript language.Script
// nextMax is the index of the next haveTag with the same maximized tags.
nextMax uint16
}
func makeHaveTag(tag language.Tag, index int) (haveTag, language.Language) {
max := tag
if tag.LangID != 0 || tag.RegionID != 0 || tag.ScriptID != 0 {
max, _ = canonicalize(All, max)
max, _ = max.Maximize()
max.RemakeString()
}
return haveTag{tag, index, Exact, max.RegionID, max.ScriptID, altScript(max.LangID, max.ScriptID), 0}, max.LangID
}
// altScript returns an alternative script that may match the given script with
// a low confidence. At the moment, the langMatch data allows for at most one
// script to map to another and we rely on this to keep the code simple.
func altScript(l language.Language, s language.Script) language.Script {
for _, alt := range matchScript {
// TODO: also match cases where language is not the same.
if (language.Language(alt.wantLang) == l || language.Language(alt.haveLang) == l) &&
language.Script(alt.haveScript) == s {
return language.Script(alt.wantScript)
}
}
return 0
}
// addIfNew adds a haveTag to the list of tags only if it is a unique tag.
// Tags that have the same maximized values are linked by index.
func (h *matchHeader) addIfNew(n haveTag, exact bool) {
h.original = h.original || exact
// Don't add new exact matches.
for _, v := range h.haveTags {
if equalsRest(v.tag, n.tag) {
return
}
}
// Allow duplicate maximized tags, but create a linked list to allow quickly
// comparing the equivalents and bail out.
for i, v := range h.haveTags {
if v.maxScript == n.maxScript &&
v.maxRegion == n.maxRegion &&
v.tag.VariantOrPrivateUseTags() == n.tag.VariantOrPrivateUseTags() {
for h.haveTags[i].nextMax != 0 {
i = int(h.haveTags[i].nextMax)
}
h.haveTags[i].nextMax = uint16(len(h.haveTags))
break
}
}
h.haveTags = append(h.haveTags, &n)
}
// header returns the matchHeader for the given language. It creates one if
// it doesn't already exist.
func (m *matcher) header(l language.Language) *matchHeader {
if h := m.index[l]; h != nil {
return h
}
h := &matchHeader{}
m.index[l] = h
return h
}
func toConf(d uint8) Confidence {
if d <= 10 {
return High
}
if d < 30 {
return Low
}
return No
}
// newMatcher builds an index for the given supported tags and returns it as
// a matcher. It also expands the index by considering various equivalence classes
// for a given tag.
func newMatcher(supported []Tag, options []MatchOption) *matcher {
m := &matcher{
index: make(map[language.Language]*matchHeader),
preferSameScript: true,
}
for _, o := range options {
o(m)
}
if len(supported) == 0 {
m.default_ = &haveTag{}
return m
}
// Add supported languages to the index. Add exact matches first to give
// them precedence.
for i, tag := range supported {
tt := tag.tag()
pair, _ := makeHaveTag(tt, i)
m.header(tt.LangID).addIfNew(pair, true)
m.supported = append(m.supported, &pair)
}
m.default_ = m.header(supported[0].lang()).haveTags[0]
// Keep these in two different loops to support the case that two equivalent
// languages are distinguished, such as iw and he.
for i, tag := range supported {
tt := tag.tag()
pair, max := makeHaveTag(tt, i)
if max != tt.LangID {
m.header(max).addIfNew(pair, true)
}
}
// update is used to add indexes in the map for equivalent languages.
// update will only add entries to original indexes, thus not computing any
// transitive relations.
update := func(want, have uint16, conf Confidence) {
if hh := m.index[language.Language(have)]; hh != nil {
if !hh.original {
return
}
hw := m.header(language.Language(want))
for _, ht := range hh.haveTags {
v := *ht
if conf < v.conf {
v.conf = conf
}
v.nextMax = 0 // this value needs to be recomputed
if v.altScript != 0 {
v.altScript = altScript(language.Language(want), v.maxScript)
}
hw.addIfNew(v, conf == Exact && hh.original)
}
}
}
// Add entries for languages with mutual intelligibility as defined by CLDR's
// languageMatch data.
for _, ml := range matchLang {
update(ml.want, ml.have, toConf(ml.distance))
if !ml.oneway {
update(ml.have, ml.want, toConf(ml.distance))
}
}
// Add entries for possible canonicalizations. This is an optimization to
// ensure that only one map lookup needs to be done at runtime per desired tag.
// First we match deprecated equivalents. If they are perfect equivalents
// (their canonicalization simply substitutes a different language code, but
// nothing else), the match confidence is Exact, otherwise it is High.
for i, lm := range language.AliasMap {
// If deprecated codes match and there is no fiddling with the script
// or region, we consider it an exact match.
conf := Exact
if language.AliasTypes[i] != language.Macro {
if !isExactEquivalent(language.Language(lm.From)) {
conf = High
}
update(lm.To, lm.From, conf)
}
update(lm.From, lm.To, conf)
}
return m
}
// getBest gets the best matching tag in m for any of the given tags, taking into
// account the order of preference of the given tags.
func (m *matcher) getBest(want ...Tag) (got *haveTag, orig language.Tag, c Confidence) {
best := bestMatch{}
for i, ww := range want {
w := ww.tag()
var max language.Tag
// Check for exact match first.
h := m.index[w.LangID]
if w.LangID != 0 {
if h == nil {
continue
}
// Base language is defined.
max, _ = canonicalize(Legacy|Deprecated|Macro, w)
// A region that is added through canonicalization is stronger than
// a maximized region: set it in the original (e.g. mo -> ro-MD).
if w.RegionID != max.RegionID {
w.RegionID = max.RegionID
}
// TODO: should we do the same for scripts?
// See test case: en, sr, nl ; sh ; sr
max, _ = max.Maximize()
} else {
// Base language is not defined.
if h != nil {
for i := range h.haveTags {
have := h.haveTags[i]
if equalsRest(have.tag, w) {
return have, w, Exact
}
}
}
if w.ScriptID == 0 && w.RegionID == 0 {
// We skip all tags matching und for approximate matching, including
// private tags.
continue
}
max, _ = w.Maximize()
if h = m.index[max.LangID]; h == nil {
continue
}
}
pin := true
for _, t := range want[i+1:] {
if w.LangID == t.lang() {
pin = false
break
}
}
// Check for match based on maximized tag.
for i := range h.haveTags {
have := h.haveTags[i]
best.update(have, w, max.ScriptID, max.RegionID, pin)
if best.conf == Exact {
for have.nextMax != 0 {
have = h.haveTags[have.nextMax]
best.update(have, w, max.ScriptID, max.RegionID, pin)
}
return best.have, best.want, best.conf
}
}
}
if best.conf <= No {
if len(want) != 0 {
return nil, want[0].tag(), No
}
return nil, language.Tag{}, No
}
return best.have, best.want, best.conf
}
// bestMatch accumulates the best match so far.
type bestMatch struct {
have *haveTag
want language.Tag
conf Confidence
pinnedRegion language.Region
pinLanguage bool
sameRegionGroup bool
// Cached results from applying tie-breaking rules.
origLang bool
origReg bool
paradigmReg bool
regGroupDist uint8
origScript bool
}
// update updates the existing best match if the new pair is considered to be a
// better match. To determine if the given pair is a better match, it first
// computes the rough confidence level. If this surpasses the current match, it
// will replace it and update the tie-breaker rule cache. If there is a tie, it
// proceeds with applying a series of tie-breaker rules. If there is no
// conclusive winner after applying the tie-breaker rules, it leaves the current
// match as the preferred match.
//
// If pin is true and have and tag are a strong match, it will henceforth only
// consider matches for this language. This corresponds to the idea that most
// users have a strong preference for the first defined language. A user can
// still prefer a second language over a dialect of the preferred language by
// explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
// be false.
func (m *bestMatch) update(have *haveTag, tag language.Tag, maxScript language.Script, maxRegion language.Region, pin bool) {
// Bail if the maximum attainable confidence is below that of the current best match.
c := have.conf
if c < m.conf {
return
}
// Don't change the language once we already have found an exact match.
if m.pinLanguage && tag.LangID != m.want.LangID {
return
}
// Pin the region group if we are comparing tags for the same language.
if tag.LangID == m.want.LangID && m.sameRegionGroup {
_, sameGroup := regionGroupDist(m.pinnedRegion, have.maxRegion, have.maxScript, m.want.LangID)
if !sameGroup {
return
}
}
if c == Exact && have.maxScript == maxScript {
// If there is another language and then another entry of this language,
// don't pin anything, otherwise pin the language.
m.pinLanguage = pin
}
if equalsRest(have.tag, tag) {
} else if have.maxScript != maxScript {
// There is usually very little comprehension between different scripts.
// In a few cases there may still be Low comprehension. This possibility
// is pre-computed and stored in have.altScript.
if Low < m.conf || have.altScript != maxScript {
return
}
c = Low
} else if have.maxRegion != maxRegion {
if High < c {
// There is usually a small difference between languages across regions.
c = High
}
}
// We store the results of the computations of the tie-breaker rules along
// with the best match. There is no need to do the checks once we determine
// we have a winner, but we do still need to do the tie-breaker computations.
// We use "beaten" to keep track if we still need to do the checks.
beaten := false // true if the new pair defeats the current one.
if c != m.conf {
if c < m.conf {
return
}
beaten = true
}
// Tie-breaker rules:
// We prefer if the pre-maximized language was specified and identical.
origLang := have.tag.LangID == tag.LangID && tag.LangID != 0
if !beaten && m.origLang != origLang {
if m.origLang {
return
}
beaten = true
}
// We prefer if the pre-maximized region was specified and identical.
origReg := have.tag.RegionID == tag.RegionID && tag.RegionID != 0
if !beaten && m.origReg != origReg {
if m.origReg {
return
}
beaten = true
}
regGroupDist, sameGroup := regionGroupDist(have.maxRegion, maxRegion, maxScript, tag.LangID)
if !beaten && m.regGroupDist != regGroupDist {
if regGroupDist > m.regGroupDist {
return
}
beaten = true
}
paradigmReg := isParadigmLocale(tag.LangID, have.maxRegion)
if !beaten && m.paradigmReg != paradigmReg {
if !paradigmReg {
return
}
beaten = true
}
// Next we prefer if the pre-maximized script was specified and identical.
origScript := have.tag.ScriptID == tag.ScriptID && tag.ScriptID != 0
if !beaten && m.origScript != origScript {
if m.origScript {
return
}
beaten = true
}
// Update m to the newly found best match.
if beaten {
m.have = have
m.want = tag
m.conf = c
m.pinnedRegion = maxRegion
m.sameRegionGroup = sameGroup
m.origLang = origLang
m.origReg = origReg
m.paradigmReg = paradigmReg
m.origScript = origScript
m.regGroupDist = regGroupDist
}
}
func isParadigmLocale(lang language.Language, r language.Region) bool {
for _, e := range paradigmLocales {
if language.Language(e[0]) == lang && (r == language.Region(e[1]) || r == language.Region(e[2])) {
return true
}
}
return false
}
// regionGroupDist computes the distance between two regions based on their
// CLDR grouping.
func regionGroupDist(a, b language.Region, script language.Script, lang language.Language) (dist uint8, same bool) {
const defaultDistance = 4
aGroup := uint(regionToGroups[a]) << 1
bGroup := uint(regionToGroups[b]) << 1
for _, ri := range matchRegion {
if language.Language(ri.lang) == lang && (ri.script == 0 || language.Script(ri.script) == script) {
group := uint(1 << (ri.group &^ 0x80))
if 0x80&ri.group == 0 {
if aGroup&bGroup&group != 0 { // Both regions are in the group.
return ri.distance, ri.distance == defaultDistance
}
} else {
if (aGroup|bGroup)&group == 0 { // Both regions are not in the group.
return ri.distance, ri.distance == defaultDistance
}
}
}
}
return defaultDistance, true
}
// equalsRest compares everything except the language.
func equalsRest(a, b language.Tag) bool {
// TODO: don't include extensions in this comparison. To do this efficiently,
// though, we should handle private tags separately.
return a.ScriptID == b.ScriptID && a.RegionID == b.RegionID && a.VariantOrPrivateUseTags() == b.VariantOrPrivateUseTags()
}
// isExactEquivalent returns true if canonicalizing the language will not alter
// the script or region of a tag.
func isExactEquivalent(l language.Language) bool {
for _, o := range notEquivalent {
if o == l {
return false
}
}
return true
}
var notEquivalent []language.Language
func init() {
// Create a list of all languages for which canonicalization may alter the
// script or region.
for _, lm := range language.AliasMap {
tag := language.Tag{LangID: language.Language(lm.From)}
if tag, _ = canonicalize(All, tag); tag.ScriptID != 0 || tag.RegionID != 0 {
notEquivalent = append(notEquivalent, language.Language(lm.From))
}
}
// Maximize undefined regions of paradigm locales.
for i, v := range paradigmLocales {
t := language.Tag{LangID: language.Language(v[0])}
max, _ := t.Maximize()
if v[1] == 0 {
paradigmLocales[i][1] = uint16(max.RegionID)
}
if v[2] == 0 {
paradigmLocales[i][2] = uint16(max.RegionID)
}
}
}
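
The sketch below exercises the matching logic described in the long comment above. The supported list is illustrative, and the printed confidence reflects what the mutual-intelligibility data would typically yield (for example, a Danish reader matching Norwegian) rather than a guaranteed output.

package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	// An illustrative set of supported tags; the first is the default.
	supported := []language.Tag{
		language.AmericanEnglish,
		language.Norwegian,
		language.MustParse("sr-Latn"),
	}
	m := language.NewMatcher(supported)

	// A Danish reader is expected to match Norwegian with less-than-exact
	// confidence, per the languageMatch data discussed above.
	tag, index, conf := m.Match(language.MustParse("da"))
	fmt.Println(tag, index, conf)

	// Comprehends wraps the same machinery for a single pair of tags.
	fmt.Println(language.Comprehends(language.MustParse("da"), language.Norwegian))
}
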

vendor/golang.org/x/text/language/parse.go generated vendored Normal file

@ -0,0 +1,256 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"errors"
"sort"
"strconv"
"strings"
"golang.org/x/text/internal/language"
)
// ValueError is returned by any of the parsing functions when the
// input is well-formed but the respective subtag is not recognized
// as a valid value.
type ValueError interface {
error
// Subtag returns the subtag for which the error occurred.
Subtag() string
}
// Parse parses the given BCP 47 string and returns a valid Tag. If parsing
// failed it returns an error and any part of the tag that could be parsed.
// If parsing succeeded but an unknown value was found, it returns
// ValueError. The Tag returned in this case is just stripped of the unknown
// value. All other values are preserved. It accepts tags in the BCP 47 format
// and extensions to this standard defined in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
// The resulting tag is canonicalized using the default canonicalization type.
func Parse(s string) (t Tag, err error) {
return Default.Parse(s)
}
// Parse parses the given BCP 47 string and returns a valid Tag. If parsing
// failed it returns an error and any part of the tag that could be parsed.
// If parsing succeeded but an unknown value was found, it returns
// ValueError. The Tag returned in this case is just stripped of the unknown
// value. All other values are preserved. It accepts tags in the BCP 47 format
// and extensions to this standard defined in
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
// The resulting tag is canonicalized using the canonicalization type c.
func (c CanonType) Parse(s string) (t Tag, err error) {
defer func() {
if recover() != nil {
t = Tag{}
err = language.ErrSyntax
}
}()
tt, err := language.Parse(s)
if err != nil {
return makeTag(tt), err
}
tt, changed := canonicalize(c, tt)
if changed {
tt.RemakeString()
}
return makeTag(tt), err
}
// Compose creates a Tag from individual parts, which may be of type Tag, Base,
// Script, Region, Variant, []Variant, Extension, []Extension or error. If a
// Base, Script or Region or slice of type Variant or Extension is passed more
// than once, the latter will overwrite the former. Variants and Extensions are
// accumulated, but if two extensions of the same type are passed, the latter
// will replace the former. For -u extensions, though, the key-type pairs are
// added, where later values overwrite older ones. A Tag overwrites all former
// values and typically only makes sense as the first argument. The resulting
// tag is returned after canonicalizing using the Default CanonType. If one or
// more errors are encountered, one of the errors is returned.
func Compose(part ...interface{}) (t Tag, err error) {
return Default.Compose(part...)
}
// Compose creates a Tag from individual parts, which may be of type Tag, Base,
// Script, Region, Variant, []Variant, Extension, []Extension or error. If a
// Base, Script or Region or slice of type Variant or Extension is passed more
// than once, the latter will overwrite the former. Variants and Extensions are
// accumulated, but if two extensions of the same type are passed, the latter
// will replace the former. For -u extensions, though, the key-type pairs are
// added, where later values overwrite older ones. A Tag overwrites all former
// values and typically only makes sense as the first argument. The resulting
// tag is returned after canonicalizing using CanonType c. If one or more errors
// are encountered, one of the errors is returned.
func (c CanonType) Compose(part ...interface{}) (t Tag, err error) {
defer func() {
if recover() != nil {
t = Tag{}
err = language.ErrSyntax
}
}()
var b language.Builder
if err = update(&b, part...); err != nil {
return und, err
}
b.Tag, _ = canonicalize(c, b.Tag)
return makeTag(b.Make()), err
}
var errInvalidArgument = errors.New("invalid Extension or Variant")
func update(b *language.Builder, part ...interface{}) (err error) {
for _, x := range part {
switch v := x.(type) {
case Tag:
b.SetTag(v.tag())
case Base:
b.Tag.LangID = v.langID
case Script:
b.Tag.ScriptID = v.scriptID
case Region:
b.Tag.RegionID = v.regionID
case Variant:
if v.variant == "" {
err = errInvalidArgument
break
}
b.AddVariant(v.variant)
case Extension:
if v.s == "" {
err = errInvalidArgument
break
}
b.SetExt(v.s)
case []Variant:
b.ClearVariants()
for _, v := range v {
b.AddVariant(v.variant)
}
case []Extension:
b.ClearExtensions()
for _, e := range v {
b.SetExt(e.s)
}
// TODO: support parsing of raw strings based on morphology or just extensions?
case error:
if v != nil {
err = v
}
}
}
return
}
var errInvalidWeight = errors.New("ParseAcceptLanguage: invalid weight")
var errTagListTooLarge = errors.New("tag list exceeds max length")
// ParseAcceptLanguage parses the contents of an Accept-Language header as
// defined in http://www.ietf.org/rfc/rfc2616.txt and returns a list of Tags and
// a list of corresponding quality weights. It is more permissive than RFC 2616
// and may return non-nil slices even if the input is not valid.
// The Tags will be sorted by highest weight first and then by first occurrence.
// Tags with a weight of zero will be dropped. An error will be returned if the
// input could not be parsed.
func ParseAcceptLanguage(s string) (tag []Tag, q []float32, err error) {
defer func() {
if recover() != nil {
tag = nil
q = nil
err = language.ErrSyntax
}
}()
if strings.Count(s, "-") > 1000 {
return nil, nil, errTagListTooLarge
}
var entry string
for s != "" {
if entry, s = split(s, ','); entry == "" {
continue
}
entry, weight := split(entry, ';')
// Scan the language.
t, err := Parse(entry)
if err != nil {
id, ok := acceptFallback[entry]
if !ok {
return nil, nil, err
}
t = makeTag(language.Tag{LangID: id})
}
// Scan the optional weight.
w := 1.0
if weight != "" {
weight = consume(weight, 'q')
weight = consume(weight, '=')
// consume returns the empty string when a token could not be
// consumed, resulting in an error for ParseFloat.
if w, err = strconv.ParseFloat(weight, 32); err != nil {
return nil, nil, errInvalidWeight
}
// Drop tags with a quality weight of 0.
if w <= 0 {
continue
}
}
tag = append(tag, t)
q = append(q, float32(w))
}
sort.Stable(&tagSort{tag, q})
return tag, q, nil
}
// consume removes a leading token c from s and returns the result or the empty
// string if there is no such token.
func consume(s string, c byte) string {
if s == "" || s[0] != c {
return ""
}
return strings.TrimSpace(s[1:])
}
func split(s string, c byte) (head, tail string) {
if i := strings.IndexByte(s, c); i >= 0 {
return strings.TrimSpace(s[:i]), strings.TrimSpace(s[i+1:])
}
return strings.TrimSpace(s), ""
}
// Add hack mapping to deal with a small number of cases that occur
// in Accept-Language (with reasonable frequency).
var acceptFallback = map[string]language.Language{
"english": _en,
"deutsch": _de,
"italian": _it,
"french": _fr,
"*": _mul, // defined in the spec to match all languages.
}
type tagSort struct {
tag []Tag
q []float32
}
func (s *tagSort) Len() int {
return len(s.q)
}
func (s *tagSort) Less(i, j int) bool {
return s.q[i] > s.q[j]
}
func (s *tagSort) Swap(i, j int) {
s.tag[i], s.tag[j] = s.tag[j], s.tag[i]
s.q[i], s.q[j] = s.q[j], s.q[i]
}
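
A short sketch of the parsing entry points defined above; the Accept-Language header string and the composed parts are made up for illustration.

package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	// Tags come back sorted by descending quality weight; entries with a
	// weight of zero (here "de;q=0") are dropped, as documented above.
	tags, weights, err := language.ParseAcceptLanguage("fr-CH, fr;q=0.9, en;q=0.8, de;q=0")
	fmt.Println(tags, weights, err)

	// Compose assembles a tag from individual parts and canonicalizes it
	// using the Default CanonType.
	t, err := language.Compose(
		language.MustParseBase("sr"),
		language.MustParseScript("Latn"),
		language.MustParseRegion("RS"),
	)
	fmt.Println(t, err)
}
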

vendor/golang.org/x/text/language/tables.go generated vendored Normal file

@ -0,0 +1,298 @@
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
package language
// CLDRVersion is the CLDR version from which the tables in this package are derived.
const CLDRVersion = "32"
const (
_de = 269
_en = 313
_fr = 350
_it = 505
_mo = 784
_no = 879
_nb = 839
_pt = 960
_sh = 1031
_mul = 806
_und = 0
)
const (
_001 = 1
_419 = 31
_BR = 65
_CA = 73
_ES = 111
_GB = 124
_MD = 189
_PT = 239
_UK = 307
_US = 310
_ZZ = 358
_XA = 324
_XC = 326
_XK = 334
)
const (
_Latn = 91
_Hani = 57
_Hans = 59
_Hant = 60
_Qaaa = 149
_Qaai = 157
_Qabx = 198
_Zinh = 255
_Zyyy = 260
_Zzzz = 261
)
var regionToGroups = []uint8{ // 359 elements
// Entry 0 - 3F
0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x00, 0x04,
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x04, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x00,
0x00, 0x04, 0x00, 0x00, 0x04, 0x01, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x04,
// Entry 40 - 7F
0x04, 0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
0x04, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x04, 0x00, 0x00, 0x04, 0x00, 0x00, 0x04,
0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x00,
0x08, 0x00, 0x04, 0x00, 0x00, 0x08, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x04,
// Entry 80 - BF
0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x04, 0x00,
0x00, 0x00, 0x04, 0x01, 0x00, 0x04, 0x02, 0x00,
0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x08, 0x08, 0x00, 0x00, 0x00, 0x04,
// Entry C0 - FF
0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
0x01, 0x04, 0x08, 0x04, 0x00, 0x00, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00,
0x00, 0x00, 0x00, 0x04, 0x00, 0x05, 0x00, 0x00,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
// Entry 100 - 13F
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04,
0x00, 0x00, 0x00, 0x04, 0x04, 0x00, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x05, 0x04,
0x00, 0x00, 0x04, 0x00, 0x04, 0x04, 0x05, 0x00,
// Entry 140 - 17F
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
} // Size: 383 bytes
var paradigmLocales = [][3]uint16{ // 3 elements
0: [3]uint16{0x139, 0x0, 0x7c},
1: [3]uint16{0x13e, 0x0, 0x1f},
2: [3]uint16{0x3c0, 0x41, 0xef},
} // Size: 42 bytes
type mutualIntelligibility struct {
want uint16
have uint16
distance uint8
oneway bool
}
type scriptIntelligibility struct {
wantLang uint16
haveLang uint16
wantScript uint8
haveScript uint8
distance uint8
}
type regionIntelligibility struct {
lang uint16
script uint8
group uint8
distance uint8
}
// matchLang holds pairs of langIDs of base languages that are typically
// mutually intelligible. Each pair is associated with a confidence and
// whether the intelligibility goes one or both ways.
var matchLang = []mutualIntelligibility{ // 113 elements
0: {want: 0x1d1, have: 0xb7, distance: 0x4, oneway: false},
1: {want: 0x407, have: 0xb7, distance: 0x4, oneway: false},
2: {want: 0x407, have: 0x1d1, distance: 0x4, oneway: false},
3: {want: 0x407, have: 0x432, distance: 0x4, oneway: false},
4: {want: 0x43a, have: 0x1, distance: 0x4, oneway: false},
5: {want: 0x1a3, have: 0x10d, distance: 0x4, oneway: true},
6: {want: 0x295, have: 0x10d, distance: 0x4, oneway: true},
7: {want: 0x101, have: 0x36f, distance: 0x8, oneway: false},
8: {want: 0x101, have: 0x347, distance: 0x8, oneway: false},
9: {want: 0x5, have: 0x3e2, distance: 0xa, oneway: true},
10: {want: 0xd, have: 0x139, distance: 0xa, oneway: true},
11: {want: 0x16, have: 0x367, distance: 0xa, oneway: true},
12: {want: 0x21, have: 0x139, distance: 0xa, oneway: true},
13: {want: 0x56, have: 0x13e, distance: 0xa, oneway: true},
14: {want: 0x58, have: 0x3e2, distance: 0xa, oneway: true},
15: {want: 0x71, have: 0x3e2, distance: 0xa, oneway: true},
16: {want: 0x75, have: 0x139, distance: 0xa, oneway: true},
17: {want: 0x82, have: 0x1be, distance: 0xa, oneway: true},
18: {want: 0xa5, have: 0x139, distance: 0xa, oneway: true},
19: {want: 0xb2, have: 0x15e, distance: 0xa, oneway: true},
20: {want: 0xdd, have: 0x153, distance: 0xa, oneway: true},
21: {want: 0xe5, have: 0x139, distance: 0xa, oneway: true},
22: {want: 0xe9, have: 0x3a, distance: 0xa, oneway: true},
23: {want: 0xf0, have: 0x15e, distance: 0xa, oneway: true},
24: {want: 0xf9, have: 0x15e, distance: 0xa, oneway: true},
25: {want: 0x100, have: 0x139, distance: 0xa, oneway: true},
26: {want: 0x130, have: 0x139, distance: 0xa, oneway: true},
27: {want: 0x13c, have: 0x139, distance: 0xa, oneway: true},
28: {want: 0x140, have: 0x151, distance: 0xa, oneway: true},
29: {want: 0x145, have: 0x13e, distance: 0xa, oneway: true},
30: {want: 0x158, have: 0x101, distance: 0xa, oneway: true},
31: {want: 0x16d, have: 0x367, distance: 0xa, oneway: true},
32: {want: 0x16e, have: 0x139, distance: 0xa, oneway: true},
33: {want: 0x16f, have: 0x139, distance: 0xa, oneway: true},
34: {want: 0x17e, have: 0x139, distance: 0xa, oneway: true},
35: {want: 0x190, have: 0x13e, distance: 0xa, oneway: true},
36: {want: 0x194, have: 0x13e, distance: 0xa, oneway: true},
37: {want: 0x1a4, have: 0x1be, distance: 0xa, oneway: true},
38: {want: 0x1b4, have: 0x139, distance: 0xa, oneway: true},
39: {want: 0x1b8, have: 0x139, distance: 0xa, oneway: true},
40: {want: 0x1d4, have: 0x15e, distance: 0xa, oneway: true},
41: {want: 0x1d7, have: 0x3e2, distance: 0xa, oneway: true},
42: {want: 0x1d9, have: 0x139, distance: 0xa, oneway: true},
43: {want: 0x1e7, have: 0x139, distance: 0xa, oneway: true},
44: {want: 0x1f8, have: 0x139, distance: 0xa, oneway: true},
45: {want: 0x20e, have: 0x1e1, distance: 0xa, oneway: true},
46: {want: 0x210, have: 0x139, distance: 0xa, oneway: true},
47: {want: 0x22d, have: 0x15e, distance: 0xa, oneway: true},
48: {want: 0x242, have: 0x3e2, distance: 0xa, oneway: true},
49: {want: 0x24a, have: 0x139, distance: 0xa, oneway: true},
50: {want: 0x251, have: 0x139, distance: 0xa, oneway: true},
51: {want: 0x265, have: 0x139, distance: 0xa, oneway: true},
52: {want: 0x274, have: 0x48a, distance: 0xa, oneway: true},
53: {want: 0x28a, have: 0x3e2, distance: 0xa, oneway: true},
54: {want: 0x28e, have: 0x1f9, distance: 0xa, oneway: true},
55: {want: 0x2a3, have: 0x139, distance: 0xa, oneway: true},
56: {want: 0x2b5, have: 0x15e, distance: 0xa, oneway: true},
57: {want: 0x2b8, have: 0x139, distance: 0xa, oneway: true},
58: {want: 0x2be, have: 0x139, distance: 0xa, oneway: true},
59: {want: 0x2c3, have: 0x15e, distance: 0xa, oneway: true},
60: {want: 0x2ed, have: 0x139, distance: 0xa, oneway: true},
61: {want: 0x2f1, have: 0x15e, distance: 0xa, oneway: true},
62: {want: 0x2fa, have: 0x139, distance: 0xa, oneway: true},
63: {want: 0x2ff, have: 0x7e, distance: 0xa, oneway: true},
64: {want: 0x304, have: 0x139, distance: 0xa, oneway: true},
65: {want: 0x30b, have: 0x3e2, distance: 0xa, oneway: true},
66: {want: 0x31b, have: 0x1be, distance: 0xa, oneway: true},
67: {want: 0x31f, have: 0x1e1, distance: 0xa, oneway: true},
68: {want: 0x320, have: 0x139, distance: 0xa, oneway: true},
69: {want: 0x331, have: 0x139, distance: 0xa, oneway: true},
70: {want: 0x351, have: 0x139, distance: 0xa, oneway: true},
71: {want: 0x36a, have: 0x347, distance: 0xa, oneway: false},
72: {want: 0x36a, have: 0x36f, distance: 0xa, oneway: true},
73: {want: 0x37a, have: 0x139, distance: 0xa, oneway: true},
74: {want: 0x387, have: 0x139, distance: 0xa, oneway: true},
75: {want: 0x389, have: 0x139, distance: 0xa, oneway: true},
76: {want: 0x38b, have: 0x15e, distance: 0xa, oneway: true},
77: {want: 0x390, have: 0x139, distance: 0xa, oneway: true},
78: {want: 0x395, have: 0x139, distance: 0xa, oneway: true},
79: {want: 0x39d, have: 0x139, distance: 0xa, oneway: true},
80: {want: 0x3a5, have: 0x139, distance: 0xa, oneway: true},
81: {want: 0x3be, have: 0x139, distance: 0xa, oneway: true},
82: {want: 0x3c4, have: 0x13e, distance: 0xa, oneway: true},
83: {want: 0x3d4, have: 0x10d, distance: 0xa, oneway: true},
84: {want: 0x3d9, have: 0x139, distance: 0xa, oneway: true},
85: {want: 0x3e5, have: 0x15e, distance: 0xa, oneway: true},
86: {want: 0x3e9, have: 0x1be, distance: 0xa, oneway: true},
87: {want: 0x3fa, have: 0x139, distance: 0xa, oneway: true},
88: {want: 0x40c, have: 0x139, distance: 0xa, oneway: true},
89: {want: 0x423, have: 0x139, distance: 0xa, oneway: true},
90: {want: 0x429, have: 0x139, distance: 0xa, oneway: true},
91: {want: 0x431, have: 0x139, distance: 0xa, oneway: true},
92: {want: 0x43b, have: 0x139, distance: 0xa, oneway: true},
93: {want: 0x43e, have: 0x1e1, distance: 0xa, oneway: true},
94: {want: 0x445, have: 0x139, distance: 0xa, oneway: true},
95: {want: 0x450, have: 0x139, distance: 0xa, oneway: true},
96: {want: 0x461, have: 0x139, distance: 0xa, oneway: true},
97: {want: 0x467, have: 0x3e2, distance: 0xa, oneway: true},
98: {want: 0x46f, have: 0x139, distance: 0xa, oneway: true},
99: {want: 0x476, have: 0x3e2, distance: 0xa, oneway: true},
100: {want: 0x3883, have: 0x139, distance: 0xa, oneway: true},
101: {want: 0x480, have: 0x139, distance: 0xa, oneway: true},
102: {want: 0x482, have: 0x139, distance: 0xa, oneway: true},
103: {want: 0x494, have: 0x3e2, distance: 0xa, oneway: true},
104: {want: 0x49d, have: 0x139, distance: 0xa, oneway: true},
105: {want: 0x4ac, have: 0x529, distance: 0xa, oneway: true},
106: {want: 0x4b4, have: 0x139, distance: 0xa, oneway: true},
107: {want: 0x4bc, have: 0x3e2, distance: 0xa, oneway: true},
108: {want: 0x4e5, have: 0x15e, distance: 0xa, oneway: true},
109: {want: 0x4f2, have: 0x139, distance: 0xa, oneway: true},
110: {want: 0x512, have: 0x139, distance: 0xa, oneway: true},
111: {want: 0x518, have: 0x139, distance: 0xa, oneway: true},
112: {want: 0x52f, have: 0x139, distance: 0xa, oneway: true},
} // Size: 702 bytes
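These entries are consulted when a desired and a supported tag share nothing closer than their base languages. The following is a minimal sketch of how such a table might be scanned; it is illustrative only (the helper name langDistance and the fallback constant are assumptions, not the actual x/text matcher) and would sit alongside the generated tables:

// langDistance is an illustrative helper, not part of x/text: it scans
// matchLang for a want/have pair and returns the associated distance,
// treating symmetric entries (oneway == false) as matching in both
// directions. The fallback value is an assumption for this sketch.
func langDistance(want, have uint16) uint8 {
    for _, m := range matchLang {
        if m.want == want && m.have == have {
            return m.distance
        }
        if !m.oneway && m.want == have && m.have == want {
            return m.distance
        }
    }
    return 0x50 // assumed "unrelated languages" fallback
}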
// matchScript holds pairs of scriptIDs where readers of one script
// can typically also read the other. Each is associated with a confidence.
var matchScript = []scriptIntelligibility{ // 26 elements
0: {wantLang: 0x432, haveLang: 0x432, wantScript: 0x5b, haveScript: 0x20, distance: 0x5},
1: {wantLang: 0x432, haveLang: 0x432, wantScript: 0x20, haveScript: 0x5b, distance: 0x5},
2: {wantLang: 0x58, haveLang: 0x3e2, wantScript: 0x5b, haveScript: 0x20, distance: 0xa},
3: {wantLang: 0xa5, haveLang: 0x139, wantScript: 0xe, haveScript: 0x5b, distance: 0xa},
4: {wantLang: 0x1d7, haveLang: 0x3e2, wantScript: 0x8, haveScript: 0x20, distance: 0xa},
5: {wantLang: 0x210, haveLang: 0x139, wantScript: 0x2e, haveScript: 0x5b, distance: 0xa},
6: {wantLang: 0x24a, haveLang: 0x139, wantScript: 0x4f, haveScript: 0x5b, distance: 0xa},
7: {wantLang: 0x251, haveLang: 0x139, wantScript: 0x53, haveScript: 0x5b, distance: 0xa},
8: {wantLang: 0x2b8, haveLang: 0x139, wantScript: 0x58, haveScript: 0x5b, distance: 0xa},
9: {wantLang: 0x304, haveLang: 0x139, wantScript: 0x6f, haveScript: 0x5b, distance: 0xa},
10: {wantLang: 0x331, haveLang: 0x139, wantScript: 0x76, haveScript: 0x5b, distance: 0xa},
11: {wantLang: 0x351, haveLang: 0x139, wantScript: 0x22, haveScript: 0x5b, distance: 0xa},
12: {wantLang: 0x395, haveLang: 0x139, wantScript: 0x83, haveScript: 0x5b, distance: 0xa},
13: {wantLang: 0x39d, haveLang: 0x139, wantScript: 0x36, haveScript: 0x5b, distance: 0xa},
14: {wantLang: 0x3be, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5b, distance: 0xa},
15: {wantLang: 0x3fa, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5b, distance: 0xa},
16: {wantLang: 0x40c, haveLang: 0x139, wantScript: 0xd6, haveScript: 0x5b, distance: 0xa},
17: {wantLang: 0x450, haveLang: 0x139, wantScript: 0xe6, haveScript: 0x5b, distance: 0xa},
18: {wantLang: 0x461, haveLang: 0x139, wantScript: 0xe9, haveScript: 0x5b, distance: 0xa},
19: {wantLang: 0x46f, haveLang: 0x139, wantScript: 0x2c, haveScript: 0x5b, distance: 0xa},
20: {wantLang: 0x476, haveLang: 0x3e2, wantScript: 0x5b, haveScript: 0x20, distance: 0xa},
21: {wantLang: 0x4b4, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5b, distance: 0xa},
22: {wantLang: 0x4bc, haveLang: 0x3e2, wantScript: 0x5b, haveScript: 0x20, distance: 0xa},
23: {wantLang: 0x512, haveLang: 0x139, wantScript: 0x3e, haveScript: 0x5b, distance: 0xa},
24: {wantLang: 0x529, haveLang: 0x529, wantScript: 0x3b, haveScript: 0x3c, distance: 0xf},
25: {wantLang: 0x529, haveLang: 0x529, wantScript: 0x3c, haveScript: 0x3b, distance: 0x13},
} // Size: 232 bytes
var matchRegion = []regionIntelligibility{ // 15 elements
0: {lang: 0x3a, script: 0x0, group: 0x4, distance: 0x4},
1: {lang: 0x3a, script: 0x0, group: 0x84, distance: 0x4},
2: {lang: 0x139, script: 0x0, group: 0x1, distance: 0x4},
3: {lang: 0x139, script: 0x0, group: 0x81, distance: 0x4},
4: {lang: 0x13e, script: 0x0, group: 0x3, distance: 0x4},
5: {lang: 0x13e, script: 0x0, group: 0x83, distance: 0x4},
6: {lang: 0x3c0, script: 0x0, group: 0x3, distance: 0x4},
7: {lang: 0x3c0, script: 0x0, group: 0x83, distance: 0x4},
8: {lang: 0x529, script: 0x3c, group: 0x2, distance: 0x4},
9: {lang: 0x529, script: 0x3c, group: 0x82, distance: 0x4},
10: {lang: 0x3a, script: 0x0, group: 0x80, distance: 0x5},
11: {lang: 0x139, script: 0x0, group: 0x80, distance: 0x5},
12: {lang: 0x13e, script: 0x0, group: 0x80, distance: 0x5},
13: {lang: 0x3c0, script: 0x0, group: 0x80, distance: 0x5},
14: {lang: 0x529, script: 0x3c, group: 0x80, distance: 0x5},
} // Size: 114 bytes
// Total table size 1473 bytes (1KiB); checksum: 7BB90B5C
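matchScript and matchRegion are used the same way once the base languages agree: the first refines the distance when scripts differ, the second when only regions differ. A sketch of the script lookup, again illustrative only (helper name and fallback are assumptions); a region lookup would scan matchRegion in the same fashion:

// scriptDistance is an illustrative helper, not part of x/text: it looks
// up how well a reader of haveScript copes with wantScript for the given
// language pair. The fallback value is an assumption for this sketch.
func scriptDistance(wantLang, haveLang uint16, wantScript, haveScript uint8) uint8 {
    for _, s := range matchScript {
        if s.wantLang == wantLang && s.haveLang == haveLang &&
            s.wantScript == wantScript && s.haveScript == haveScript {
            return s.distance
        }
    }
    return 0x50 // assumed "different scripts" fallback
}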

vendor/golang.org/x/text/language/tags.go generated vendored

@ -0,0 +1,145 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language

import "golang.org/x/text/internal/language/compact"

// TODO: Various sets of commonly used tags and regions.

// MustParse is like Parse, but panics if the given BCP 47 tag cannot be parsed.
// It simplifies safe initialization of Tag values.
func MustParse(s string) Tag {
    t, err := Parse(s)
    if err != nil {
        panic(err)
    }
    return t
}

// MustParse is like Parse, but panics if the given BCP 47 tag cannot be parsed.
// It simplifies safe initialization of Tag values.
func (c CanonType) MustParse(s string) Tag {
    t, err := c.Parse(s)
    if err != nil {
        panic(err)
    }
    return t
}

// MustParseBase is like ParseBase, but panics if the given base cannot be parsed.
// It simplifies safe initialization of Base values.
func MustParseBase(s string) Base {
    b, err := ParseBase(s)
    if err != nil {
        panic(err)
    }
    return b
}

// MustParseScript is like ParseScript, but panics if the given script cannot be
// parsed. It simplifies safe initialization of Script values.
func MustParseScript(s string) Script {
    scr, err := ParseScript(s)
    if err != nil {
        panic(err)
    }
    return scr
}

// MustParseRegion is like ParseRegion, but panics if the given region cannot be
// parsed. It simplifies safe initialization of Region values.
func MustParseRegion(s string) Region {
    r, err := ParseRegion(s)
    if err != nil {
        panic(err)
    }
    return r
}
var (
und = Tag{}
Und Tag = Tag{}
Afrikaans Tag = Tag(compact.Afrikaans)
Amharic Tag = Tag(compact.Amharic)
Arabic Tag = Tag(compact.Arabic)
ModernStandardArabic Tag = Tag(compact.ModernStandardArabic)
Azerbaijani Tag = Tag(compact.Azerbaijani)
Bulgarian Tag = Tag(compact.Bulgarian)
Bengali Tag = Tag(compact.Bengali)
Catalan Tag = Tag(compact.Catalan)
Czech Tag = Tag(compact.Czech)
Danish Tag = Tag(compact.Danish)
German Tag = Tag(compact.German)
Greek Tag = Tag(compact.Greek)
English Tag = Tag(compact.English)
AmericanEnglish Tag = Tag(compact.AmericanEnglish)
BritishEnglish Tag = Tag(compact.BritishEnglish)
Spanish Tag = Tag(compact.Spanish)
EuropeanSpanish Tag = Tag(compact.EuropeanSpanish)
LatinAmericanSpanish Tag = Tag(compact.LatinAmericanSpanish)
Estonian Tag = Tag(compact.Estonian)
Persian Tag = Tag(compact.Persian)
Finnish Tag = Tag(compact.Finnish)
Filipino Tag = Tag(compact.Filipino)
French Tag = Tag(compact.French)
CanadianFrench Tag = Tag(compact.CanadianFrench)
Gujarati Tag = Tag(compact.Gujarati)
Hebrew Tag = Tag(compact.Hebrew)
Hindi Tag = Tag(compact.Hindi)
Croatian Tag = Tag(compact.Croatian)
Hungarian Tag = Tag(compact.Hungarian)
Armenian Tag = Tag(compact.Armenian)
Indonesian Tag = Tag(compact.Indonesian)
Icelandic Tag = Tag(compact.Icelandic)
Italian Tag = Tag(compact.Italian)
Japanese Tag = Tag(compact.Japanese)
Georgian Tag = Tag(compact.Georgian)
Kazakh Tag = Tag(compact.Kazakh)
Khmer Tag = Tag(compact.Khmer)
Kannada Tag = Tag(compact.Kannada)
Korean Tag = Tag(compact.Korean)
Kirghiz Tag = Tag(compact.Kirghiz)
Lao Tag = Tag(compact.Lao)
Lithuanian Tag = Tag(compact.Lithuanian)
Latvian Tag = Tag(compact.Latvian)
Macedonian Tag = Tag(compact.Macedonian)
Malayalam Tag = Tag(compact.Malayalam)
Mongolian Tag = Tag(compact.Mongolian)
Marathi Tag = Tag(compact.Marathi)
Malay Tag = Tag(compact.Malay)
Burmese Tag = Tag(compact.Burmese)
Nepali Tag = Tag(compact.Nepali)
Dutch Tag = Tag(compact.Dutch)
Norwegian Tag = Tag(compact.Norwegian)
Punjabi Tag = Tag(compact.Punjabi)
Polish Tag = Tag(compact.Polish)
Portuguese Tag = Tag(compact.Portuguese)
BrazilianPortuguese Tag = Tag(compact.BrazilianPortuguese)
EuropeanPortuguese Tag = Tag(compact.EuropeanPortuguese)
Romanian Tag = Tag(compact.Romanian)
Russian Tag = Tag(compact.Russian)
Sinhala Tag = Tag(compact.Sinhala)
Slovak Tag = Tag(compact.Slovak)
Slovenian Tag = Tag(compact.Slovenian)
Albanian Tag = Tag(compact.Albanian)
Serbian Tag = Tag(compact.Serbian)
SerbianLatin Tag = Tag(compact.SerbianLatin)
Swedish Tag = Tag(compact.Swedish)
Swahili Tag = Tag(compact.Swahili)
Tamil Tag = Tag(compact.Tamil)
Telugu Tag = Tag(compact.Telugu)
Thai Tag = Tag(compact.Thai)
Turkish Tag = Tag(compact.Turkish)
Ukrainian Tag = Tag(compact.Ukrainian)
Urdu Tag = Tag(compact.Urdu)
Uzbek Tag = Tag(compact.Uzbek)
Vietnamese Tag = Tag(compact.Vietnamese)
Chinese Tag = Tag(compact.Chinese)
SimplifiedChinese Tag = Tag(compact.SimplifiedChinese)
TraditionalChinese Tag = Tag(compact.TraditionalChinese)
Zulu Tag = Tag(compact.Zulu)
)
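From a caller's point of view, these helpers and the predefined tags are the package's convenience entry points. A small, self-contained usage sketch (the chosen tags are arbitrary examples):

package main

import (
    "fmt"

    "golang.org/x/text/language"
)

func main() {
    // MustParse panics on malformed input, so it is best suited to
    // package-level variables with known-good literals.
    nb := language.MustParse("nb-NO")
    no := language.MustParseRegion("NO")

    // Predefined tags avoid parsing entirely.
    fmt.Println(nb, no, language.BritishEnglish)
}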


@ -106,12 +106,11 @@ func Find(importPath, srcDir string) (filename, path string) {
// additional trailing data beyond the end of the export data.
func NewReader(r io.Reader) (io.Reader, error) {
buf := bufio.NewReader(r)
_, size, err := gcimporter.FindExportData(buf)
size, err := gcimporter.FindExportData(buf)
if err != nil {
return nil, err
}
if size >= 0 {
// We were given an archive and found the __.PKGDEF in it.
// This tells us the size of the export data, and we don't
// need to return the entire file.
@ -119,11 +118,6 @@ func NewReader(r io.Reader) (io.Reader, error) {
R: buf,
N: size,
}, nil
} else {
// We were given an object file. As such, we don't know how large
// the export data is and must return the entire file.
return buf, nil
}
}
// readAll works the same way as io.ReadAll, but avoids allocations and copies
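The reader returned by NewReader is normally fed straight into gcexportdata.Read. A minimal consumer sketch, assuming a gc export-data file is available on disk (the path below is hypothetical):

package main

import (
    "fmt"
    "go/token"
    "go/types"
    "os"

    "golang.org/x/tools/go/gcexportdata"
)

func main() {
    // Hypothetical export-data file; in practice the path would come from
    // gcexportdata.Find or the build system.
    f, err := os.Open("fmt.a")
    if err != nil {
        panic(err)
    }
    defer f.Close()

    // NewReader locates the export data within the archive or object file.
    r, err := gcexportdata.NewReader(f)
    if err != nil {
        panic(err)
    }

    fset := token.NewFileSet()
    imports := make(map[string]*types.Package)
    pkg, err := gcexportdata.Read(r, fset, imports, "fmt")
    if err != nil {
        panic(err)
    }
    fmt.Println(pkg.Path(), pkg.Name())
}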


@ -13,6 +13,7 @@ import (
"fmt"
"os"
"os/exec"
"slices"
"strings"
)
@ -131,7 +132,7 @@ func findExternalDriver(cfg *Config) driver {
// command.
//
// (See similar trick in Invocation.run in ../../internal/gocommand/invoke.go)
cmd.Env = append(slicesClip(cfg.Env), "PWD="+cfg.Dir)
cmd.Env = append(slices.Clip(cfg.Env), "PWD="+cfg.Dir)
cmd.Stdin = bytes.NewReader(req)
cmd.Stdout = buf
cmd.Stderr = stderr
@ -150,7 +151,3 @@ func findExternalDriver(cfg *Config) driver {
return &response, nil
}
}
// slicesClip removes unused capacity from the slice, returning s[:len(s):len(s)].
// TODO(adonovan): use go1.21 slices.Clip.
func slicesClip[S ~[]E, E any](s S) S { return s[:len(s):len(s)] }
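The switch from the local slicesClip helper to the standard library's slices.Clip (Go 1.21+) preserves the same trick: clipping capacity before append forces a copy, so the appended "PWD=..." entry cannot land in memory shared with the caller's cfg.Env. A standalone sketch of the behavior being guarded against:

package main

import (
    "fmt"
    "slices"
)

func main() {
    // A slice with spare capacity, standing in for cfg.Env.
    base := make([]string, 2, 4)
    base[0], base[1] = "A=1", "B=2"

    // Without Clip, both appends write into base's shared backing array,
    // so the second append clobbers the first.
    env1 := append(base, "PWD=/one")
    env2 := append(base, "PWD=/two")
    fmt.Println(env1[2], env2[2]) // PWD=/two PWD=/two

    // Clip shrinks capacity to length, so append must allocate a new array.
    env3 := append(slices.Clip(base), "PWD=/three")
    env4 := append(slices.Clip(base), "PWD=/four")
    fmt.Println(env3[2], env4[2]) // PWD=/three PWD=/four
}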


@ -322,6 +322,7 @@ type jsonPackage struct {
ImportPath string
Dir string
Name string
Target string
Export string
GoFiles []string
CompiledGoFiles []string
@ -505,13 +506,15 @@ func (state *golistState) createDriverResponse(words ...string) (*DriverResponse
pkg := &Package{
Name: p.Name,
ID: p.ImportPath,
Dir: p.Dir,
Target: p.Target,
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
EmbedFiles: absJoin(p.Dir, p.EmbedFiles),
EmbedPatterns: absJoin(p.Dir, p.EmbedPatterns),
IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles),
forTest: p.ForTest,
ForTest: p.ForTest,
depsErrors: p.DepsErrors,
Module: p.Module,
}
@ -795,7 +798,7 @@ func jsonFlag(cfg *Config, goVersion int) string {
// Request Dir in the unlikely case Export is not absolute.
addFields("Dir", "Export")
}
if cfg.Mode&needInternalForTest != 0 {
if cfg.Mode&NeedForTest != 0 {
addFields("ForTest")
}
if cfg.Mode&needInternalDepsErrors != 0 {
@ -810,6 +813,9 @@ func jsonFlag(cfg *Config, goVersion int) string {
if cfg.Mode&NeedEmbedPatterns != 0 {
addFields("EmbedPatterns")
}
if cfg.Mode&NeedTarget != 0 {
addFields("Target")
}
return "-json=" + strings.Join(fields, ",")
}


@ -23,9 +23,11 @@ var modes = [...]struct {
{NeedSyntax, "NeedSyntax"},
{NeedTypesInfo, "NeedTypesInfo"},
{NeedTypesSizes, "NeedTypesSizes"},
{NeedForTest, "NeedForTest"},
{NeedModule, "NeedModule"},
{NeedEmbedFiles, "NeedEmbedFiles"},
{NeedEmbedPatterns, "NeedEmbedPatterns"},
{NeedTarget, "NeedTarget"},
}
func (mode LoadMode) String() string {


@ -43,6 +43,20 @@ import (
// ID and Errors (if present) will always be filled.
// [Load] may return more information than requested.
//
// The Mode flag is a union of several bits named NeedName,
// NeedFiles, and so on, each of which determines whether
// a given field of Package (Name, Files, etc) should be
// populated.
//
// For convenience, we provide named constants for the most
// common combinations of Need flags:
//
// [LoadFiles] lists of files in each package
// [LoadImports] ... plus imports
// [LoadTypes] ... plus type information
// [LoadSyntax] ... plus type-annotated syntax
// [LoadAllSyntax] ... for all dependencies
//
// Unfortunately there are a number of open bugs related to
// interactions among the LoadMode bits:
// - https://github.com/golang/go/issues/56633
@ -55,7 +69,7 @@ const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
// NeedFiles adds GoFiles, OtherFiles, and IgnoredFiles
// NeedFiles adds Dir, GoFiles, OtherFiles, and IgnoredFiles
NeedFiles
// NeedCompiledGoFiles adds CompiledGoFiles.
@ -86,9 +100,10 @@ const (
// needInternalDepsErrors adds the internal deps errors field for use by gopls.
needInternalDepsErrors
// needInternalForTest adds the internal forTest field.
// NeedForTest adds ForTest.
//
// Tests must also be set on the context for this field to be populated.
needInternalForTest
NeedForTest
// typecheckCgo enables full support for type checking cgo. Requires Go 1.15+.
// Modifies CompiledGoFiles and Types, and has no effect on its own.
@ -103,38 +118,26 @@ const (
// NeedEmbedPatterns adds EmbedPatterns.
NeedEmbedPatterns
// NeedTarget adds Target.
NeedTarget
// Be sure to update loadmode_string.go when adding new items!
)
const (
// LoadFiles loads the name and file names for the initial packages.
//
// Deprecated: LoadFiles exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles
// LoadImports loads the name, file names, and import mapping for the initial packages.
//
// Deprecated: LoadImports exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadImports = LoadFiles | NeedImports
// LoadTypes loads exported type information for the initial packages.
//
// Deprecated: LoadTypes exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadTypes = LoadImports | NeedTypes | NeedTypesSizes
// LoadSyntax loads typed syntax for the initial packages.
//
// Deprecated: LoadSyntax exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo
// LoadAllSyntax loads typed syntax for the initial packages and all dependencies.
//
// Deprecated: LoadAllSyntax exists for historical compatibility
// and should not be used. Please directly specify the needed fields using the Need values.
LoadAllSyntax = LoadSyntax | NeedDeps
// Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile.
@ -434,6 +437,12 @@ type Package struct {
// PkgPath is the package path as used by the go/types package.
PkgPath string
// Dir is the directory associated with the package, if it exists.
//
// For packages listed by the go command, this is the directory containing
// the package files.
Dir string
// Errors contains any errors encountered querying the metadata
// of the package, or while parsing or type-checking its files.
Errors []Error
@ -473,6 +482,10 @@ type Package struct {
// information for the package as provided by the build system.
ExportFile string
// Target is the absolute install path of the .a file, for libraries,
// and of the executable file, for binaries.
Target string
// Imports maps import paths appearing in the package's Go source files
// to corresponding loaded Packages.
Imports map[string]*Package
@ -521,8 +534,8 @@ type Package struct {
// -- internal --
// forTest is the package under test, if any.
forTest string
// ForTest is the package under test, if any.
ForTest string
// depsErrors is the DepsErrors field from the go list response, if any.
depsErrors []*packagesinternal.PackageError
@ -551,9 +564,6 @@ type ModuleError struct {
}
func init() {
packagesinternal.GetForTest = func(p interface{}) string {
return p.(*Package).forTest
}
packagesinternal.GetDepsErrors = func(p interface{}) []*packagesinternal.PackageError {
return p.(*Package).depsErrors
}
@ -565,7 +575,6 @@ func init() {
}
packagesinternal.TypecheckCgo = int(typecheckCgo)
packagesinternal.DepsErrors = int(needInternalDepsErrors)
packagesinternal.ForTest = int(needInternalForTest)
}
// An Error describes a problem with a package's metadata, syntax, or types.
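Taken together, the new bits and fields are consumed through packages.Load. A usage sketch that composes Mode directly from Need flags, as the deprecation notes above recommend (the pattern "./..." is a placeholder):

package main

import (
    "fmt"

    "golang.org/x/tools/go/packages"
)

func main() {
    cfg := &packages.Config{
        // Compose the mode from Need bits directly rather than the
        // deprecated LoadFiles/LoadImports/... constants.
        Mode:  packages.NeedName | packages.NeedFiles | packages.NeedForTest | packages.NeedTarget,
        Tests: true, // required for ForTest to be populated
    }
    pkgs, err := packages.Load(cfg, "./...")
    if err != nil {
        panic(err)
    }
    for _, p := range pkgs {
        // Dir, Target, and ForTest are the fields added in this diff.
        fmt.Println(p.PkgPath, p.Dir, p.Target, p.ForTest)
    }
}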

Some files were not shown because too many files have changed in this diff.