[main] Upgrade to latest dependencies (#1738)

* upgrade to latest dependencies

bumping knative.dev/serving 2332731...4229a18:
  > 4229a18 upgrade to latest dependencies (#13352)
  > 183bea1 Add log message when internal encryption is enabled (#13351)
  > affc8df upgrade to latest dependencies (#13348)
  > 79a4588 Use kebab-case in config for queue sidecar image key (#13347)
  > f0d66ee Update net-istio nightly (#13345)
  > 6791c23 Update net-certmanager nightly (#13346)
  > 842e273 Update net-gateway-api nightly (#13344)
  > bedf5e9 Update community files (#13342)
  > 47907d0 Update net-certmanager nightly (#13341)
  > bf0e590 Update net-kourier nightly (#13340)
  > c18d504 Update community files (#13339)
  > 8a11681 Update net-kourier nightly (#13336)
  > 243fad9 Update net-istio nightly (#13334)
  > 4ac8668 Update net-kourier nightly (#13335)
  > ed3515c Fix: Raise the reconciliation timeout from 10 to 30s. (#13323)
  > 6b1972b upgrade to latest dependencies (#13330)
  > 2befffb Update net-istio nightly (#13328)
bumping knative.dev/hack d1317b0...92a65f1:
  > 92a65f1 don't quote vars referencing files (#234)
  > b3c9790 Notarize Mac binaries (#231)
  > 0198902 Format go code (#226)
  > 7dff557 Update community files (#233)
  > 6887217 Update community files (#232)
  > 9b76387 output certificate as well (#229)

Signed-off-by: Knative Automation <automation@knative.team>

* Update to latest Eventing

Signed-off-by: Knative Automation <automation@knative.team>
Co-authored-by: David Simansky <dsimansk@redhat.com>
knative-automation authored 2022-10-05 09:24:29 -04:00, committed by GitHub
parent 84cba981c1
commit 369321c1db
32 changed files with 949 additions and 713 deletions

go.mod (16 changed lines)

@@ -20,11 +20,11 @@ require (
k8s.io/cli-runtime v0.24.4
k8s.io/client-go v0.24.4
k8s.io/code-generator v0.24.4
knative.dev/eventing v0.34.1-0.20220921104109-54174e3cb963
knative.dev/hack v0.0.0-20220914183605-d1317b08c0c3
knative.dev/networking v0.0.0-20220914020748-cefed20d561c
knative.dev/pkg v0.0.0-20220921024409-d1d5c849073b
knative.dev/serving v0.34.1-0.20220921150110-2332731db1b9
knative.dev/eventing v0.34.1-0.20221005061829-af2298ff121a
knative.dev/hack v0.0.0-20221004153928-92a65f105c37
knative.dev/networking v0.0.0-20221003195429-a5c26fe64325
knative.dev/pkg v0.0.0-20221003153827-158538cc46ec
knative.dev/serving v0.34.1-0.20221005094629-080aaa5c6241
sigs.k8s.io/yaml v1.3.0
)
@@ -38,13 +38,13 @@ require (
github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
github.com/Azure/go-autorest/logger v0.2.1 // indirect
github.com/Azure/go-autorest/tracing v0.6.0 // indirect
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20211221011931-643d94fcab96 // indirect
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/blendle/zapdriver v1.3.1 // indirect
github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/cloudevents/sdk-go/sql/v2 v2.8.0 // indirect
github.com/cloudevents/sdk-go/v2 v2.10.1 // indirect
github.com/cloudevents/sdk-go/sql/v2 v2.0.0-20220930150014-52b12276cc4a // indirect
github.com/cloudevents/sdk-go/v2 v2.12.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/emicklei/go-restful v2.9.5+incompatible // indirect

go.sum (35 changed lines)

@@ -100,8 +100,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20211221011931-643d94fcab96 h1:2P/dm3KbCLnRHQN/Ma50elhMx1Si9loEZe5hOrsuvuE=
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20211221011931-643d94fcab96/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 h1:yL7+Jz0jTC6yykIK/Wh74gnTJnrGr5AyrNMXuA0gves=
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
@@ -134,11 +134,10 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudevents/sdk-go/sql/v2 v2.8.0 h1:gWednxJHL0Ycf93XeEFyQxYj81A7b4eNwkzjNxGunAM=
github.com/cloudevents/sdk-go/sql/v2 v2.8.0/go.mod h1:u9acNJbhmi1wnDJro4PEAqbr4N1LTCyEUClErxbPS1A=
github.com/cloudevents/sdk-go/v2 v2.8.0/go.mod h1:GpCBmUj7DIRiDhVvsK5d6WCbgTWs8DxAWTRtAwQmIXs=
github.com/cloudevents/sdk-go/v2 v2.10.1 h1:qNFovJ18fWOd8Q9ydWJPk1oiFudXyv1GxJIP7MwPjuM=
github.com/cloudevents/sdk-go/v2 v2.10.1/go.mod h1:GpCBmUj7DIRiDhVvsK5d6WCbgTWs8DxAWTRtAwQmIXs=
github.com/cloudevents/sdk-go/sql/v2 v2.0.0-20220930150014-52b12276cc4a h1:jwFoLdIzNYc92s2ZwhU4yOdoPP8zoALcKOzenIALHJM=
github.com/cloudevents/sdk-go/sql/v2 v2.0.0-20220930150014-52b12276cc4a/go.mod h1:SPV4fdKh1YqSNn2TTUVqUcw1El9Er0HI/yzay5wqsFk=
github.com/cloudevents/sdk-go/v2 v2.12.0 h1:p1k+ysVOZtNiXfijnwB3WqZNA3y2cGOiKQygWkUHCEI=
github.com/cloudevents/sdk-go/v2 v2.12.0/go.mod h1:xDmKfzNjM8gBvjaF8ijFjM1VYOVUEeUfapHMUX1T5To=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -655,7 +654,6 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1
github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs=
github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM=
@@ -986,7 +984,6 @@ golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxb
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220920022843-2ce7c2934d45 h1:yuLAip3bfURHClMG9VBdzPrQvCWjWiWUTBGV+/fCbUs=
golang.org/x/time v0.0.0-20220920022843-2ce7c2934d45/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1331,16 +1328,16 @@ k8s.io/kube-openapi v0.0.0-20220328201542-3ee0da9b0b42/go.mod h1:Z/45zLw8lUo4wdi
k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 h1:HNSDgDCrr/6Ly3WEGKZftiE7IY19Vz2GdbOCyI4qqhc=
k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
knative.dev/eventing v0.34.1-0.20220921104109-54174e3cb963 h1:DCz3QAiL/8CV4dfX/bmuUWcOh/R1qDzH1Ncc7wjYptQ=
knative.dev/eventing v0.34.1-0.20220921104109-54174e3cb963/go.mod h1:8u0uAkG22m9JWfhsO5a8Fk8+67KKlJvBrv7/SzeORiI=
knative.dev/hack v0.0.0-20220914183605-d1317b08c0c3 h1:5s3/9VZuTfdVGFIF/+7FUju9nHKyIOg6jsFXw7rhdIM=
knative.dev/hack v0.0.0-20220914183605-d1317b08c0c3/go.mod h1:yk2OjGDsbEnQjfxdm0/HJKS2WqTLEFg/N6nUs6Rqx3Q=
knative.dev/networking v0.0.0-20220914020748-cefed20d561c h1:KTVyYjUsxn23Ot9oTBCFj4tfUug5H3PO7lDCF/3H25k=
knative.dev/networking v0.0.0-20220914020748-cefed20d561c/go.mod h1:U5WmvAm/sFvqXz6uodddTrYBgXGFB9VlbGO/CnxZ/eU=
knative.dev/pkg v0.0.0-20220921024409-d1d5c849073b h1:DQn+gWHuXna0DlCYC9Cj/ElhUZlvblZVsgcDO0Gn4fQ=
knative.dev/pkg v0.0.0-20220921024409-d1d5c849073b/go.mod h1:v7MjUh7DVDGOFkiLfDTUt4hG75RwNJRyeQmlBz3wUAc=
knative.dev/serving v0.34.1-0.20220921150110-2332731db1b9 h1:lGx/PjQ9mtrdhDteP7g11WvJ4LGpDKuJMpFLzJHni+4=
knative.dev/serving v0.34.1-0.20220921150110-2332731db1b9/go.mod h1:4N5p1lqIdRADGqfjijwV1v49LlJU1x1hCeUE5FgMPWg=
knative.dev/eventing v0.34.1-0.20221005061829-af2298ff121a h1:Bh/WV0aSUVpa5V9t+KVT4Yrthr2iOwvxaPFk4b2E6Q8=
knative.dev/eventing v0.34.1-0.20221005061829-af2298ff121a/go.mod h1:KTyxEUhiRMbAUTPIscPDEYmCjXSXc0gHD+gQciefuGg=
knative.dev/hack v0.0.0-20221004153928-92a65f105c37 h1:4xB0A2aWQtzUcFjpZf9ufxRsjt+E7tEL364VlPttI8s=
knative.dev/hack v0.0.0-20221004153928-92a65f105c37/go.mod h1:yk2OjGDsbEnQjfxdm0/HJKS2WqTLEFg/N6nUs6Rqx3Q=
knative.dev/networking v0.0.0-20221003195429-a5c26fe64325 h1:0cMVoIs4hQ+kQzkZBon4WncT32AU/dHocb5gfj6/Wjk=
knative.dev/networking v0.0.0-20221003195429-a5c26fe64325/go.mod h1:fabM/jvxxkaMWBQGoY8N6vqJAdvYULWhJEnht4hacfM=
knative.dev/pkg v0.0.0-20221003153827-158538cc46ec h1:Cen2FjQ3sUDLfeKkP8gVA9YhLbj/VueTlGzLHHKWNfU=
knative.dev/pkg v0.0.0-20221003153827-158538cc46ec/go.mod h1:qTItuDia3CTW2J9oC4fzJsXifRPZcFWvESKozCE364Q=
knative.dev/serving v0.34.1-0.20221005094629-080aaa5c6241 h1:ddHb6DwHQxpksH6qQB4sdEqQ7CfV7ddeM0f1+kqRyis=
knative.dev/serving v0.34.1-0.20221005094629-080aaa5c6241/go.mod h1:JW/0/lyHDG3QclYrbuR4B5UbK455TkEJBPC+00W/xyM=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=


@@ -4,6 +4,8 @@
package antlr
import "sync"
var ATNInvalidAltNumber int
type ATN struct {
@@ -37,6 +39,10 @@ type ATN struct {
ruleToTokenType []int
states []ATNState
mu sync.Mutex
stateMu sync.RWMutex
edgeMu sync.RWMutex
}
func NewATN(grammarType int, maxTokenType int) *ATN {
@@ -59,14 +65,15 @@ func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
// in s and staying in same rule. Token.EPSILON is in set if we reach end of
// rule.
func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
return s.GetNextTokenWithinRule()
a.mu.Lock()
defer a.mu.Unlock()
iset := s.GetNextTokenWithinRule()
if iset == nil {
iset = a.NextTokensInContext(s, nil)
iset.readOnly = true
s.SetNextTokenWithinRule(iset)
}
s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()
return iset
}
func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
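Editor's note: the rewritten NextTokensNoContext pairs the new lock fields (stateMu, edgeMu, and the existing mu) with a compute-once cache: the token set is computed at most once per state while the lock is held, then reused. A minimal, self-contained sketch of that pattern with hypothetical names (not the vendored code):

```go
package main

import (
	"fmt"
	"sync"
)

// lazyCache mirrors the shape of ATN.NextTokensNoContext after this change:
// take the lock, check the cached value, compute and store it only if absent.
type lazyCache struct {
	mu     sync.Mutex
	cached *[]int
}

func (c *lazyCache) tokens(compute func() []int) []int {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.cached == nil {
		v := compute() // the expensive computation happens at most once
		c.cached = &v
	}
	return *c.cached
}

func main() {
	c := &lazyCache{}
	fmt.Println(c.tokens(func() []int { return []int{1, 2, 3} }))
	fmt.Println(c.tokens(func() []int { panic("never recomputed") }))
}
```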


@@ -104,7 +104,7 @@ func (b *BaseATNConfigSet) Alts() *BitSet {
func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet {
return &BaseATNConfigSet{
cachedHash: -1,
configLookup: NewArray2DHashSetWithCap(hashATNConfig, equalATNConfigs, 16, 2),
configLookup: newArray2DHashSetWithCap(hashATNConfig, equalATNConfigs, 16, 2),
fullCtx: fullCtx,
}
}
@@ -155,7 +155,7 @@ func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
}
func (b *BaseATNConfigSet) GetStates() Set {
states := NewArray2DHashSet(nil, nil)
states := newArray2DHashSet(nil, nil)
for i := 0; i < len(b.configs); i++ {
states.Add(b.configs[i].GetState())
@@ -283,7 +283,7 @@ func (b *BaseATNConfigSet) Clear() {
b.configs = make([]ATNConfig, 0)
b.cachedHash = -1
b.configLookup = NewArray2DHashSet(nil, equalATNConfigs)
b.configLookup = newArray2DHashSet(nil, equalATNConfigs)
}
func (b *BaseATNConfigSet) FullContext() bool {
@@ -365,7 +365,7 @@ type OrderedATNConfigSet struct {
func NewOrderedATNConfigSet() *OrderedATNConfigSet {
b := NewBaseATNConfigSet(false)
b.configLookup = NewArray2DHashSet(nil, nil)
b.configLookup = newArray2DHashSet(nil, nil)
return &OrderedATNConfigSet{BaseATNConfigSet: b}
}


@@ -4,7 +4,9 @@
package antlr
var ATNDeserializationOptionsdefaultOptions = &ATNDeserializationOptions{true, false, false}
import "errors"
var defaultATNDeserializationOptions = ATNDeserializationOptions{true, true, false}
type ATNDeserializationOptions struct {
readOnly bool
@@ -12,14 +14,48 @@ type ATNDeserializationOptions struct {
generateRuleBypassTransitions bool
}
func NewATNDeserializationOptions(CopyFrom *ATNDeserializationOptions) *ATNDeserializationOptions {
o := new(ATNDeserializationOptions)
func (opts *ATNDeserializationOptions) ReadOnly() bool {
return opts.readOnly
}
if CopyFrom != nil {
o.readOnly = CopyFrom.readOnly
o.verifyATN = CopyFrom.verifyATN
o.generateRuleBypassTransitions = CopyFrom.generateRuleBypassTransitions
func (opts *ATNDeserializationOptions) SetReadOnly(readOnly bool) {
if opts.readOnly {
panic(errors.New("Cannot mutate read only ATNDeserializationOptions"))
}
opts.readOnly = readOnly
}
func (opts *ATNDeserializationOptions) VerifyATN() bool {
return opts.verifyATN
}
func (opts *ATNDeserializationOptions) SetVerifyATN(verifyATN bool) {
if opts.readOnly {
panic(errors.New("Cannot mutate read only ATNDeserializationOptions"))
}
opts.verifyATN = verifyATN
}
func (opts *ATNDeserializationOptions) GenerateRuleBypassTransitions() bool {
return opts.generateRuleBypassTransitions
}
func (opts *ATNDeserializationOptions) SetGenerateRuleBypassTransitions(generateRuleBypassTransitions bool) {
if opts.readOnly {
panic(errors.New("Cannot mutate read only ATNDeserializationOptions"))
}
opts.generateRuleBypassTransitions = generateRuleBypassTransitions
}
func DefaultATNDeserializationOptions() *ATNDeserializationOptions {
return NewATNDeserializationOptions(&defaultATNDeserializationOptions)
}
func NewATNDeserializationOptions(other *ATNDeserializationOptions) *ATNDeserializationOptions {
o := new(ATNDeserializationOptions)
if other != nil {
*o = *other
o.readOnly = false
}
return o
}
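Editor's note: the new accessors replace direct field access with setters that panic once an instance is marked read-only, while NewATNDeserializationOptions clears readOnly on the copy it returns. A short usage sketch inferred from only the API visible in this diff:

```go
package main

import "github.com/antlr/antlr4/runtime/Go/antlr"

func main() {
	// DefaultATNDeserializationOptions returns a mutable copy of the
	// package defaults (the copy's readOnly flag is cleared).
	opts := antlr.DefaultATNDeserializationOptions()
	opts.SetVerifyATN(false)
	opts.SetGenerateRuleBypassTransitions(true)
	_ = antlr.NewATNDeserializer(opts)
	// Calling a setter on a read-only instance would panic with
	// "Cannot mutate read only ATNDeserializationOptions".
}
```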


@@ -5,50 +5,34 @@
package antlr
import (
"encoding/hex"
"fmt"
"strconv"
"strings"
"unicode/utf16"
)
// This is the earliest supported serialized UUID.
// stick to serialized version for now, we don't need a UUID instance
var BaseSerializedUUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
var AddedUnicodeSMP = "59627784-3BE5-417A-B9EB-8131A7286089"
const serializedVersion = 4
// This list contains all of the currently supported UUIDs, ordered by when
// the feature first appeared in this branch.
var SupportedUUIDs = []string{BaseSerializedUUID, AddedUnicodeSMP}
var SerializedVersion = 3
// This is the current serialized UUID.
var SerializedUUID = AddedUnicodeSMP
type LoopEndStateIntPair struct {
type loopEndStateIntPair struct {
item0 *LoopEndState
item1 int
}
type BlockStartStateIntPair struct {
type blockStartStateIntPair struct {
item0 BlockStartState
item1 int
}
type ATNDeserializer struct {
deserializationOptions *ATNDeserializationOptions
data []rune
options *ATNDeserializationOptions
data []int32
pos int
uuid string
}
func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
if options == nil {
options = ATNDeserializationOptionsdefaultOptions
options = &defaultATNDeserializationOptions
}
return &ATNDeserializer{deserializationOptions: options}
return &ATNDeserializer{options: options}
}
func stringInSlice(a string, list []string) int {
@@ -61,30 +45,10 @@ func stringInSlice(a string, list []string) int {
return -1
}
// isFeatureSupported determines if a particular serialized representation of an
// ATN supports a particular feature, identified by the UUID used for
// serializing the ATN at the time the feature was first introduced. Feature is
// the UUID marking the first time the feature was supported in the serialized
// ATN. ActualUuid is the UUID of the actual serialized ATN which is currently
// being deserialized. It returns true if actualUuid represents a serialized ATN
// at or after the feature identified by feature was introduced, and otherwise
// false.
func (a *ATNDeserializer) isFeatureSupported(feature, actualUUID string) bool {
idx1 := stringInSlice(feature, SupportedUUIDs)
if idx1 < 0 {
return false
}
idx2 := stringInSlice(actualUUID, SupportedUUIDs)
return idx2 >= idx1
}
func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.reset(utf16.Decode(data))
func (a *ATNDeserializer) Deserialize(data []int32) *ATN {
a.data = data
a.pos = 0
a.checkVersion()
a.checkUUID()
atn := a.readATN()
@@ -92,15 +56,7 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.readRules(atn)
a.readModes(atn)
sets := make([]*IntervalSet, 0)
// First, deserialize sets with 16-bit arguments <= U+FFFF.
sets = a.readSets(atn, sets, a.readInt)
// Next, if the ATN was serialized with the Unicode SMP feature,
// deserialize sets with 32-bit arguments <= U+10FFFF.
if (a.isFeatureSupported(AddedUnicodeSMP, a.uuid)) {
sets = a.readSets(atn, sets, a.readInt32)
}
sets := a.readSets(atn, nil)
a.readEdges(atn, sets)
a.readDecisions(atn)
@@ -108,7 +64,7 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.markPrecedenceDecisions(atn)
a.verifyATN(atn)
if a.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
if a.options.GenerateRuleBypassTransitions() && atn.grammarType == ATNTypeParser {
a.generateRuleBypassTransitions(atn)
// Re-verify after modification
a.verifyATN(atn)
@@ -118,42 +74,14 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
}
func (a *ATNDeserializer) reset(data []rune) {
temp := make([]rune, len(data))
for i, c := range data {
// Don't adjust the first value since that's the version number
if i == 0 {
temp[i] = c
} else if c > 1 {
temp[i] = c - 2
} else {
temp[i] = c + 65533
}
}
a.data = temp
a.pos = 0
}
func (a *ATNDeserializer) checkVersion() {
version := a.readInt()
if version != SerializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
if version != serializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(serializedVersion) + ").")
}
}
func (a *ATNDeserializer) checkUUID() {
uuid := a.readUUID()
if stringInSlice(uuid, SupportedUUIDs) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
}
a.uuid = uuid
}
func (a *ATNDeserializer) readATN() *ATN {
grammarType := a.readInt()
maxTokenType := a.readInt()
@@ -162,37 +90,36 @@ func (a *ATNDeserializer) readATN() *ATN {
}
func (a *ATNDeserializer) readStates(atn *ATN) {
loopBackStateNumbers := make([]LoopEndStateIntPair, 0)
endStateNumbers := make([]BlockStartStateIntPair, 0)
nstates := a.readInt()
// Allocate worst case size.
loopBackStateNumbers := make([]loopEndStateIntPair, 0, nstates)
endStateNumbers := make([]blockStartStateIntPair, 0, nstates)
// Preallocate states slice.
atn.states = make([]ATNState, 0, nstates)
for i := 0; i < nstates; i++ {
stype := a.readInt()
// Ignore bad types of states
if stype == ATNStateInvalidType {
atn.addState(nil)
continue
}
ruleIndex := a.readInt()
if ruleIndex == 0xFFFF {
ruleIndex = -1
}
s := a.stateFactory(stype, ruleIndex)
if stype == ATNStateLoopEnd {
loopBackStateNumber := a.readInt()
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
loopBackStateNumbers = append(loopBackStateNumbers, loopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
} else if s2, ok := s.(BlockStartState); ok {
endStateNumber := a.readInt()
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
endStateNumbers = append(endStateNumbers, blockStartStateIntPair{s2, endStateNumber})
}
atn.addState(s)
@@ -200,20 +127,15 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
// Delay the assignment of loop back and end states until we know all the state
// instances have been initialized
for j := 0; j < len(loopBackStateNumbers); j++ {
pair := loopBackStateNumbers[j]
for _, pair := range loopBackStateNumbers {
pair.item0.loopBackState = atn.states[pair.item1]
}
for j := 0; j < len(endStateNumbers); j++ {
pair := endStateNumbers[j]
for _, pair := range endStateNumbers {
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
}
numNonGreedyStates := a.readInt()
for j := 0; j < numNonGreedyStates; j++ {
stateNumber := a.readInt()
@@ -221,7 +143,6 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
}
numPrecedenceStates := a.readInt()
for j := 0; j < numPrecedenceStates; j++ {
stateNumber := a.readInt()
@@ -233,12 +154,12 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
nrules := a.readInt()
if atn.grammarType == ATNTypeLexer {
atn.ruleToTokenType = make([]int, nrules) // TODO: initIntArray(nrules, 0)
atn.ruleToTokenType = make([]int, nrules)
}
atn.ruleToStartState = make([]*RuleStartState, nrules) // TODO: initIntArray(nrules, 0)
atn.ruleToStartState = make([]*RuleStartState, nrules)
for i := 0; i < nrules; i++ {
for i := range atn.ruleToStartState {
s := a.readInt()
startState := atn.states[s].(*RuleStartState)
@@ -247,19 +168,13 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
tokenType := a.readInt()
if tokenType == 0xFFFF {
tokenType = TokenEOF
}
atn.ruleToTokenType[i] = tokenType
}
}
atn.ruleToStopState = make([]*RuleStopState, nrules) //initIntArray(nrules, 0)
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
atn.ruleToStopState = make([]*RuleStopState, nrules)
for _, state := range atn.states {
if s2, ok := state.(*RuleStopState); ok {
atn.ruleToStopState[s2.ruleIndex] = s2
atn.ruleToStartState[s2.ruleIndex].stopState = s2
@@ -269,17 +184,25 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
func (a *ATNDeserializer) readModes(atn *ATN) {
nmodes := a.readInt()
atn.modeToStartState = make([]*TokensStartState, nmodes)
for i := 0; i < nmodes; i++ {
for i := range atn.modeToStartState {
s := a.readInt()
atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
atn.modeToStartState[i] = atn.states[s].(*TokensStartState)
}
}
func (a *ATNDeserializer) readSets(atn *ATN, sets []*IntervalSet, readUnicode func() int) []*IntervalSet {
func (a *ATNDeserializer) readSets(atn *ATN, sets []*IntervalSet) []*IntervalSet {
m := a.readInt()
// Preallocate the needed capacity.
if cap(sets)-len(sets) < m {
isets := make([]*IntervalSet, len(sets), len(sets)+m)
copy(isets, sets)
sets = isets
}
for i := 0; i < m; i++ {
iset := NewIntervalSet()
@@ -293,8 +216,8 @@ }
}
for j := 0; j < n; j++ {
i1 := readUnicode()
i2 := readUnicode()
i1 := a.readInt()
i2 := a.readInt()
iset.addRange(i1, i2)
}
@@ -322,11 +245,9 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
// Edges for rule stop states can be derived, so they are not serialized
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
for j := 0; j < len(state.GetTransitions()); j++ {
var t, ok = state.GetTransitions()[j].(*RuleTransition)
for _, state := range atn.states {
for _, t := range state.GetTransitions() {
var rt, ok = t.(*RuleTransition)
if !ok {
continue
@@ -334,48 +255,42 @@
outermostPrecedenceReturn := -1
if atn.ruleToStartState[t.getTarget().GetRuleIndex()].isPrecedenceRule {
if t.precedence == 0 {
outermostPrecedenceReturn = t.getTarget().GetRuleIndex()
if atn.ruleToStartState[rt.getTarget().GetRuleIndex()].isPrecedenceRule {
if rt.precedence == 0 {
outermostPrecedenceReturn = rt.getTarget().GetRuleIndex()
}
}
trans := NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
trans := NewEpsilonTransition(rt.followState, outermostPrecedenceReturn)
atn.ruleToStopState[t.getTarget().GetRuleIndex()].AddTransition(trans, -1)
atn.ruleToStopState[rt.getTarget().GetRuleIndex()].AddTransition(trans, -1)
}
}
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
if s2, ok := state.(*BaseBlockStartState); ok {
for _, state := range atn.states {
if s2, ok := state.(BlockStartState); ok {
// We need to know the end state to set its start state
if s2.endState == nil {
if s2.getEndState() == nil {
panic("IllegalState")
}
// Block end states can only be associated to a single block start state
if s2.endState.startState != nil {
if s2.getEndState().startState != nil {
panic("IllegalState")
}
s2.endState.startState = state
s2.getEndState().startState = state
}
if s2, ok := state.(*PlusLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
if t2, ok := target.(*PlusBlockStartState); ok {
for _, t := range s2.GetTransitions() {
if t2, ok := t.getTarget().(*PlusBlockStartState); ok {
t2.loopBackState = state
}
}
} else if s2, ok := state.(*StarLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
if t2, ok := target.(*StarLoopEntryState); ok {
for _, t := range s2.GetTransitions() {
if t2, ok := t.getTarget().(*StarLoopEntryState); ok {
t2.loopBackState = state
}
}
@@ -399,25 +314,13 @@ func (a *ATNDeserializer) readLexerActions(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
count := a.readInt()
atn.lexerActions = make([]LexerAction, count) // initIntArray(count, nil)
atn.lexerActions = make([]LexerAction, count)
for i := 0; i < count; i++ {
for i := range atn.lexerActions {
actionType := a.readInt()
data1 := a.readInt()
if data1 == 0xFFFF {
data1 = -1
}
data2 := a.readInt()
if data2 == 0xFFFF {
data2 = -1
}
lexerAction := a.lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
atn.lexerActions[i] = a.lexerActionFactory(actionType, data1, data2)
}
}
}
@@ -565,14 +468,12 @@ func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
}
func (a *ATNDeserializer) verifyATN(atn *ATN) {
if !a.deserializationOptions.verifyATN {
if !a.options.VerifyATN() {
return
}
// Verify assumptions
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
for _, state := range atn.states {
if state == nil {
continue
}
@@ -587,18 +488,18 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
a.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(len(s2.GetTransitions()) == 2, "")
switch s2 := state.(type) {
switch s2.transitions[0].getTarget().(type) {
case *StarBlockStartState:
var _, ok2 = s2.GetTransitions()[1].getTarget().(*LoopEndState)
_, ok := s2.transitions[1].getTarget().(*LoopEndState)
a.checkCondition(ok2, "")
a.checkCondition(ok, "")
a.checkCondition(!s2.nonGreedy, "")
case *LoopEndState:
var s3, ok2 = s2.GetTransitions()[1].getTarget().(*StarBlockStartState)
var _, ok = s2.transitions[1].getTarget().(*StarBlockStartState)
a.checkCondition(ok2, "")
a.checkCondition(s3.nonGreedy, "")
a.checkCondition(ok, "")
a.checkCondition(s2.nonGreedy, "")
default:
panic("IllegalState")
@@ -607,9 +508,9 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
case *StarLoopbackState:
a.checkCondition(len(state.GetTransitions()) == 1, "")
var _, ok2 = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
var _, ok = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
a.checkCondition(ok2, "")
a.checkCondition(ok, "")
case *LoopEndState:
a.checkCondition(s2.loopBackState != nil, "")
@@ -617,8 +518,8 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
case *RuleStartState:
a.checkCondition(s2.stopState != nil, "")
case *BaseBlockStartState:
a.checkCondition(s2.endState != nil, "")
case BlockStartState:
a.checkCondition(s2.getEndState() != nil, "")
case *BlockEndState:
a.checkCondition(s2.startState != nil, "")
@@ -649,53 +550,7 @@ func (a *ATNDeserializer) readInt() int {
a.pos++
return int(v)
}
func (a *ATNDeserializer) readInt32() int {
var low = a.readInt()
var high = a.readInt()
return low | (high << 16)
}
//TODO
//func (a *ATNDeserializer) readLong() int64 {
// panic("Not implemented")
// var low = a.readInt32()
// var high = a.readInt32()
// return (low & 0x00000000FFFFFFFF) | (high << int32)
//}
func createByteToHex() []string {
bth := make([]string, 256)
for i := 0; i < 256; i++ {
bth[i] = strings.ToUpper(hex.EncodeToString([]byte{byte(i)}))
}
return bth
}
var byteToHex = createByteToHex()
func (a *ATNDeserializer) readUUID() string {
bb := make([]int, 16)
for i := 7; i >= 0; i-- {
integer := a.readInt()
bb[(2*i)+1] = integer & 0xFF
bb[2*i] = (integer >> 8) & 0xFF
}
return byteToHex[bb[0]] + byteToHex[bb[1]] +
byteToHex[bb[2]] + byteToHex[bb[3]] + "-" +
byteToHex[bb[4]] + byteToHex[bb[5]] + "-" +
byteToHex[bb[6]] + byteToHex[bb[7]] + "-" +
byteToHex[bb[8]] + byteToHex[bb[9]] + "-" +
byteToHex[bb[10]] + byteToHex[bb[11]] +
byteToHex[bb[12]] + byteToHex[bb[13]] +
byteToHex[bb[14]] + byteToHex[bb[15]]
return int(v) // data is 32 bits but int is at least that big
}
func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
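Editor's note: readSets now takes a plain []*IntervalSet and preallocates capacity before the read loop (the readUnicode indirection disappears along with the UUID machinery). The grow-capacity-while-preserving-contents idiom in isolation, as a self-contained sketch with int elements rather than the vendored types:

```go
package main

import "fmt"

// ensureRoom mirrors the readSets preallocation: if fewer than m free slots
// remain, copy into a slice with enough capacity so later appends never
// reallocate.
func ensureRoom(sets []int, m int) []int {
	if cap(sets)-len(sets) < m {
		grown := make([]int, len(sets), len(sets)+m)
		copy(grown, sets)
		sets = grown
	}
	return sets
}

func main() {
	s := make([]int, 2, 2)
	s = ensureRoom(s, 8)
	fmt.Println(len(s), cap(s)) // 2 10
}
```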


@@ -243,6 +243,8 @@ func NewBasicBlockStartState() *BasicBlockStartState {
return &BasicBlockStartState{BaseBlockStartState: b}
}
var _ BlockStartState = &BasicBlockStartState{}
// BlockEndState is a terminal node of a simple (a|b|c) block.
type BlockEndState struct {
*BaseATNState
@@ -318,6 +320,8 @@ func NewPlusBlockStartState() *PlusBlockStartState {
return &PlusBlockStartState{BaseBlockStartState: b}
}
var _ BlockStartState = &PlusBlockStartState{}
// StarBlockStartState is the block that begins a closure loop.
type StarBlockStartState struct {
*BaseBlockStartState
@@ -331,6 +335,8 @@ func NewStarBlockStartState() *StarBlockStartState {
return &StarBlockStartState{BaseBlockStartState: b}
}
var _ BlockStartState = &StarBlockStartState{}
type StarLoopbackState struct {
*BaseATNState
}


@@ -6,7 +6,6 @@ package antlr
import (
"sort"
"sync"
)
type DFA struct {
@@ -18,23 +17,27 @@ type DFA struct {
// states is all the DFA states. Use Map to get the old state back; Set can only
// indicate whether it is there.
states map[int]*DFAState
statesMu sync.RWMutex
s0 *DFAState
s0Mu sync.RWMutex
// precedenceDfa is the backing field for isPrecedenceDfa and setPrecedenceDfa.
// True if the DFA is for a precedence decision and false otherwise.
precedenceDfa bool
precedenceDfaMu sync.RWMutex
}
func NewDFA(atnStartState DecisionState, decision int) *DFA {
return &DFA{
dfa := &DFA{
atnStartState: atnStartState,
decision: decision,
states: make(map[int]*DFAState),
}
if s, ok := atnStartState.(*StarLoopEntryState); ok && s.precedenceRuleDecision {
dfa.precedenceDfa = true
dfa.s0 = NewDFAState(-1, NewBaseATNConfigSet(false))
dfa.s0.isAcceptState = false
dfa.s0.requiresFullContext = false
}
return dfa
}
// getPrecedenceStartState gets the start state for the current precedence and
@@ -79,8 +82,6 @@ func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
}
func (d *DFA) getPrecedenceDfa() bool {
d.precedenceDfaMu.RLock()
defer d.precedenceDfaMu.RUnlock()
return d.precedenceDfa
}
@@ -104,46 +105,32 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
d.setS0(nil)
}
d.precedenceDfaMu.Lock()
defer d.precedenceDfaMu.Unlock()
d.precedenceDfa = precedenceDfa
}
}
func (d *DFA) getS0() *DFAState {
d.s0Mu.RLock()
defer d.s0Mu.RUnlock()
return d.s0
}
func (d *DFA) setS0(s *DFAState) {
d.s0Mu.Lock()
defer d.s0Mu.Unlock()
d.s0 = s
}
func (d *DFA) getState(hash int) (*DFAState, bool) {
d.statesMu.RLock()
defer d.statesMu.RUnlock()
s, ok := d.states[hash]
return s, ok
}
func (d *DFA) setStates(states map[int]*DFAState) {
d.statesMu.Lock()
defer d.statesMu.Unlock()
d.states = states
}
func (d *DFA) setState(hash int, state *DFAState) {
d.statesMu.Lock()
defer d.statesMu.Unlock()
d.states[hash] = state
}
func (d *DFA) numStates() int {
d.statesMu.RLock()
defer d.statesMu.RUnlock()
return len(d.states)
}
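Editor's note: two things happen in this dfa.go diff. The per-DFA mutexes (statesMu, s0Mu, precedenceDfaMu) disappear because synchronization now lives in the ATN-level stateMu/edgeMu added earlier, and NewDFA eagerly performs the precedence-DFA initialization that ParserATNSimulator.AdaptivePredict previously did lazily. A minimal sketch of the caller-holds-the-lock pattern the now lock-free accessors rely on (hypothetical names, not the vendored code):

```go
package main

import (
	"fmt"
	"sync"
)

// table stands in for DFA.states: its accessors are lock-free, and the
// owner (the ATN in the real code) serializes access with one RWMutex.
type table struct{ states map[int]string }

func (t *table) get(hash int) (string, bool) { s, ok := t.states[hash]; return s, ok }
func (t *table) set(hash int, s string)      { t.states[hash] = s }

type owner struct {
	stateMu sync.RWMutex // analogous to ATN.stateMu
	t       table
}

func (o *owner) lookup(hash int) (string, bool) {
	o.stateMu.RLock()
	defer o.stateMu.RUnlock()
	return o.t.get(hash)
}

func (o *owner) store(hash int, s string) {
	o.stateMu.Lock()
	defer o.stateMu.Unlock()
	o.t.set(hash, s)
}

func main() {
	o := &owner{t: table{states: map[int]string{}}}
	o.store(1, "s0")
	fmt.Println(o.lookup(1))
}
```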


@@ -6,7 +6,6 @@ package antlr
import (
"fmt"
"sync"
)
// PredPrediction maps a predicate to a predicted alternative.
@@ -51,7 +50,6 @@ type DFAState struct {
// edges elements point to the target of the symbol. Shift up by 1 so (-1)
// Token.EOF maps to the first element.
edges []*DFAState
edgesMu sync.RWMutex
isAcceptState bool
@@ -93,7 +91,7 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
// GetAltSet gets the set of all alts mentioned by all ATN configurations in d.
func (d *DFAState) GetAltSet() Set {
alts := NewArray2DHashSet(nil, nil)
alts := newArray2DHashSet(nil, nil)
if d.configs != nil {
for _, c := range d.configs.GetItems() {
@@ -109,32 +107,22 @@ func (d *DFAState) GetAltSet() Set {
}
func (d *DFAState) getEdges() []*DFAState {
d.edgesMu.RLock()
defer d.edgesMu.RUnlock()
return d.edges
}
func (d *DFAState) numEdges() int {
d.edgesMu.RLock()
defer d.edgesMu.RUnlock()
return len(d.edges)
}
func (d *DFAState) getIthEdge(i int) *DFAState {
d.edgesMu.RLock()
defer d.edgesMu.RUnlock()
return d.edges[i]
}
func (d *DFAState) setEdges(newEdges []*DFAState) {
d.edgesMu.Lock()
defer d.edgesMu.Unlock()
d.edges = newEdges
}
func (d *DFAState) setIthEdge(i int, edge *DFAState) {
d.edgesMu.Lock()
defer d.edgesMu.Unlock()
d.edges[i] = edge
}


@@ -16,7 +16,7 @@ type ErrorStrategy interface {
RecoverInline(Parser) Token
Recover(Parser, RecognitionException)
Sync(Parser)
inErrorRecoveryMode(Parser) bool
InErrorRecoveryMode(Parser) bool
ReportError(Parser, RecognitionException)
ReportMatch(Parser)
}
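Editor's note: renaming inErrorRecoveryMode to InErrorRecoveryMode exports it on the ErrorStrategy interface, so code outside the antlr package can consult recovery state. A sketch of the kind of custom strategy this enables, assuming only the API visible in this diff (quietStrategy is a hypothetical name):

```go
package main

import "github.com/antlr/antlr4/runtime/Go/antlr"

// quietStrategy suppresses secondary reports by checking the now-exported
// InErrorRecoveryMode; embedding *DefaultErrorStrategy supplies the rest of
// the ErrorStrategy interface.
type quietStrategy struct {
	*antlr.DefaultErrorStrategy
}

func (q *quietStrategy) ReportError(recognizer antlr.Parser, e antlr.RecognitionException) {
	if q.InErrorRecoveryMode(recognizer) {
		return // already recovering; stay quiet
	}
	q.DefaultErrorStrategy.ReportError(recognizer, e)
}

func main() {
	var _ antlr.ErrorStrategy = &quietStrategy{antlr.NewDefaultErrorStrategy()}
}
```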
@@ -40,7 +40,7 @@ func NewDefaultErrorStrategy() *DefaultErrorStrategy {
// error". This is used to suppress Reporting multiple error messages while
// attempting to recover from a detected syntax error.
//
// @see //inErrorRecoveryMode
// @see //InErrorRecoveryMode
//
d.errorRecoveryMode = false
@@ -71,7 +71,7 @@ func (d *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) {
d.errorRecoveryMode = true
}
func (d *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
func (d *DefaultErrorStrategy) InErrorRecoveryMode(recognizer Parser) bool {
return d.errorRecoveryMode
}
@@ -118,7 +118,7 @@ func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
// if we've already Reported an error and have not Matched a token
// yet successfully, don't Report any errors.
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return // don't Report spurious errors
}
d.beginErrorCondition(recognizer)
@@ -209,7 +209,7 @@ func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException
//
func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
// If already recovering, don't try to Sync
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return
}
@@ -312,7 +312,7 @@ func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *Faile
// @param recognizer the parser instance
//
func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return
}
d.beginErrorCondition(recognizer)
@@ -341,7 +341,7 @@ func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
// @param recognizer the parser instance
//
func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return
}
d.beginErrorCondition(recognizer)
@@ -738,7 +738,11 @@ func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
context := recognizer.GetParserRuleContext()
for context != nil {
context.SetException(e)
context = context.GetParent().(ParserRuleContext)
if parent, ok := context.GetParent().(ParserRuleContext); ok {
context = parent
} else {
context = nil
}
}
panic(NewParseCancellationException()) // TODO we don't emit e properly
}
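Editor's note: the BailErrorStrategy fix replaces an unchecked type assertion, which panicked once the walk reached a root whose GetParent is not a ParserRuleContext, with a comma-ok assertion that ends the loop cleanly. The pattern in isolation, as a self-contained sketch:

```go
package main

import "fmt"

type node struct {
	parent interface{} // nil or a non-*node value at the root
}

// markAll visits every ancestor and stops cleanly at the root instead of
// panicking on a failed assertion (the pre-fix behavior).
func markAll(n *node) int {
	count := 0
	for n != nil {
		count++
		if p, ok := n.parent.(*node); ok {
			n = p
		} else {
			n = nil
		}
	}
	return count
}

func main() {
	root := &node{parent: "not-a-node"}
	fmt.Println(markAll(&node{parent: root})) // 2
}
```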


@@ -91,11 +91,16 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
dfa := l.decisionToDFA[mode]
if dfa.getS0() == nil {
var s0 *DFAState
l.atn.stateMu.RLock()
s0 = dfa.getS0()
l.atn.stateMu.RUnlock()
if s0 == nil {
return l.MatchATN(input)
}
return l.execATN(input, dfa.getS0())
return l.execATN(input, s0)
}
func (l *LexerATNSimulator) reset() {
@@ -117,11 +122,7 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
suppressEdge := s0Closure.hasSemanticContext
s0Closure.hasSemanticContext = false
next := l.addDFAState(s0Closure)
if !suppressEdge {
l.decisionToDFA[l.mode].setS0(next)
}
next := l.addDFAState(s0Closure, suppressEdge)
predict := l.execATN(input, next)
@@ -203,10 +204,15 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// {@code t}, or {@code nil} if the target state for l edge is not
// already cached
func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
if s.getEdges() == nil || t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
if t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
return nil
}
l.atn.edgeMu.RLock()
defer l.atn.edgeMu.RUnlock()
if s.getEdges() == nil {
return nil
}
target := s.getIthEdge(t - LexerATNSimulatorMinDFAEdge)
if LexerATNSimulatorDebug && target != nil {
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
@@ -537,7 +543,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
suppressEdge := cfgs.HasSemanticContext()
cfgs.SetHasSemanticContext(false)
to = l.addDFAState(cfgs)
to = l.addDFAState(cfgs, true)
if suppressEdge {
return to
@@ -551,6 +557,8 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
if LexerATNSimulatorDebug {
fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
}
l.atn.edgeMu.Lock()
defer l.atn.edgeMu.Unlock()
if from.getEdges() == nil {
// make room for tokens 1..n and -1 masquerading as index 0
from.setEdges(make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1))
@@ -564,7 +572,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
// configurations already. This method also detects the first
// configuration containing an ATN rule stop state. Later, when
// traversing the DFA, we will know which rule to accept.
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet, suppressEdge bool) *DFAState {
proposed := NewDFAState(-1, configs)
var firstConfigWithRuleStopState ATNConfig
@@ -585,16 +593,22 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
}
hash := proposed.hash()
dfa := l.decisionToDFA[l.mode]
l.atn.stateMu.Lock()
defer l.atn.stateMu.Unlock()
existing, ok := dfa.getState(hash)
if ok {
return existing
}
newState := proposed
newState.stateNumber = dfa.numStates()
proposed = existing
} else {
proposed.stateNumber = dfa.numStates()
configs.SetReadOnly(true)
newState.configs = configs
dfa.setState(hash, newState)
return newState
proposed.configs = configs
dfa.setState(hash, proposed)
}
if !suppressEdge {
dfa.setS0(proposed)
}
return proposed
}
func (l *LexerATNSimulator) getDFA(mode int) *DFA {
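Editor's note: addDFAState now takes a suppressEdge flag and performs the lookup-or-insert plus the optional s0 publication inside one critical section (atn.stateMu), closing the check-then-act race between Match's earlier unlocked getS0 and MatchATN's later setS0. The shape of that single-lock insert-or-reuse, sketched standalone with hypothetical names:

```go
package main

import (
	"fmt"
	"sync"
)

type interner struct {
	mu     sync.Mutex
	byHash map[int]string
	s0     string
}

// add reuses an existing entry or inserts the proposed one, and optionally
// publishes it as s0 — all under one lock, as addDFAState does after this
// change (publish corresponds to !suppressEdge in the real code).
func (in *interner) add(hash int, proposed string, publish bool) string {
	in.mu.Lock()
	defer in.mu.Unlock()
	if existing, ok := in.byHash[hash]; ok {
		proposed = existing
	} else {
		in.byHash[hash] = proposed
	}
	if publish {
		in.s0 = proposed
	}
	return proposed
}

func main() {
	in := &interner{byHash: map[int]string{}}
	fmt.Println(in.add(7, "state-A", true))  // inserts and publishes
	fmt.Println(in.add(7, "state-B", false)) // reuses "state-A"
}
```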


@@ -38,7 +38,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
look := make([]*IntervalSet, count)
for alt := 0; alt < count; alt++ {
look[alt] = NewIntervalSet()
lookBusy := NewArray2DHashSet(nil, nil)
lookBusy := newArray2DHashSet(nil, nil)
seeThruPreds := false // fail to get lookahead upon pred
la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
@@ -75,7 +75,7 @@ func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet
if ctx != nil {
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
la.look1(s, stopState, lookContext, r, NewArray2DHashSet(nil, nil), NewBitSet(), seeThruPreds, true)
la.look1(s, stopState, lookContext, r, newArray2DHashSet(nil, nil), NewBitSet(), seeThruPreds, true)
return r
}


@@ -425,7 +425,7 @@ func (p *BaseParser) Consume() Token {
}
hasListener := p.parseListeners != nil && len(p.parseListeners) > 0
if p.BuildParseTrees || hasListener {
if p.errHandler.inErrorRecoveryMode(p) {
if p.errHandler.InErrorRecoveryMode(p) {
node := p.ctx.AddErrorNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {


@@ -96,14 +96,18 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
// Now we are certain to have a specific decision's DFA
// But, do we still need an initial state?
var s0 *DFAState
p.atn.stateMu.RLock()
if dfa.getPrecedenceDfa() {
p.atn.edgeMu.RLock()
// the start state for a precedence DFA depends on the current
// parser precedence, and is provided by a DFA method.
s0 = dfa.getPrecedenceStartState(p.parser.GetPrecedence())
p.atn.edgeMu.RUnlock()
} else {
// the start state for a "regular" DFA is just s0
s0 = dfa.getS0()
}
p.atn.stateMu.RUnlock()
if s0 == nil {
if outerContext == nil {
@@ -114,21 +118,10 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
" exec LA(1)==" + p.getLookaheadName(input) +
", outerContext=" + outerContext.String(p.parser.GetRuleNames(), nil))
}
// If p is not a precedence DFA, we check the ATN start state
// to determine if p ATN start state is the decision for the
// closure block that determines whether a precedence rule
// should continue or complete.
t2 := dfa.atnStartState
t, ok := t2.(*StarLoopEntryState)
if !dfa.getPrecedenceDfa() && ok {
if t.precedenceRuleDecision {
dfa.setPrecedenceDfa(true)
}
}
fullCtx := false
s0Closure := p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
p.atn.stateMu.Lock()
if dfa.getPrecedenceDfa() {
// If p is a precedence DFA, we use applyPrecedenceFilter
// to convert the computed start state to a precedence start
@@ -139,12 +132,16 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
dfa.s0.configs = s0Closure
s0Closure = p.applyPrecedenceFilter(s0Closure)
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
p.atn.edgeMu.Lock()
dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
p.atn.edgeMu.Unlock()
} else {
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.setS0(s0)
}
p.atn.stateMu.Unlock()
}
alt := p.execATN(dfa, s0, input, index, outerContext)
if ParserATNSimulatorDebug {
fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil))
@@ -295,11 +292,16 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
// already cached
func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) *DFAState {
edges := previousD.getEdges()
if edges == nil || t+1 < 0 || t+1 >= len(edges) {
if t+1 < 0 {
return nil
}
p.atn.edgeMu.RLock()
defer p.atn.edgeMu.RUnlock()
edges := previousD.getEdges()
if edges == nil || t+1 >= len(edges) {
return nil
}
return previousD.getIthEdge(t + 1)
}
@@ -568,7 +570,7 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt
//
if reach == nil {
reach = NewBaseATNConfigSet(fullCtx)
closureBusy := NewArray2DHashSet(nil, nil)
closureBusy := newArray2DHashSet(nil, nil)
treatEOFAsEpsilon := t == TokenEOF
amount := len(intermediate.configs)
for k := 0; k < amount; k++ {
@@ -663,7 +665,7 @@ func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, full
for i := 0; i < len(a.GetTransitions()); i++ {
target := a.GetTransitions()[i].getTarget()
c := NewBaseATNConfig6(target, i+1, initialContext)
closureBusy := NewArray2DHashSet(nil, nil)
closureBusy := newArray2DHashSet(nil, nil)
p.closure(c, configs, closureBusy, true, fullCtx, false)
}
return configs
@@ -1446,14 +1448,18 @@ func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFA
if to == nil {
return nil
}
p.atn.stateMu.Lock()
to = p.addDFAState(dfa, to) // used existing if possible not incoming
p.atn.stateMu.Unlock()
if from == nil || t < -1 || t > p.atn.maxTokenType {
return to
}
p.atn.edgeMu.Lock()
if from.getEdges() == nil {
from.setEdges(make([]*DFAState, p.atn.maxTokenType+1+1))
}
from.setIthEdge(t+1, to) // connect
p.atn.edgeMu.Unlock()
if ParserATNSimulatorDebug {
var names []string


@@ -49,7 +49,7 @@ var tokenTypeMapCache = make(map[string]int)
var ruleIndexMapCache = make(map[string]int)
func (b *BaseRecognizer) checkVersion(toolVersion string) {
runtimeVersion := "4.9.3"
runtimeVersion := "4.10.1"
if runtimeVersion != toolVersion {
fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
}


@@ -193,7 +193,7 @@ type AND struct {
func NewAND(a, b SemanticContext) *AND {
operands := NewArray2DHashSet(nil, nil)
operands := newArray2DHashSet(nil, nil)
if aa, ok := a.(*AND); ok {
for _, o := range aa.opnds {
operands.Add(o)
@@ -345,7 +345,7 @@ type OR struct {
func NewOR(a, b SemanticContext) *OR {
operands := NewArray2DHashSet(nil, nil)
operands := newArray2DHashSet(nil, nil)
if aa, ok := a.(*OR); ok {
for _, o := range aa.opnds {
operands.Add(o)


@@ -64,7 +64,7 @@ type BaseParseTreeVisitor struct{}
var _ ParseTreeVisitor = &BaseParseTreeVisitor{}
func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return nil }
func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return tree.Accept(v) }
func (v *BaseParseTreeVisitor) VisitChildren(node RuleNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitTerminal(node TerminalNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { return nil }
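Editor's note: BaseParseTreeVisitor.Visit previously returned nil unconditionally; it now performs standard visitor double dispatch by delegating to tree.Accept(v). A self-contained sketch of why that matters, with simplified stand-in types:

```go
package main

import "fmt"

type visitor interface {
	visitLeaf(l leaf) string
}

type parseTree interface {
	accept(v visitor) string
}

type leaf struct{ label string }

// accept routes back to the visitor method for this concrete node type —
// the double dispatch that Visit now triggers via tree.Accept(v).
func (l leaf) accept(v visitor) string { return v.visitLeaf(l) }

type printer struct{}

func (printer) visitLeaf(l leaf) string { return "leaf:" + l.label }

func main() {
	var t parseTree = leaf{"x"}
	fmt.Println(t.accept(printer{})) // "leaf:x", not a hard-coded nil
}
```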


@@ -121,13 +121,18 @@ func (b *BitSet) clear(index int) {
}
func (b *BitSet) or(set *BitSet) {
size := intMax(b.minLen(), set.minLen())
if size > len(b.data) {
data := make([]uint64, size)
// Get min size necessary to represent the bits in both sets.
bLen := b.minLen()
setLen := set.minLen()
maxLen := intMax(bLen, setLen)
if maxLen > len(b.data) {
// Increase len(b.data) to represent the bits in both sets.
data := make([]uint64, maxLen)
copy(data, b.data)
b.data = data
}
for i := 0; i < size; i++ {
// len(b.data) is at least setLen.
for i := 0; i < setLen; i++ {
b.data[i] |= set.data[i]
}
}
@@ -164,12 +169,18 @@ func (b *BitSet) equals(other interface{}) bool {
return true
}
if len(b.data) != len(otherBitSet.data) {
// We only compare set bits, so we cannot rely on the two slices having the same size. It's
// possible for two BitSets to have different slice lengths but the same set bits. So we only
// compare the relevant words and ignore the trailing zeros.
bLen := b.minLen()
otherLen := otherBitSet.minLen()
if bLen != otherLen {
return false
}
for k := range b.data {
if b.data[k] != otherBitSet.data[k] {
for i := 0; i < bLen; i++ {
if b.data[i] != otherBitSet.data[i] {
return false
}
}
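Editor's note: both BitSet changes deal with trailing zero words — or only copies the words the source set actually uses, and equals compares minLen words instead of raw slice lengths, since two BitSets can hold identical bits in differently sized backing arrays. A worked example of the case the old equals got wrong, with a stand-in for minLen:

```go
package main

import "fmt"

// minLen counts the words up to and including the last nonzero one,
// mirroring what BitSet.minLen is used for in the equals fix.
func minLen(data []uint64) int {
	n := 0
	for i, w := range data {
		if w != 0 {
			n = i + 1
		}
	}
	return n
}

func main() {
	a := []uint64{0b1011}          // one word
	b := []uint64{0b1011, 0, 0, 0} // same bits, longer backing array
	fmt.Println(len(a) == len(b))       // false: the old comparison fails here
	fmt.Println(minLen(a) == minLen(b)) // true: same significant words
}
```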


@@ -8,7 +8,7 @@ const (
_loadFactor = 0.75
)
var _ Set = (*Array2DHashSet)(nil)
var _ Set = (*array2DHashSet)(nil)
type Set interface {
Add(value interface{}) (added interface{})
@@ -19,7 +19,7 @@ type Set interface {
Each(f func(interface{}) bool)
}
type Array2DHashSet struct {
type array2DHashSet struct {
buckets [][]interface{}
hashcodeFunction func(interface{}) int
equalsFunction func(interface{}, interface{}) bool
@@ -31,7 +31,7 @@ type Array2DHashSet struct {
initialBucketCapacity int
}
func (as *Array2DHashSet) Each(f func(interface{}) bool) {
func (as *array2DHashSet) Each(f func(interface{}) bool) {
if as.Len() < 1 {
return
}
@@ -48,7 +48,7 @@ func (as *Array2DHashSet) Each(f func(interface{}) bool) {
}
}
func (as *Array2DHashSet) Values() []interface{} {
func (as *array2DHashSet) Values() []interface{} {
if as.Len() < 1 {
return nil
}
@@ -61,18 +61,18 @@ func (as *Array2DHashSet) Values() []interface{} {
return values
}
func (as *Array2DHashSet) Contains(value interface{}) bool {
func (as *array2DHashSet) Contains(value interface{}) bool {
return as.Get(value) != nil
}
func (as *Array2DHashSet) Add(value interface{}) interface{} {
func (as *array2DHashSet) Add(value interface{}) interface{} {
if as.n > as.threshold {
as.expand()
}
return as.innerAdd(value)
}
func (as *Array2DHashSet) expand() {
func (as *array2DHashSet) expand() {
old := as.buckets
as.currentPrime += 4
@@ -120,11 +120,11 @@ func (as *Array2DHashSet) expand() {
}
}
func (as *Array2DHashSet) Len() int {
func (as *array2DHashSet) Len() int {
return as.n
}
func (as *Array2DHashSet) Get(o interface{}) interface{} {
func (as *array2DHashSet) Get(o interface{}) interface{} {
if o == nil {
return nil
}
@@ -147,7 +147,7 @@ func (as *Array2DHashSet) Get(o interface{}) interface{} {
return nil
}
func (as *Array2DHashSet) innerAdd(o interface{}) interface{} {
func (as *array2DHashSet) innerAdd(o interface{}) interface{} {
b := as.getBuckets(o)
bucket := as.buckets[b]
@@ -187,25 +187,25 @@ func (as *Array2DHashSet) innerAdd(o interface{}) interface{} {
return o
}
func (as *Array2DHashSet) getBuckets(value interface{}) int {
func (as *array2DHashSet) getBuckets(value interface{}) int {
hash := as.hashcodeFunction(value)
return hash & (len(as.buckets) - 1)
}
func (as *Array2DHashSet) createBuckets(cap int) [][]interface{} {
func (as *array2DHashSet) createBuckets(cap int) [][]interface{} {
return make([][]interface{}, cap)
}
func (as *Array2DHashSet) createBucket(cap int) []interface{} {
func (as *array2DHashSet) createBucket(cap int) []interface{} {
return make([]interface{}, cap)
}
func NewArray2DHashSetWithCap(
func newArray2DHashSetWithCap(
hashcodeFunction func(interface{}) int,
equalsFunction func(interface{}, interface{}) bool,
initCap int,
initBucketCap int,
) *Array2DHashSet {
) *array2DHashSet {
if hashcodeFunction == nil {
hashcodeFunction = standardHashFunction
}
@@ -214,7 +214,7 @@ func NewArray2DHashSetWithCap(
equalsFunction = standardEqualsFunction
}
ret := &Array2DHashSet{
ret := &array2DHashSet{
hashcodeFunction: hashcodeFunction,
equalsFunction: equalsFunction,
@@ -229,9 +229,9 @@ func NewArray2DHashSetWithCap(
return ret
}
func NewArray2DHashSet(
func newArray2DHashSet(
hashcodeFunction func(interface{}) int,
equalsFunction func(interface{}, interface{}) bool,
) *Array2DHashSet {
return NewArray2DHashSetWithCap(hashcodeFunction, equalsFunction, _initalCapacity, _initalBucketCapacity)
) *array2DHashSet {
return newArray2DHashSetWithCap(hashcodeFunction, equalsFunction, _initalCapacity, _initalBucketCapacity)
}


@@ -23,7 +23,7 @@ res, err := expression.Evaluate(event)
To regenerate the parser, make sure you have [ANTLR4 installed](https://github.com/antlr/antlr4/blob/master/doc/getting-started.md) and then run:
```shell
antlr4 -Dlanguage=Go -package gen -o gen -visitor -no-listener CESQLParser.g4
antlr4 -v 4.10.1 -Dlanguage=Go -package gen -o gen -visitor -no-listener CESQLParser.g4
```
Then you need to run this sed command as a workaround until this ANTLR [issue](https://github.com/antlr/antlr4/issues/2433) is resolved. Without this, building for 32-bit platforms will throw an int overflow error:


@@ -84,4 +84,4 @@ setExpression
atn:
[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 35, 112, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 41, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 57, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 63, 10, 3, 3, 3, 3, 3, 7, 3, 67, 10, 3, 12, 3, 14, 3, 70, 11, 3, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 76, 10, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 7, 10, 92, 10, 10, 12, 10, 14, 10, 95, 11, 10, 5, 10, 97, 10, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 11, 7, 11, 105, 10, 11, 12, 11, 14, 11, 108, 11, 11, 3, 11, 3, 11, 3, 11, 2, 3, 4, 12, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 2, 10, 3, 2, 13, 15, 3, 2, 16, 17, 3, 2, 18, 24, 3, 2, 9, 11, 3, 2, 33, 34, 4, 2, 33, 33, 35, 35, 3, 2, 28, 29, 3, 2, 30, 31, 2, 120, 2, 22, 3, 2, 2, 2, 4, 40, 3, 2, 2, 2, 6, 75, 3, 2, 2, 2, 8, 77, 3, 2, 2, 2, 10, 79, 3, 2, 2, 2, 12, 81, 3, 2, 2, 2, 14, 83, 3, 2, 2, 2, 16, 85, 3, 2, 2, 2, 18, 87, 3, 2, 2, 2, 20, 100, 3, 2, 2, 2, 22, 23, 5, 4, 3, 2, 23, 24, 7, 2, 2, 3, 24, 3, 3, 2, 2, 2, 25, 26, 8, 3, 1, 2, 26, 27, 5, 10, 6, 2, 27, 28, 5, 18, 10, 2, 28, 41, 3, 2, 2, 2, 29, 30, 7, 12, 2, 2, 30, 41, 5, 4, 3, 13, 31, 32, 7, 17, 2, 2, 32, 41, 5, 4, 3, 12, 33, 34, 7, 26, 2, 2, 34, 41, 5, 8, 5, 2, 35, 36, 7, 4, 2, 2, 36, 37, 5, 4, 3, 2, 37, 38, 7, 5, 2, 2, 38, 41, 3, 2, 2, 2, 39, 41, 5, 6, 4, 2, 40, 25, 3, 2, 2, 2, 40, 29, 3, 2, 2, 2, 40, 31, 3, 2, 2, 2, 40, 33, 3, 2, 2, 2, 40, 35, 3, 2, 2, 2, 40, 39, 3, 2, 2, 2, 41, 68, 3, 2, 2, 2, 42, 43, 12, 8, 2, 2, 43, 44, 9, 2, 2, 2, 44, 67, 5, 4, 3, 9, 45, 46, 12, 7, 2, 2, 46, 47, 9, 3, 2, 2, 47, 67, 5, 4, 3, 8, 48, 49, 12, 6, 2, 2, 49, 50, 9, 4, 2, 2, 50, 67, 5, 4, 3, 7, 51, 52, 12, 5, 2, 2, 52, 53, 9, 5, 2, 2, 53, 67, 5, 4, 3, 5, 54, 56, 12, 11, 2, 2, 55, 57, 7, 12, 2, 2, 56, 55, 3, 2, 2, 2, 56, 57, 3, 2, 2, 2, 57, 58, 3, 2, 2, 2, 58, 59, 7, 25, 2, 2, 59, 67, 5, 14, 8, 2, 60, 62, 12, 9, 2, 2, 61, 63, 7, 12, 2, 2, 62, 61, 3, 2, 2, 2, 62, 63, 3, 2, 2, 2, 63, 64, 3, 2, 2, 2, 64, 65, 7, 27, 2, 2, 65, 67, 5, 20, 11, 2, 66, 42, 3, 2, 2, 2, 66, 45, 3, 2, 2, 2, 66, 48, 3, 2, 2, 2, 66, 51, 3, 2, 2, 2, 66, 54, 3, 2, 2, 2, 66, 60, 3, 2, 2, 2, 67, 70, 3, 2, 2, 2, 68, 66, 3, 2, 2, 2, 68, 69, 3, 2, 2, 2, 69, 5, 3, 2, 2, 2, 70, 68, 3, 2, 2, 2, 71, 76, 5, 12, 7, 2, 72, 76, 5, 16, 9, 2, 73, 76, 5, 14, 8, 2, 74, 76, 5, 8, 5, 2, 75, 71, 3, 2, 2, 2, 75, 72, 3, 2, 2, 2, 75, 73, 3, 2, 2, 2, 75, 74, 3, 2, 2, 2, 76, 7, 3, 2, 2, 2, 77, 78, 9, 6, 2, 2, 78, 9, 3, 2, 2, 2, 79, 80, 9, 7, 2, 2, 80, 11, 3, 2, 2, 2, 81, 82, 9, 8, 2, 2, 82, 13, 3, 2, 2, 2, 83, 84, 9, 9, 2, 2, 84, 15, 3, 2, 2, 2, 85, 86, 7, 32, 2, 2, 86, 17, 3, 2, 2, 2, 87, 96, 7, 4, 2, 2, 88, 93, 5, 4, 3, 2, 89, 90, 7, 6, 2, 2, 90, 92, 5, 4, 3, 2, 91, 89, 3, 2, 2, 2, 92, 95, 3, 2, 2, 2, 93, 91, 3, 2, 2, 2, 93, 94, 3, 2, 2, 2, 94, 97, 3, 2, 2, 2, 95, 93, 3, 2, 2, 2, 96, 88, 3, 2, 2, 2, 96, 97, 3, 2, 2, 2, 97, 98, 3, 2, 2, 2, 98, 99, 7, 5, 2, 2, 99, 19, 3, 2, 2, 2, 100, 101, 7, 4, 2, 2, 101, 106, 5, 4, 3, 2, 102, 103, 7, 6, 2, 2, 103, 105, 5, 4, 3, 2, 104, 102, 3, 2, 2, 2, 105, 108, 3, 2, 2, 2, 106, 104, 3, 2, 2, 2, 106, 107, 3, 2, 2, 2, 107, 109, 3, 2, 2, 2, 108, 106, 3, 2, 2, 2, 109, 110, 7, 5, 2, 2, 110, 21, 3, 2, 2, 2, 11, 40, 56, 62, 66, 68, 75, 93, 96, 106]
[4, 1, 33, 110, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 39, 8, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 55, 8, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 61, 8, 1, 1, 1, 1, 1, 5, 1, 65, 8, 1, 10, 1, 12, 1, 68, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 74, 8, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 90, 8, 8, 10, 8, 12, 8, 93, 9, 8, 3, 8, 95, 8, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 103, 8, 9, 10, 9, 12, 9, 106, 9, 9, 1, 9, 1, 9, 1, 9, 0, 1, 2, 10, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 0, 8, 1, 0, 11, 13, 1, 0, 14, 15, 1, 0, 16, 22, 1, 0, 7, 9, 1, 0, 31, 32, 2, 0, 31, 31, 33, 33, 1, 0, 26, 27, 1, 0, 28, 29, 118, 0, 20, 1, 0, 0, 0, 2, 38, 1, 0, 0, 0, 4, 73, 1, 0, 0, 0, 6, 75, 1, 0, 0, 0, 8, 77, 1, 0, 0, 0, 10, 79, 1, 0, 0, 0, 12, 81, 1, 0, 0, 0, 14, 83, 1, 0, 0, 0, 16, 85, 1, 0, 0, 0, 18, 98, 1, 0, 0, 0, 20, 21, 3, 2, 1, 0, 21, 22, 5, 0, 0, 1, 22, 1, 1, 0, 0, 0, 23, 24, 6, 1, -1, 0, 24, 25, 3, 8, 4, 0, 25, 26, 3, 16, 8, 0, 26, 39, 1, 0, 0, 0, 27, 28, 5, 10, 0, 0, 28, 39, 3, 2, 1, 11, 29, 30, 5, 15, 0, 0, 30, 39, 3, 2, 1, 10, 31, 32, 5, 24, 0, 0, 32, 39, 3, 6, 3, 0, 33, 34, 5, 2, 0, 0, 34, 35, 3, 2, 1, 0, 35, 36, 5, 3, 0, 0, 36, 39, 1, 0, 0, 0, 37, 39, 3, 4, 2, 0, 38, 23, 1, 0, 0, 0, 38, 27, 1, 0, 0, 0, 38, 29, 1, 0, 0, 0, 38, 31, 1, 0, 0, 0, 38, 33, 1, 0, 0, 0, 38, 37, 1, 0, 0, 0, 39, 66, 1, 0, 0, 0, 40, 41, 10, 6, 0, 0, 41, 42, 7, 0, 0, 0, 42, 65, 3, 2, 1, 7, 43, 44, 10, 5, 0, 0, 44, 45, 7, 1, 0, 0, 45, 65, 3, 2, 1, 6, 46, 47, 10, 4, 0, 0, 47, 48, 7, 2, 0, 0, 48, 65, 3, 2, 1, 5, 49, 50, 10, 3, 0, 0, 50, 51, 7, 3, 0, 0, 51, 65, 3, 2, 1, 3, 52, 54, 10, 9, 0, 0, 53, 55, 5, 10, 0, 0, 54, 53, 1, 0, 0, 0, 54, 55, 1, 0, 0, 0, 55, 56, 1, 0, 0, 0, 56, 57, 5, 23, 0, 0, 57, 65, 3, 12, 6, 0, 58, 60, 10, 7, 0, 0, 59, 61, 5, 10, 0, 0, 60, 59, 1, 0, 0, 0, 60, 61, 1, 0, 0, 0, 61, 62, 1, 0, 0, 0, 62, 63, 5, 25, 0, 0, 63, 65, 3, 18, 9, 0, 64, 40, 1, 0, 0, 0, 64, 43, 1, 0, 0, 0, 64, 46, 1, 0, 0, 0, 64, 49, 1, 0, 0, 0, 64, 52, 1, 0, 0, 0, 64, 58, 1, 0, 0, 0, 65, 68, 1, 0, 0, 0, 66, 64, 1, 0, 0, 0, 66, 67, 1, 0, 0, 0, 67, 3, 1, 0, 0, 0, 68, 66, 1, 0, 0, 0, 69, 74, 3, 10, 5, 0, 70, 74, 3, 14, 7, 0, 71, 74, 3, 12, 6, 0, 72, 74, 3, 6, 3, 0, 73, 69, 1, 0, 0, 0, 73, 70, 1, 0, 0, 0, 73, 71, 1, 0, 0, 0, 73, 72, 1, 0, 0, 0, 74, 5, 1, 0, 0, 0, 75, 76, 7, 4, 0, 0, 76, 7, 1, 0, 0, 0, 77, 78, 7, 5, 0, 0, 78, 9, 1, 0, 0, 0, 79, 80, 7, 6, 0, 0, 80, 11, 1, 0, 0, 0, 81, 82, 7, 7, 0, 0, 82, 13, 1, 0, 0, 0, 83, 84, 5, 30, 0, 0, 84, 15, 1, 0, 0, 0, 85, 94, 5, 2, 0, 0, 86, 91, 3, 2, 1, 0, 87, 88, 5, 4, 0, 0, 88, 90, 3, 2, 1, 0, 89, 87, 1, 0, 0, 0, 90, 93, 1, 0, 0, 0, 91, 89, 1, 0, 0, 0, 91, 92, 1, 0, 0, 0, 92, 95, 1, 0, 0, 0, 93, 91, 1, 0, 0, 0, 94, 86, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 97, 5, 3, 0, 0, 97, 17, 1, 0, 0, 0, 98, 99, 5, 2, 0, 0, 99, 104, 3, 2, 1, 0, 100, 101, 5, 4, 0, 0, 101, 103, 3, 2, 1, 0, 102, 100, 1, 0, 0, 0, 103, 106, 1, 0, 0, 0, 104, 102, 1, 0, 0, 0, 104, 105, 1, 0, 0, 0, 105, 107, 1, 0, 0, 0, 106, 104, 1, 0, 0, 0, 107, 108, 5, 3, 0, 0, 108, 19, 1, 0, 0, 0, 9, 38, 54, 60, 64, 66, 73, 91, 94, 104]

File diff suppressed because one or more lines are too long

View File

@ -1,9 +1,4 @@
/*
Copyright 2021 The CloudEvents Authors
SPDX-License-Identifier: Apache-2.0
*/
// Code generated from CESQLParser.g4 by ANTLR 4.9. DO NOT EDIT.
// Code generated from CESQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CESQLParser
import "github.com/antlr/antlr4/runtime/Go/antlr"

View File

@ -1,14 +1,10 @@
/*
Copyright 2021 The CloudEvents Authors
SPDX-License-Identifier: Apache-2.0
*/
// Code generated from CESQLParser.g4 by ANTLR 4.9. DO NOT EDIT.
// Code generated from CESQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen
import (
"fmt"
"sync"
"unicode"
"github.com/antlr/antlr4/runtime/Go/antlr"
@ -16,148 +12,9 @@ import (
// Suppress unused import error
var _ = fmt.Printf
var _ = sync.Once{}
var _ = unicode.IsLetter
var serializedLexerAtn = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 35, 237,
8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7,
9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12,
4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4,
18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23,
9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9,
28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33,
4, 34, 9, 34, 4, 35, 9, 35, 4, 36, 9, 36, 4, 37, 9, 37, 4, 38, 9, 38, 4,
39, 9, 39, 4, 40, 9, 40, 3, 2, 6, 2, 83, 10, 2, 13, 2, 14, 2, 84, 3, 2,
3, 2, 3, 3, 6, 3, 90, 10, 3, 13, 3, 14, 3, 91, 3, 4, 3, 4, 3, 4, 3, 4,
3, 4, 3, 4, 7, 4, 100, 10, 4, 12, 4, 14, 4, 103, 11, 4, 3, 4, 3, 4, 3,
5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 7, 5, 113, 10, 5, 12, 5, 14, 5, 116, 11,
5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 7, 3, 7, 7, 7, 124, 10, 7, 12, 7, 14, 7,
127, 11, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12,
3, 12, 3, 13, 3, 13, 5, 13, 141, 10, 13, 3, 14, 3, 14, 3, 14, 3, 14, 3,
15, 3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 17, 3, 17, 3, 17, 3, 17,
3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3,
23, 3, 23, 3, 24, 3, 24, 3, 24, 3, 25, 3, 25, 3, 26, 3, 26, 3, 26, 3, 27,
3, 27, 3, 28, 3, 28, 3, 28, 3, 29, 3, 29, 3, 29, 3, 30, 3, 30, 3, 30, 3,
30, 3, 30, 3, 31, 3, 31, 3, 31, 3, 31, 3, 31, 3, 31, 3, 31, 3, 32, 3, 32,
3, 32, 3, 33, 3, 33, 3, 33, 3, 33, 3, 33, 3, 34, 3, 34, 3, 34, 3, 34, 3,
34, 3, 34, 3, 35, 3, 35, 3, 36, 3, 36, 3, 37, 6, 37, 217, 10, 37, 13, 37,
14, 37, 218, 3, 38, 6, 38, 222, 10, 38, 13, 38, 14, 38, 223, 3, 39, 6,
39, 227, 10, 39, 13, 39, 14, 39, 228, 3, 40, 3, 40, 7, 40, 233, 10, 40,
12, 40, 14, 40, 236, 11, 40, 2, 2, 41, 3, 3, 5, 2, 7, 2, 9, 2, 11, 2, 13,
2, 15, 4, 17, 5, 19, 6, 21, 7, 23, 8, 25, 2, 27, 9, 29, 10, 31, 11, 33,
12, 35, 13, 37, 14, 39, 15, 41, 16, 43, 17, 45, 18, 47, 19, 49, 20, 51,
21, 53, 22, 55, 23, 57, 24, 59, 25, 61, 26, 63, 27, 65, 28, 67, 29, 69,
30, 71, 31, 73, 32, 75, 33, 77, 34, 79, 35, 3, 2, 10, 5, 2, 11, 12, 15,
15, 34, 34, 5, 2, 50, 59, 67, 92, 99, 124, 4, 2, 36, 36, 94, 94, 4, 2,
41, 41, 94, 94, 3, 2, 50, 59, 3, 2, 67, 92, 4, 2, 67, 92, 97, 97, 4, 2,
67, 92, 99, 124, 2, 244, 2, 3, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3,
2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 27,
3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2,
35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2,
2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2,
2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, 2,
2, 2, 2, 59, 3, 2, 2, 2, 2, 61, 3, 2, 2, 2, 2, 63, 3, 2, 2, 2, 2, 65, 3,
2, 2, 2, 2, 67, 3, 2, 2, 2, 2, 69, 3, 2, 2, 2, 2, 71, 3, 2, 2, 2, 2, 73,
3, 2, 2, 2, 2, 75, 3, 2, 2, 2, 2, 77, 3, 2, 2, 2, 2, 79, 3, 2, 2, 2, 3,
82, 3, 2, 2, 2, 5, 89, 3, 2, 2, 2, 7, 93, 3, 2, 2, 2, 9, 106, 3, 2, 2,
2, 11, 119, 3, 2, 2, 2, 13, 121, 3, 2, 2, 2, 15, 128, 3, 2, 2, 2, 17, 130,
3, 2, 2, 2, 19, 132, 3, 2, 2, 2, 21, 134, 3, 2, 2, 2, 23, 136, 3, 2, 2,
2, 25, 140, 3, 2, 2, 2, 27, 142, 3, 2, 2, 2, 29, 146, 3, 2, 2, 2, 31, 149,
3, 2, 2, 2, 33, 153, 3, 2, 2, 2, 35, 157, 3, 2, 2, 2, 37, 159, 3, 2, 2,
2, 39, 161, 3, 2, 2, 2, 41, 163, 3, 2, 2, 2, 43, 165, 3, 2, 2, 2, 45, 167,
3, 2, 2, 2, 47, 169, 3, 2, 2, 2, 49, 172, 3, 2, 2, 2, 51, 174, 3, 2, 2,
2, 53, 177, 3, 2, 2, 2, 55, 179, 3, 2, 2, 2, 57, 182, 3, 2, 2, 2, 59, 185,
3, 2, 2, 2, 61, 190, 3, 2, 2, 2, 63, 197, 3, 2, 2, 2, 65, 200, 3, 2, 2,
2, 67, 205, 3, 2, 2, 2, 69, 211, 3, 2, 2, 2, 71, 213, 3, 2, 2, 2, 73, 216,
3, 2, 2, 2, 75, 221, 3, 2, 2, 2, 77, 226, 3, 2, 2, 2, 79, 230, 3, 2, 2,
2, 81, 83, 9, 2, 2, 2, 82, 81, 3, 2, 2, 2, 83, 84, 3, 2, 2, 2, 84, 82,
3, 2, 2, 2, 84, 85, 3, 2, 2, 2, 85, 86, 3, 2, 2, 2, 86, 87, 8, 2, 2, 2,
87, 4, 3, 2, 2, 2, 88, 90, 9, 3, 2, 2, 89, 88, 3, 2, 2, 2, 90, 91, 3, 2,
2, 2, 91, 89, 3, 2, 2, 2, 91, 92, 3, 2, 2, 2, 92, 6, 3, 2, 2, 2, 93, 101,
7, 36, 2, 2, 94, 95, 7, 94, 2, 2, 95, 100, 11, 2, 2, 2, 96, 97, 7, 36,
2, 2, 97, 100, 7, 36, 2, 2, 98, 100, 10, 4, 2, 2, 99, 94, 3, 2, 2, 2, 99,
96, 3, 2, 2, 2, 99, 98, 3, 2, 2, 2, 100, 103, 3, 2, 2, 2, 101, 99, 3, 2,
2, 2, 101, 102, 3, 2, 2, 2, 102, 104, 3, 2, 2, 2, 103, 101, 3, 2, 2, 2,
104, 105, 7, 36, 2, 2, 105, 8, 3, 2, 2, 2, 106, 114, 7, 41, 2, 2, 107,
108, 7, 94, 2, 2, 108, 113, 11, 2, 2, 2, 109, 110, 7, 41, 2, 2, 110, 113,
7, 41, 2, 2, 111, 113, 10, 5, 2, 2, 112, 107, 3, 2, 2, 2, 112, 109, 3,
2, 2, 2, 112, 111, 3, 2, 2, 2, 113, 116, 3, 2, 2, 2, 114, 112, 3, 2, 2,
2, 114, 115, 3, 2, 2, 2, 115, 117, 3, 2, 2, 2, 116, 114, 3, 2, 2, 2, 117,
118, 7, 41, 2, 2, 118, 10, 3, 2, 2, 2, 119, 120, 9, 6, 2, 2, 120, 12, 3,
2, 2, 2, 121, 125, 9, 7, 2, 2, 122, 124, 9, 8, 2, 2, 123, 122, 3, 2, 2,
2, 124, 127, 3, 2, 2, 2, 125, 123, 3, 2, 2, 2, 125, 126, 3, 2, 2, 2, 126,
14, 3, 2, 2, 2, 127, 125, 3, 2, 2, 2, 128, 129, 7, 42, 2, 2, 129, 16, 3,
2, 2, 2, 130, 131, 7, 43, 2, 2, 131, 18, 3, 2, 2, 2, 132, 133, 7, 46, 2,
2, 133, 20, 3, 2, 2, 2, 134, 135, 7, 41, 2, 2, 135, 22, 3, 2, 2, 2, 136,
137, 7, 36, 2, 2, 137, 24, 3, 2, 2, 2, 138, 141, 5, 21, 11, 2, 139, 141,
5, 23, 12, 2, 140, 138, 3, 2, 2, 2, 140, 139, 3, 2, 2, 2, 141, 26, 3, 2,
2, 2, 142, 143, 7, 67, 2, 2, 143, 144, 7, 80, 2, 2, 144, 145, 7, 70, 2,
2, 145, 28, 3, 2, 2, 2, 146, 147, 7, 81, 2, 2, 147, 148, 7, 84, 2, 2, 148,
30, 3, 2, 2, 2, 149, 150, 7, 90, 2, 2, 150, 151, 7, 81, 2, 2, 151, 152,
7, 84, 2, 2, 152, 32, 3, 2, 2, 2, 153, 154, 7, 80, 2, 2, 154, 155, 7, 81,
2, 2, 155, 156, 7, 86, 2, 2, 156, 34, 3, 2, 2, 2, 157, 158, 7, 44, 2, 2,
158, 36, 3, 2, 2, 2, 159, 160, 7, 49, 2, 2, 160, 38, 3, 2, 2, 2, 161, 162,
7, 39, 2, 2, 162, 40, 3, 2, 2, 2, 163, 164, 7, 45, 2, 2, 164, 42, 3, 2,
2, 2, 165, 166, 7, 47, 2, 2, 166, 44, 3, 2, 2, 2, 167, 168, 7, 63, 2, 2,
168, 46, 3, 2, 2, 2, 169, 170, 7, 35, 2, 2, 170, 171, 7, 63, 2, 2, 171,
48, 3, 2, 2, 2, 172, 173, 7, 64, 2, 2, 173, 50, 3, 2, 2, 2, 174, 175, 7,
64, 2, 2, 175, 176, 7, 63, 2, 2, 176, 52, 3, 2, 2, 2, 177, 178, 7, 62,
2, 2, 178, 54, 3, 2, 2, 2, 179, 180, 7, 62, 2, 2, 180, 181, 7, 64, 2, 2,
181, 56, 3, 2, 2, 2, 182, 183, 7, 62, 2, 2, 183, 184, 7, 63, 2, 2, 184,
58, 3, 2, 2, 2, 185, 186, 7, 78, 2, 2, 186, 187, 7, 75, 2, 2, 187, 188,
7, 77, 2, 2, 188, 189, 7, 71, 2, 2, 189, 60, 3, 2, 2, 2, 190, 191, 7, 71,
2, 2, 191, 192, 7, 90, 2, 2, 192, 193, 7, 75, 2, 2, 193, 194, 7, 85, 2,
2, 194, 195, 7, 86, 2, 2, 195, 196, 7, 85, 2, 2, 196, 62, 3, 2, 2, 2, 197,
198, 7, 75, 2, 2, 198, 199, 7, 80, 2, 2, 199, 64, 3, 2, 2, 2, 200, 201,
7, 86, 2, 2, 201, 202, 7, 84, 2, 2, 202, 203, 7, 87, 2, 2, 203, 204, 7,
71, 2, 2, 204, 66, 3, 2, 2, 2, 205, 206, 7, 72, 2, 2, 206, 207, 7, 67,
2, 2, 207, 208, 7, 78, 2, 2, 208, 209, 7, 85, 2, 2, 209, 210, 7, 71, 2,
2, 210, 68, 3, 2, 2, 2, 211, 212, 5, 7, 4, 2, 212, 70, 3, 2, 2, 2, 213,
214, 5, 9, 5, 2, 214, 72, 3, 2, 2, 2, 215, 217, 5, 11, 6, 2, 216, 215,
3, 2, 2, 2, 217, 218, 3, 2, 2, 2, 218, 216, 3, 2, 2, 2, 218, 219, 3, 2,
2, 2, 219, 74, 3, 2, 2, 2, 220, 222, 9, 9, 2, 2, 221, 220, 3, 2, 2, 2,
222, 223, 3, 2, 2, 2, 223, 221, 3, 2, 2, 2, 223, 224, 3, 2, 2, 2, 224,
76, 3, 2, 2, 2, 225, 227, 9, 3, 2, 2, 226, 225, 3, 2, 2, 2, 227, 228, 3,
2, 2, 2, 228, 226, 3, 2, 2, 2, 228, 229, 3, 2, 2, 2, 229, 78, 3, 2, 2,
2, 230, 234, 9, 7, 2, 2, 231, 233, 9, 8, 2, 2, 232, 231, 3, 2, 2, 2, 233,
236, 3, 2, 2, 2, 234, 232, 3, 2, 2, 2, 234, 235, 3, 2, 2, 2, 235, 80, 3,
2, 2, 2, 236, 234, 3, 2, 2, 2, 15, 2, 84, 91, 99, 101, 112, 114, 125, 140,
218, 223, 228, 234, 3, 8, 2, 2,
}
var lexerChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
var lexerModeNames = []string{
"DEFAULT_MODE",
}
var lexerLiteralNames = []string{
"", "", "'('", "')'", "','", "'''", "'\"'", "'AND'", "'OR'", "'XOR'", "'NOT'",
"'*'", "'/'", "'%'", "'+'", "'-'", "'='", "'!='", "'>'", "'>='", "'<'",
"'<>'", "'<='", "'LIKE'", "'EXISTS'", "'IN'", "'TRUE'", "'FALSE'",
}
var lexerSymbolicNames = []string{
"", "SPACE", "LR_BRACKET", "RR_BRACKET", "COMMA", "SINGLE_QUOTE_SYMB",
"DOUBLE_QUOTE_SYMB", "AND", "OR", "XOR", "NOT", "STAR", "DIVIDE", "MODULE",
"PLUS", "MINUS", "EQUAL", "NOT_EQUAL", "GREATER", "GREATER_OR_EQUAL", "LESS",
"LESS_GREATER", "LESS_OR_EQUAL", "LIKE", "EXISTS", "IN", "TRUE", "FALSE",
"DQUOTED_STRING_LITERAL", "SQUOTED_STRING_LITERAL", "INTEGER_LITERAL",
"IDENTIFIER", "IDENTIFIER_WITH_NUMBER", "FUNCTION_IDENTIFIER_WITH_UNDERSCORE",
}
var lexerRuleNames = []string{
"SPACE", "ID_LITERAL", "DQUOTA_STRING", "SQUOTA_STRING", "INT_DIGIT", "FN_LITERAL",
"LR_BRACKET", "RR_BRACKET", "COMMA", "SINGLE_QUOTE_SYMB", "DOUBLE_QUOTE_SYMB",
"QUOTE_SYMB", "AND", "OR", "XOR", "NOT", "STAR", "DIVIDE", "MODULE", "PLUS",
"MINUS", "EQUAL", "NOT_EQUAL", "GREATER", "GREATER_OR_EQUAL", "LESS", "LESS_GREATER",
"LESS_OR_EQUAL", "LIKE", "EXISTS", "IN", "TRUE", "FALSE", "DQUOTED_STRING_LITERAL",
"SQUOTED_STRING_LITERAL", "INTEGER_LITERAL", "IDENTIFIER", "IDENTIFIER_WITH_NUMBER",
"FUNCTION_IDENTIFIER_WITH_UNDERSCORE",
}
type CESQLParserLexer struct {
*antlr.BaseLexer
channelNames []string
@ -165,28 +22,186 @@ type CESQLParserLexer struct {
// TODO: EOF string
}
// NewCESQLParserLexer produces a new lexer instance for the optional input antlr.CharStream.
//
// The *CESQLParserLexer instance produced may be reused by calling the SetInputStream method.
// The initial lexer configuration is expensive to construct, and the object is not thread-safe;
// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
// objects can be used in a thread-safe manner.
func NewCESQLParserLexer(input antlr.CharStream) *CESQLParserLexer {
l := new(CESQLParserLexer)
lexerDeserializer := antlr.NewATNDeserializer(nil)
lexerAtn := lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
lexerDecisionToDFA := make([]*antlr.DFA, len(lexerAtn.DecisionToState))
for index, ds := range lexerAtn.DecisionToState {
lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
}
l.BaseLexer = antlr.NewBaseLexer(input)
l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
var cesqlparserlexerLexerStaticData struct {
once sync.Once
serializedATN []int32
channelNames []string
modeNames []string
literalNames []string
symbolicNames []string
ruleNames []string
predictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
l.channelNames = lexerChannelNames
l.modeNames = lexerModeNames
l.RuleNames = lexerRuleNames
l.LiteralNames = lexerLiteralNames
l.SymbolicNames = lexerSymbolicNames
func cesqlparserlexerLexerInit() {
staticData := &cesqlparserlexerLexerStaticData
staticData.channelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
staticData.modeNames = []string{
"DEFAULT_MODE",
}
staticData.literalNames = []string{
"", "", "'('", "')'", "','", "'''", "'\"'", "'AND'", "'OR'", "'XOR'",
"'NOT'", "'*'", "'/'", "'%'", "'+'", "'-'", "'='", "'!='", "'>'", "'>='",
"'<'", "'<>'", "'<='", "'LIKE'", "'EXISTS'", "'IN'", "'TRUE'", "'FALSE'",
}
staticData.symbolicNames = []string{
"", "SPACE", "LR_BRACKET", "RR_BRACKET", "COMMA", "SINGLE_QUOTE_SYMB",
"DOUBLE_QUOTE_SYMB", "AND", "OR", "XOR", "NOT", "STAR", "DIVIDE", "MODULE",
"PLUS", "MINUS", "EQUAL", "NOT_EQUAL", "GREATER", "GREATER_OR_EQUAL",
"LESS", "LESS_GREATER", "LESS_OR_EQUAL", "LIKE", "EXISTS", "IN", "TRUE",
"FALSE", "DQUOTED_STRING_LITERAL", "SQUOTED_STRING_LITERAL", "INTEGER_LITERAL",
"IDENTIFIER", "IDENTIFIER_WITH_NUMBER", "FUNCTION_IDENTIFIER_WITH_UNDERSCORE",
}
staticData.ruleNames = []string{
"SPACE", "ID_LITERAL", "DQUOTA_STRING", "SQUOTA_STRING", "INT_DIGIT",
"FN_LITERAL", "LR_BRACKET", "RR_BRACKET", "COMMA", "SINGLE_QUOTE_SYMB",
"DOUBLE_QUOTE_SYMB", "QUOTE_SYMB", "AND", "OR", "XOR", "NOT", "STAR",
"DIVIDE", "MODULE", "PLUS", "MINUS", "EQUAL", "NOT_EQUAL", "GREATER",
"GREATER_OR_EQUAL", "LESS", "LESS_GREATER", "LESS_OR_EQUAL", "LIKE",
"EXISTS", "IN", "TRUE", "FALSE", "DQUOTED_STRING_LITERAL", "SQUOTED_STRING_LITERAL",
"INTEGER_LITERAL", "IDENTIFIER", "IDENTIFIER_WITH_NUMBER", "FUNCTION_IDENTIFIER_WITH_UNDERSCORE",
}
staticData.predictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 33, 235, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 1, 0, 4, 0, 81, 8, 0, 11, 0, 12, 0,
82, 1, 0, 1, 0, 1, 1, 4, 1, 88, 8, 1, 11, 1, 12, 1, 89, 1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2, 5, 2, 98, 8, 2, 10, 2, 12, 2, 101, 9, 2, 1, 2, 1,
2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 111, 8, 3, 10, 3, 12, 3, 114,
9, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 5, 5, 122, 8, 5, 10, 5, 12, 5,
125, 9, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10,
1, 11, 1, 11, 3, 11, 139, 8, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1,
13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16,
1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1,
21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25,
1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1,
28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30,
1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1,
32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 4, 35, 215, 8, 35, 11, 35, 12, 35,
216, 1, 36, 4, 36, 220, 8, 36, 11, 36, 12, 36, 221, 1, 37, 4, 37, 225,
8, 37, 11, 37, 12, 37, 226, 1, 38, 1, 38, 5, 38, 231, 8, 38, 10, 38, 12,
38, 234, 9, 38, 0, 0, 39, 1, 1, 3, 0, 5, 0, 7, 0, 9, 0, 11, 0, 13, 2, 15,
3, 17, 4, 19, 5, 21, 6, 23, 0, 25, 7, 27, 8, 29, 9, 31, 10, 33, 11, 35,
12, 37, 13, 39, 14, 41, 15, 43, 16, 45, 17, 47, 18, 49, 19, 51, 20, 53,
21, 55, 22, 57, 23, 59, 24, 61, 25, 63, 26, 65, 27, 67, 28, 69, 29, 71,
30, 73, 31, 75, 32, 77, 33, 1, 0, 8, 3, 0, 9, 10, 13, 13, 32, 32, 3, 0,
48, 57, 65, 90, 97, 122, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 1,
0, 48, 57, 1, 0, 65, 90, 2, 0, 65, 90, 95, 95, 2, 0, 65, 90, 97, 122, 242,
0, 1, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0,
0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0,
0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1,
0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43,
1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0,
51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0,
0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0, 65, 1, 0, 0,
0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 73, 1, 0,
0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 1, 80, 1, 0, 0, 0, 3, 87, 1,
0, 0, 0, 5, 91, 1, 0, 0, 0, 7, 104, 1, 0, 0, 0, 9, 117, 1, 0, 0, 0, 11,
119, 1, 0, 0, 0, 13, 126, 1, 0, 0, 0, 15, 128, 1, 0, 0, 0, 17, 130, 1,
0, 0, 0, 19, 132, 1, 0, 0, 0, 21, 134, 1, 0, 0, 0, 23, 138, 1, 0, 0, 0,
25, 140, 1, 0, 0, 0, 27, 144, 1, 0, 0, 0, 29, 147, 1, 0, 0, 0, 31, 151,
1, 0, 0, 0, 33, 155, 1, 0, 0, 0, 35, 157, 1, 0, 0, 0, 37, 159, 1, 0, 0,
0, 39, 161, 1, 0, 0, 0, 41, 163, 1, 0, 0, 0, 43, 165, 1, 0, 0, 0, 45, 167,
1, 0, 0, 0, 47, 170, 1, 0, 0, 0, 49, 172, 1, 0, 0, 0, 51, 175, 1, 0, 0,
0, 53, 177, 1, 0, 0, 0, 55, 180, 1, 0, 0, 0, 57, 183, 1, 0, 0, 0, 59, 188,
1, 0, 0, 0, 61, 195, 1, 0, 0, 0, 63, 198, 1, 0, 0, 0, 65, 203, 1, 0, 0,
0, 67, 209, 1, 0, 0, 0, 69, 211, 1, 0, 0, 0, 71, 214, 1, 0, 0, 0, 73, 219,
1, 0, 0, 0, 75, 224, 1, 0, 0, 0, 77, 228, 1, 0, 0, 0, 79, 81, 7, 0, 0,
0, 80, 79, 1, 0, 0, 0, 81, 82, 1, 0, 0, 0, 82, 80, 1, 0, 0, 0, 82, 83,
1, 0, 0, 0, 83, 84, 1, 0, 0, 0, 84, 85, 6, 0, 0, 0, 85, 2, 1, 0, 0, 0,
86, 88, 7, 1, 0, 0, 87, 86, 1, 0, 0, 0, 88, 89, 1, 0, 0, 0, 89, 87, 1,
0, 0, 0, 89, 90, 1, 0, 0, 0, 90, 4, 1, 0, 0, 0, 91, 99, 5, 34, 0, 0, 92,
93, 5, 92, 0, 0, 93, 98, 9, 0, 0, 0, 94, 95, 5, 34, 0, 0, 95, 98, 5, 34,
0, 0, 96, 98, 8, 2, 0, 0, 97, 92, 1, 0, 0, 0, 97, 94, 1, 0, 0, 0, 97, 96,
1, 0, 0, 0, 98, 101, 1, 0, 0, 0, 99, 97, 1, 0, 0, 0, 99, 100, 1, 0, 0,
0, 100, 102, 1, 0, 0, 0, 101, 99, 1, 0, 0, 0, 102, 103, 5, 34, 0, 0, 103,
6, 1, 0, 0, 0, 104, 112, 5, 39, 0, 0, 105, 106, 5, 92, 0, 0, 106, 111,
9, 0, 0, 0, 107, 108, 5, 39, 0, 0, 108, 111, 5, 39, 0, 0, 109, 111, 8,
3, 0, 0, 110, 105, 1, 0, 0, 0, 110, 107, 1, 0, 0, 0, 110, 109, 1, 0, 0,
0, 111, 114, 1, 0, 0, 0, 112, 110, 1, 0, 0, 0, 112, 113, 1, 0, 0, 0, 113,
115, 1, 0, 0, 0, 114, 112, 1, 0, 0, 0, 115, 116, 5, 39, 0, 0, 116, 8, 1,
0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 10, 1, 0, 0, 0, 119, 123, 7, 5, 0,
0, 120, 122, 7, 6, 0, 0, 121, 120, 1, 0, 0, 0, 122, 125, 1, 0, 0, 0, 123,
121, 1, 0, 0, 0, 123, 124, 1, 0, 0, 0, 124, 12, 1, 0, 0, 0, 125, 123, 1,
0, 0, 0, 126, 127, 5, 40, 0, 0, 127, 14, 1, 0, 0, 0, 128, 129, 5, 41, 0,
0, 129, 16, 1, 0, 0, 0, 130, 131, 5, 44, 0, 0, 131, 18, 1, 0, 0, 0, 132,
133, 5, 39, 0, 0, 133, 20, 1, 0, 0, 0, 134, 135, 5, 34, 0, 0, 135, 22,
1, 0, 0, 0, 136, 139, 3, 19, 9, 0, 137, 139, 3, 21, 10, 0, 138, 136, 1,
0, 0, 0, 138, 137, 1, 0, 0, 0, 139, 24, 1, 0, 0, 0, 140, 141, 5, 65, 0,
0, 141, 142, 5, 78, 0, 0, 142, 143, 5, 68, 0, 0, 143, 26, 1, 0, 0, 0, 144,
145, 5, 79, 0, 0, 145, 146, 5, 82, 0, 0, 146, 28, 1, 0, 0, 0, 147, 148,
5, 88, 0, 0, 148, 149, 5, 79, 0, 0, 149, 150, 5, 82, 0, 0, 150, 30, 1,
0, 0, 0, 151, 152, 5, 78, 0, 0, 152, 153, 5, 79, 0, 0, 153, 154, 5, 84,
0, 0, 154, 32, 1, 0, 0, 0, 155, 156, 5, 42, 0, 0, 156, 34, 1, 0, 0, 0,
157, 158, 5, 47, 0, 0, 158, 36, 1, 0, 0, 0, 159, 160, 5, 37, 0, 0, 160,
38, 1, 0, 0, 0, 161, 162, 5, 43, 0, 0, 162, 40, 1, 0, 0, 0, 163, 164, 5,
45, 0, 0, 164, 42, 1, 0, 0, 0, 165, 166, 5, 61, 0, 0, 166, 44, 1, 0, 0,
0, 167, 168, 5, 33, 0, 0, 168, 169, 5, 61, 0, 0, 169, 46, 1, 0, 0, 0, 170,
171, 5, 62, 0, 0, 171, 48, 1, 0, 0, 0, 172, 173, 5, 62, 0, 0, 173, 174,
5, 61, 0, 0, 174, 50, 1, 0, 0, 0, 175, 176, 5, 60, 0, 0, 176, 52, 1, 0,
0, 0, 177, 178, 5, 60, 0, 0, 178, 179, 5, 62, 0, 0, 179, 54, 1, 0, 0, 0,
180, 181, 5, 60, 0, 0, 181, 182, 5, 61, 0, 0, 182, 56, 1, 0, 0, 0, 183,
184, 5, 76, 0, 0, 184, 185, 5, 73, 0, 0, 185, 186, 5, 75, 0, 0, 186, 187,
5, 69, 0, 0, 187, 58, 1, 0, 0, 0, 188, 189, 5, 69, 0, 0, 189, 190, 5, 88,
0, 0, 190, 191, 5, 73, 0, 0, 191, 192, 5, 83, 0, 0, 192, 193, 5, 84, 0,
0, 193, 194, 5, 83, 0, 0, 194, 60, 1, 0, 0, 0, 195, 196, 5, 73, 0, 0, 196,
197, 5, 78, 0, 0, 197, 62, 1, 0, 0, 0, 198, 199, 5, 84, 0, 0, 199, 200,
5, 82, 0, 0, 200, 201, 5, 85, 0, 0, 201, 202, 5, 69, 0, 0, 202, 64, 1,
0, 0, 0, 203, 204, 5, 70, 0, 0, 204, 205, 5, 65, 0, 0, 205, 206, 5, 76,
0, 0, 206, 207, 5, 83, 0, 0, 207, 208, 5, 69, 0, 0, 208, 66, 1, 0, 0, 0,
209, 210, 3, 5, 2, 0, 210, 68, 1, 0, 0, 0, 211, 212, 3, 7, 3, 0, 212, 70,
1, 0, 0, 0, 213, 215, 3, 9, 4, 0, 214, 213, 1, 0, 0, 0, 215, 216, 1, 0,
0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 72, 1, 0, 0, 0,
218, 220, 7, 7, 0, 0, 219, 218, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221,
219, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 74, 1, 0, 0, 0, 223, 225, 7,
1, 0, 0, 224, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 224, 1, 0, 0,
0, 226, 227, 1, 0, 0, 0, 227, 76, 1, 0, 0, 0, 228, 232, 7, 5, 0, 0, 229,
231, 7, 6, 0, 0, 230, 229, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230,
1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 78, 1, 0, 0, 0, 234, 232, 1, 0,
0, 0, 13, 0, 82, 89, 97, 99, 110, 112, 123, 138, 216, 221, 226, 232, 1,
6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// CESQLParserLexerInit initializes any static state used to implement CESQLParserLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewCESQLParserLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func CESQLParserLexerInit() {
staticData := &cesqlparserlexerLexerStaticData
staticData.once.Do(cesqlparserlexerLexerInit)
}
// NewCESQLParserLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewCESQLParserLexer(input antlr.CharStream) *CESQLParserLexer {
CESQLParserLexerInit()
l := new(CESQLParserLexer)
l.BaseLexer = antlr.NewBaseLexer(input)
staticData := &cesqlparserlexerLexerStaticData
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
l.channelNames = staticData.channelNames
l.modeNames = staticData.modeNames
l.RuleNames = staticData.ruleNames
l.LiteralNames = staticData.literalNames
l.SymbolicNames = staticData.symbolicNames
l.GrammarFileName = "CESQLParser.g4"
// TODO: l.EOF = antlr.TokenEOF
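
The regenerated constructor above replaces the old per-instance ATN deserialization (and the sync.Pool reuse advice in the removed doc comment) with once-guarded static data, so building a lexer is now cheap. A minimal usage sketch, assuming the generated gen package recorded in vendor/modules.txt below; the input expression is illustrative:

package main

import (
	"fmt"

	"github.com/antlr/antlr4/runtime/Go/antlr"
	"github.com/cloudevents/sdk-go/sql/v2/gen"
)

func main() {
	// Optional: pay the one-time static-init cost up front instead of on first use.
	gen.CESQLParserLexerInit()

	lexer := gen.NewCESQLParserLexer(antlr.NewInputStream("subject = 'demo'"))
	stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	stream.Fill()
	for _, tok := range stream.GetAllTokens() {
		fmt.Println(tok.GetText())
	}
}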

View File

@ -1,117 +1,132 @@
/*
Copyright 2021 The CloudEvents Authors
SPDX-License-Identifier: Apache-2.0
*/
// Code generated from CESQLParser.g4 by ANTLR 4.9. DO NOT EDIT.
// Code generated from CESQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CESQLParser
import (
"fmt"
"reflect"
"strconv"
"sync"
"github.com/antlr/antlr4/runtime/Go/antlr"
)
// Suppress unused import errors
var _ = fmt.Printf
var _ = reflect.Copy
var _ = strconv.Itoa
var parserATN = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 35, 112,
4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7,
4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 3, 2, 3, 2, 3, 2, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 5, 3, 41, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 57, 10, 3, 3, 3, 3,
3, 3, 3, 3, 3, 5, 3, 63, 10, 3, 3, 3, 3, 3, 7, 3, 67, 10, 3, 12, 3, 14,
3, 70, 11, 3, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 76, 10, 4, 3, 5, 3, 5, 3, 6,
3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 7,
10, 92, 10, 10, 12, 10, 14, 10, 95, 11, 10, 5, 10, 97, 10, 10, 3, 10, 3,
10, 3, 11, 3, 11, 3, 11, 3, 11, 7, 11, 105, 10, 11, 12, 11, 14, 11, 108,
11, 11, 3, 11, 3, 11, 3, 11, 2, 3, 4, 12, 2, 4, 6, 8, 10, 12, 14, 16, 18,
20, 2, 10, 3, 2, 13, 15, 3, 2, 16, 17, 3, 2, 18, 24, 3, 2, 9, 11, 3, 2,
33, 34, 4, 2, 33, 33, 35, 35, 3, 2, 28, 29, 3, 2, 30, 31, 2, 120, 2, 22,
3, 2, 2, 2, 4, 40, 3, 2, 2, 2, 6, 75, 3, 2, 2, 2, 8, 77, 3, 2, 2, 2, 10,
79, 3, 2, 2, 2, 12, 81, 3, 2, 2, 2, 14, 83, 3, 2, 2, 2, 16, 85, 3, 2, 2,
2, 18, 87, 3, 2, 2, 2, 20, 100, 3, 2, 2, 2, 22, 23, 5, 4, 3, 2, 23, 24,
7, 2, 2, 3, 24, 3, 3, 2, 2, 2, 25, 26, 8, 3, 1, 2, 26, 27, 5, 10, 6, 2,
27, 28, 5, 18, 10, 2, 28, 41, 3, 2, 2, 2, 29, 30, 7, 12, 2, 2, 30, 41,
5, 4, 3, 13, 31, 32, 7, 17, 2, 2, 32, 41, 5, 4, 3, 12, 33, 34, 7, 26, 2,
2, 34, 41, 5, 8, 5, 2, 35, 36, 7, 4, 2, 2, 36, 37, 5, 4, 3, 2, 37, 38,
7, 5, 2, 2, 38, 41, 3, 2, 2, 2, 39, 41, 5, 6, 4, 2, 40, 25, 3, 2, 2, 2,
40, 29, 3, 2, 2, 2, 40, 31, 3, 2, 2, 2, 40, 33, 3, 2, 2, 2, 40, 35, 3,
2, 2, 2, 40, 39, 3, 2, 2, 2, 41, 68, 3, 2, 2, 2, 42, 43, 12, 8, 2, 2, 43,
44, 9, 2, 2, 2, 44, 67, 5, 4, 3, 9, 45, 46, 12, 7, 2, 2, 46, 47, 9, 3,
2, 2, 47, 67, 5, 4, 3, 8, 48, 49, 12, 6, 2, 2, 49, 50, 9, 4, 2, 2, 50,
67, 5, 4, 3, 7, 51, 52, 12, 5, 2, 2, 52, 53, 9, 5, 2, 2, 53, 67, 5, 4,
3, 5, 54, 56, 12, 11, 2, 2, 55, 57, 7, 12, 2, 2, 56, 55, 3, 2, 2, 2, 56,
57, 3, 2, 2, 2, 57, 58, 3, 2, 2, 2, 58, 59, 7, 25, 2, 2, 59, 67, 5, 14,
8, 2, 60, 62, 12, 9, 2, 2, 61, 63, 7, 12, 2, 2, 62, 61, 3, 2, 2, 2, 62,
63, 3, 2, 2, 2, 63, 64, 3, 2, 2, 2, 64, 65, 7, 27, 2, 2, 65, 67, 5, 20,
11, 2, 66, 42, 3, 2, 2, 2, 66, 45, 3, 2, 2, 2, 66, 48, 3, 2, 2, 2, 66,
51, 3, 2, 2, 2, 66, 54, 3, 2, 2, 2, 66, 60, 3, 2, 2, 2, 67, 70, 3, 2, 2,
2, 68, 66, 3, 2, 2, 2, 68, 69, 3, 2, 2, 2, 69, 5, 3, 2, 2, 2, 70, 68, 3,
2, 2, 2, 71, 76, 5, 12, 7, 2, 72, 76, 5, 16, 9, 2, 73, 76, 5, 14, 8, 2,
74, 76, 5, 8, 5, 2, 75, 71, 3, 2, 2, 2, 75, 72, 3, 2, 2, 2, 75, 73, 3,
2, 2, 2, 75, 74, 3, 2, 2, 2, 76, 7, 3, 2, 2, 2, 77, 78, 9, 6, 2, 2, 78,
9, 3, 2, 2, 2, 79, 80, 9, 7, 2, 2, 80, 11, 3, 2, 2, 2, 81, 82, 9, 8, 2,
2, 82, 13, 3, 2, 2, 2, 83, 84, 9, 9, 2, 2, 84, 15, 3, 2, 2, 2, 85, 86,
7, 32, 2, 2, 86, 17, 3, 2, 2, 2, 87, 96, 7, 4, 2, 2, 88, 93, 5, 4, 3, 2,
89, 90, 7, 6, 2, 2, 90, 92, 5, 4, 3, 2, 91, 89, 3, 2, 2, 2, 92, 95, 3,
2, 2, 2, 93, 91, 3, 2, 2, 2, 93, 94, 3, 2, 2, 2, 94, 97, 3, 2, 2, 2, 95,
93, 3, 2, 2, 2, 96, 88, 3, 2, 2, 2, 96, 97, 3, 2, 2, 2, 97, 98, 3, 2, 2,
2, 98, 99, 7, 5, 2, 2, 99, 19, 3, 2, 2, 2, 100, 101, 7, 4, 2, 2, 101, 106,
5, 4, 3, 2, 102, 103, 7, 6, 2, 2, 103, 105, 5, 4, 3, 2, 104, 102, 3, 2,
2, 2, 105, 108, 3, 2, 2, 2, 106, 104, 3, 2, 2, 2, 106, 107, 3, 2, 2, 2,
107, 109, 3, 2, 2, 2, 108, 106, 3, 2, 2, 2, 109, 110, 7, 5, 2, 2, 110,
21, 3, 2, 2, 2, 11, 40, 56, 62, 66, 68, 75, 93, 96, 106,
}
var literalNames = []string{
"", "", "'('", "')'", "','", "'''", "'\"'", "'AND'", "'OR'", "'XOR'", "'NOT'",
"'*'", "'/'", "'%'", "'+'", "'-'", "'='", "'!='", "'>'", "'>='", "'<'",
"'<>'", "'<='", "'LIKE'", "'EXISTS'", "'IN'", "'TRUE'", "'FALSE'",
}
var symbolicNames = []string{
"", "SPACE", "LR_BRACKET", "RR_BRACKET", "COMMA", "SINGLE_QUOTE_SYMB",
"DOUBLE_QUOTE_SYMB", "AND", "OR", "XOR", "NOT", "STAR", "DIVIDE", "MODULE",
"PLUS", "MINUS", "EQUAL", "NOT_EQUAL", "GREATER", "GREATER_OR_EQUAL", "LESS",
"LESS_GREATER", "LESS_OR_EQUAL", "LIKE", "EXISTS", "IN", "TRUE", "FALSE",
"DQUOTED_STRING_LITERAL", "SQUOTED_STRING_LITERAL", "INTEGER_LITERAL",
"IDENTIFIER", "IDENTIFIER_WITH_NUMBER", "FUNCTION_IDENTIFIER_WITH_UNDERSCORE",
}
var ruleNames = []string{
"cesql", "expression", "atom", "identifier", "functionIdentifier", "booleanLiteral",
"stringLiteral", "integerLiteral", "functionParameterList", "setExpression",
}
var _ = sync.Once{}
type CESQLParserParser struct {
*antlr.BaseParser
}
// NewCESQLParserParser produces a new parser instance for the optional input antlr.TokenStream.
//
// The *CESQLParserParser instance produced may be reused by calling the SetInputStream method.
// The initial parser configuration is expensive to construct, and the object is not thread-safe;
// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
// objects can be used in a thread-safe manner.
func NewCESQLParserParser(input antlr.TokenStream) *CESQLParserParser {
this := new(CESQLParserParser)
deserializer := antlr.NewATNDeserializer(nil)
deserializedATN := deserializer.DeserializeFromUInt16(parserATN)
decisionToDFA := make([]*antlr.DFA, len(deserializedATN.DecisionToState))
for index, ds := range deserializedATN.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(ds, index)
}
this.BaseParser = antlr.NewBaseParser(input)
var cesqlparserParserStaticData struct {
once sync.Once
serializedATN []int32
literalNames []string
symbolicNames []string
ruleNames []string
predictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache())
this.RuleNames = ruleNames
this.LiteralNames = literalNames
this.SymbolicNames = symbolicNames
func cesqlparserParserInit() {
staticData := &cesqlparserParserStaticData
staticData.literalNames = []string{
"", "", "'('", "')'", "','", "'''", "'\"'", "'AND'", "'OR'", "'XOR'",
"'NOT'", "'*'", "'/'", "'%'", "'+'", "'-'", "'='", "'!='", "'>'", "'>='",
"'<'", "'<>'", "'<='", "'LIKE'", "'EXISTS'", "'IN'", "'TRUE'", "'FALSE'",
}
staticData.symbolicNames = []string{
"", "SPACE", "LR_BRACKET", "RR_BRACKET", "COMMA", "SINGLE_QUOTE_SYMB",
"DOUBLE_QUOTE_SYMB", "AND", "OR", "XOR", "NOT", "STAR", "DIVIDE", "MODULE",
"PLUS", "MINUS", "EQUAL", "NOT_EQUAL", "GREATER", "GREATER_OR_EQUAL",
"LESS", "LESS_GREATER", "LESS_OR_EQUAL", "LIKE", "EXISTS", "IN", "TRUE",
"FALSE", "DQUOTED_STRING_LITERAL", "SQUOTED_STRING_LITERAL", "INTEGER_LITERAL",
"IDENTIFIER", "IDENTIFIER_WITH_NUMBER", "FUNCTION_IDENTIFIER_WITH_UNDERSCORE",
}
staticData.ruleNames = []string{
"cesql", "expression", "atom", "identifier", "functionIdentifier", "booleanLiteral",
"stringLiteral", "integerLiteral", "functionParameterList", "setExpression",
}
staticData.predictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 33, 110, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 1, 0, 1,
0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 39, 8, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 55, 8, 1, 1,
1, 1, 1, 1, 1, 1, 1, 3, 1, 61, 8, 1, 1, 1, 1, 1, 5, 1, 65, 8, 1, 10, 1,
12, 1, 68, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 74, 8, 2, 1, 3, 1, 3, 1,
4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 5,
8, 90, 8, 8, 10, 8, 12, 8, 93, 9, 8, 3, 8, 95, 8, 8, 1, 8, 1, 8, 1, 9,
1, 9, 1, 9, 1, 9, 5, 9, 103, 8, 9, 10, 9, 12, 9, 106, 9, 9, 1, 9, 1, 9,
1, 9, 0, 1, 2, 10, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 0, 8, 1, 0, 11, 13,
1, 0, 14, 15, 1, 0, 16, 22, 1, 0, 7, 9, 1, 0, 31, 32, 2, 0, 31, 31, 33,
33, 1, 0, 26, 27, 1, 0, 28, 29, 118, 0, 20, 1, 0, 0, 0, 2, 38, 1, 0, 0,
0, 4, 73, 1, 0, 0, 0, 6, 75, 1, 0, 0, 0, 8, 77, 1, 0, 0, 0, 10, 79, 1,
0, 0, 0, 12, 81, 1, 0, 0, 0, 14, 83, 1, 0, 0, 0, 16, 85, 1, 0, 0, 0, 18,
98, 1, 0, 0, 0, 20, 21, 3, 2, 1, 0, 21, 22, 5, 0, 0, 1, 22, 1, 1, 0, 0,
0, 23, 24, 6, 1, -1, 0, 24, 25, 3, 8, 4, 0, 25, 26, 3, 16, 8, 0, 26, 39,
1, 0, 0, 0, 27, 28, 5, 10, 0, 0, 28, 39, 3, 2, 1, 11, 29, 30, 5, 15, 0,
0, 30, 39, 3, 2, 1, 10, 31, 32, 5, 24, 0, 0, 32, 39, 3, 6, 3, 0, 33, 34,
5, 2, 0, 0, 34, 35, 3, 2, 1, 0, 35, 36, 5, 3, 0, 0, 36, 39, 1, 0, 0, 0,
37, 39, 3, 4, 2, 0, 38, 23, 1, 0, 0, 0, 38, 27, 1, 0, 0, 0, 38, 29, 1,
0, 0, 0, 38, 31, 1, 0, 0, 0, 38, 33, 1, 0, 0, 0, 38, 37, 1, 0, 0, 0, 39,
66, 1, 0, 0, 0, 40, 41, 10, 6, 0, 0, 41, 42, 7, 0, 0, 0, 42, 65, 3, 2,
1, 7, 43, 44, 10, 5, 0, 0, 44, 45, 7, 1, 0, 0, 45, 65, 3, 2, 1, 6, 46,
47, 10, 4, 0, 0, 47, 48, 7, 2, 0, 0, 48, 65, 3, 2, 1, 5, 49, 50, 10, 3,
0, 0, 50, 51, 7, 3, 0, 0, 51, 65, 3, 2, 1, 3, 52, 54, 10, 9, 0, 0, 53,
55, 5, 10, 0, 0, 54, 53, 1, 0, 0, 0, 54, 55, 1, 0, 0, 0, 55, 56, 1, 0,
0, 0, 56, 57, 5, 23, 0, 0, 57, 65, 3, 12, 6, 0, 58, 60, 10, 7, 0, 0, 59,
61, 5, 10, 0, 0, 60, 59, 1, 0, 0, 0, 60, 61, 1, 0, 0, 0, 61, 62, 1, 0,
0, 0, 62, 63, 5, 25, 0, 0, 63, 65, 3, 18, 9, 0, 64, 40, 1, 0, 0, 0, 64,
43, 1, 0, 0, 0, 64, 46, 1, 0, 0, 0, 64, 49, 1, 0, 0, 0, 64, 52, 1, 0, 0,
0, 64, 58, 1, 0, 0, 0, 65, 68, 1, 0, 0, 0, 66, 64, 1, 0, 0, 0, 66, 67,
1, 0, 0, 0, 67, 3, 1, 0, 0, 0, 68, 66, 1, 0, 0, 0, 69, 74, 3, 10, 5, 0,
70, 74, 3, 14, 7, 0, 71, 74, 3, 12, 6, 0, 72, 74, 3, 6, 3, 0, 73, 69, 1,
0, 0, 0, 73, 70, 1, 0, 0, 0, 73, 71, 1, 0, 0, 0, 73, 72, 1, 0, 0, 0, 74,
5, 1, 0, 0, 0, 75, 76, 7, 4, 0, 0, 76, 7, 1, 0, 0, 0, 77, 78, 7, 5, 0,
0, 78, 9, 1, 0, 0, 0, 79, 80, 7, 6, 0, 0, 80, 11, 1, 0, 0, 0, 81, 82, 7,
7, 0, 0, 82, 13, 1, 0, 0, 0, 83, 84, 5, 30, 0, 0, 84, 15, 1, 0, 0, 0, 85,
94, 5, 2, 0, 0, 86, 91, 3, 2, 1, 0, 87, 88, 5, 4, 0, 0, 88, 90, 3, 2, 1,
0, 89, 87, 1, 0, 0, 0, 90, 93, 1, 0, 0, 0, 91, 89, 1, 0, 0, 0, 91, 92,
1, 0, 0, 0, 92, 95, 1, 0, 0, 0, 93, 91, 1, 0, 0, 0, 94, 86, 1, 0, 0, 0,
94, 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 97, 5, 3, 0, 0, 97, 17, 1,
0, 0, 0, 98, 99, 5, 2, 0, 0, 99, 104, 3, 2, 1, 0, 100, 101, 5, 4, 0, 0,
101, 103, 3, 2, 1, 0, 102, 100, 1, 0, 0, 0, 103, 106, 1, 0, 0, 0, 104,
102, 1, 0, 0, 0, 104, 105, 1, 0, 0, 0, 105, 107, 1, 0, 0, 0, 106, 104,
1, 0, 0, 0, 107, 108, 5, 3, 0, 0, 108, 19, 1, 0, 0, 0, 9, 38, 54, 60, 64,
66, 73, 91, 94, 104,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// CESQLParserParserInit initializes any static state used to implement CESQLParserParser. By default the
// static state used to implement the parser is lazily initialized during the first call to
// NewCESQLParserParser(). You can call this function if you wish to initialize the static state ahead
// of time.
func CESQLParserParserInit() {
staticData := &cesqlparserParserStaticData
staticData.once.Do(cesqlparserParserInit)
}
// NewCESQLParserParser produces a new parser instance for the optional input antlr.TokenStream.
func NewCESQLParserParser(input antlr.TokenStream) *CESQLParserParser {
CESQLParserParserInit()
this := new(CESQLParserParser)
this.BaseParser = antlr.NewBaseParser(input)
staticData := &cesqlparserParserStaticData
this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
this.RuleNames = staticData.ruleNames
this.LiteralNames = staticData.literalNames
this.SymbolicNames = staticData.symbolicNames
this.GrammarFileName = "CESQLParser.g4"
return this
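
As with the lexer, the parser constructor is now a thin wrapper over once-initialized static state, so creating one per parse is cheap. A sketch wiring the two together, again assuming the gen package path from vendor/modules.txt; Cesql() is the entry rule shown further down:

package cesqldemo

import (
	"github.com/antlr/antlr4/runtime/Go/antlr"
	"github.com/cloudevents/sdk-go/sql/v2/gen"
)

// parse builds the parse tree for a single CESQL expression.
func parse(expr string) gen.ICesqlContext {
	lexer := gen.NewCESQLParserLexer(antlr.NewInputStream(expr))
	tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	return gen.NewCESQLParserParser(tokens).Cesql()
}
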
@ -208,7 +223,13 @@ func NewCesqlContext(parser antlr.Parser, parent antlr.ParserRuleContext, invoki
func (s *CesqlContext) GetParser() antlr.Parser { return s.parser }
func (s *CesqlContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
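
This accessor rewrite repeats throughout the rest of the file: the 4.10 runtime drops the reflection-based GetTypedRuleContext helper, so each generated accessor now scans its children for the first node implementing the target interface. The idiom, lifted into a stand-alone sketch as if it lived alongside the generated code (the helper name is illustrative, not part of this diff):

import "github.com/antlr/antlr4/runtime/Go/antlr"

// firstExpression mirrors the generated loops above: return the first child
// implementing IExpressionContext, or nil when there is none.
func firstExpression(ctx antlr.ParserRuleContext) IExpressionContext {
	for _, child := range ctx.GetChildren() {
		if expr, ok := child.(IExpressionContext); ok {
			return expr
		}
	}
	return nil
}
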
@ -240,6 +261,9 @@ func (s *CesqlContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
}
func (p *CESQLParserParser) Cesql() (localctx ICesqlContext) {
this := p
_ = this
localctx = NewCesqlContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 0, CESQLParserParserRULE_cesql)
@ -341,7 +365,13 @@ func (s *InExpressionContext) GetRuleContext() antlr.RuleContext {
}
func (s *InExpressionContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -355,7 +385,13 @@ func (s *InExpressionContext) IN() antlr.TerminalNode {
}
func (s *InExpressionContext) SetExpression() ISetExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*ISetExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ISetExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -397,12 +433,20 @@ func (s *BinaryComparisonExpressionContext) GetRuleContext() antlr.RuleContext {
}
func (s *BinaryComparisonExpressionContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@ -410,7 +454,17 @@ func (s *BinaryComparisonExpressionContext) AllExpression() []IExpressionContext
}
func (s *BinaryComparisonExpressionContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@ -476,7 +530,13 @@ func (s *AtomExpressionContext) GetRuleContext() antlr.RuleContext {
}
func (s *AtomExpressionContext) Atom() IAtomContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IAtomContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IAtomContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -518,7 +578,13 @@ func (s *ExistsExpressionContext) EXISTS() antlr.TerminalNode {
}
func (s *ExistsExpressionContext) Identifier() IIdentifierContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentifierContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -556,12 +622,20 @@ func (s *BinaryLogicExpressionContext) GetRuleContext() antlr.RuleContext {
}
func (s *BinaryLogicExpressionContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@ -569,7 +643,17 @@ func (s *BinaryLogicExpressionContext) AllExpression() []IExpressionContext {
}
func (s *BinaryLogicExpressionContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@ -619,7 +703,13 @@ func (s *LikeExpressionContext) GetRuleContext() antlr.RuleContext {
}
func (s *LikeExpressionContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -633,7 +723,13 @@ func (s *LikeExpressionContext) LIKE() antlr.TerminalNode {
}
func (s *LikeExpressionContext) StringLiteral() IStringLiteralContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IStringLiteralContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IStringLiteralContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -675,7 +771,13 @@ func (s *FunctionInvocationExpressionContext) GetRuleContext() antlr.RuleContext
}
func (s *FunctionInvocationExpressionContext) FunctionIdentifier() IFunctionIdentifierContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IFunctionIdentifierContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFunctionIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -685,7 +787,13 @@ func (s *FunctionInvocationExpressionContext) FunctionIdentifier() IFunctionIden
}
func (s *FunctionInvocationExpressionContext) FunctionParameterList() IFunctionParameterListContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IFunctionParameterListContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFunctionParameterListContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -723,12 +831,20 @@ func (s *BinaryMultiplicativeExpressionContext) GetRuleContext() antlr.RuleConte
}
func (s *BinaryMultiplicativeExpressionContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@ -736,7 +852,17 @@ func (s *BinaryMultiplicativeExpressionContext) AllExpression() []IExpressionCon
}
func (s *BinaryMultiplicativeExpressionContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@ -790,7 +916,13 @@ func (s *UnaryLogicExpressionContext) NOT() antlr.TerminalNode {
}
func (s *UnaryLogicExpressionContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -832,7 +964,13 @@ func (s *UnaryNumericExpressionContext) MINUS() antlr.TerminalNode {
}
func (s *UnaryNumericExpressionContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -874,7 +1012,13 @@ func (s *SubExpressionContext) LR_BRACKET() antlr.TerminalNode {
}
func (s *SubExpressionContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -916,12 +1060,20 @@ func (s *BinaryAdditiveExpressionContext) GetRuleContext() antlr.RuleContext {
}
func (s *BinaryAdditiveExpressionContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@ -929,7 +1081,17 @@ func (s *BinaryAdditiveExpressionContext) AllExpression() []IExpressionContext {
}
func (s *BinaryAdditiveExpressionContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@ -961,6 +1123,9 @@ func (p *CESQLParserParser) Expression() (localctx IExpressionContext) {
}
func (p *CESQLParserParser) expression(_p int) (localctx IExpressionContext) {
this := p
_ = this
var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
_parentState := p.GetState()
localctx = NewExpressionContext(p, p.GetParserRuleContext(), _parentState)
@ -1318,7 +1483,13 @@ func (s *BooleanAtomContext) GetRuleContext() antlr.RuleContext {
}
func (s *BooleanAtomContext) BooleanLiteral() IBooleanLiteralContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IBooleanLiteralContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IBooleanLiteralContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -1356,7 +1527,13 @@ func (s *IdentifierAtomContext) GetRuleContext() antlr.RuleContext {
}
func (s *IdentifierAtomContext) Identifier() IIdentifierContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IIdentifierContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -1394,7 +1571,13 @@ func (s *StringAtomContext) GetRuleContext() antlr.RuleContext {
}
func (s *StringAtomContext) StringLiteral() IStringLiteralContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IStringLiteralContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IStringLiteralContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -1432,7 +1615,13 @@ func (s *IntegerAtomContext) GetRuleContext() antlr.RuleContext {
}
func (s *IntegerAtomContext) IntegerLiteral() IIntegerLiteralContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IIntegerLiteralContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIntegerLiteralContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@ -1452,6 +1641,9 @@ func (s *IntegerAtomContext) Accept(visitor antlr.ParseTreeVisitor) interface{}
}
func (p *CESQLParserParser) Atom() (localctx IAtomContext) {
this := p
_ = this
localctx = NewAtomContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 4, CESQLParserParserRULE_atom)
@ -1579,6 +1771,9 @@ func (s *IdentifierContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
}
func (p *CESQLParserParser) Identifier() (localctx IIdentifierContext) {
this := p
_ = this
localctx = NewIdentifierContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 6, CESQLParserParserRULE_identifier)
var _la int
@ -1680,6 +1875,9 @@ func (s *FunctionIdentifierContext) Accept(visitor antlr.ParseTreeVisitor) inter
}
func (p *CESQLParserParser) FunctionIdentifier() (localctx IFunctionIdentifierContext) {
this := p
_ = this
localctx = NewFunctionIdentifierContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 8, CESQLParserParserRULE_functionIdentifier)
var _la int
@ -1781,6 +1979,9 @@ func (s *BooleanLiteralContext) Accept(visitor antlr.ParseTreeVisitor) interface
}
func (p *CESQLParserParser) BooleanLiteral() (localctx IBooleanLiteralContext) {
this := p
_ = this
localctx = NewBooleanLiteralContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 10, CESQLParserParserRULE_booleanLiteral)
var _la int
@ -1882,6 +2083,9 @@ func (s *StringLiteralContext) Accept(visitor antlr.ParseTreeVisitor) interface{
}
func (p *CESQLParserParser) StringLiteral() (localctx IStringLiteralContext) {
this := p
_ = this
localctx = NewStringLiteralContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 12, CESQLParserParserRULE_stringLiteral)
var _la int
@ -1979,6 +2183,9 @@ func (s *IntegerLiteralContext) Accept(visitor antlr.ParseTreeVisitor) interface
}
func (p *CESQLParserParser) IntegerLiteral() (localctx IIntegerLiteralContext) {
this := p
_ = this
localctx = NewIntegerLiteralContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 14, CESQLParserParserRULE_integerLiteral)
@ -2054,12 +2261,20 @@ func (s *FunctionParameterListContext) RR_BRACKET() antlr.TerminalNode {
}
func (s *FunctionParameterListContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@ -2067,7 +2282,17 @@ func (s *FunctionParameterListContext) AllExpression() []IExpressionContext {
}
func (s *FunctionParameterListContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@ -2103,6 +2328,9 @@ func (s *FunctionParameterListContext) Accept(visitor antlr.ParseTreeVisitor) in
}
func (p *CESQLParserParser) FunctionParameterList() (localctx IFunctionParameterListContext) {
this := p
_ = this
localctx = NewFunctionParameterListContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 16, CESQLParserParserRULE_functionParameterList)
var _la int
@ -2208,12 +2436,20 @@ func (s *SetExpressionContext) LR_BRACKET() antlr.TerminalNode {
}
func (s *SetExpressionContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@ -2221,7 +2457,17 @@ func (s *SetExpressionContext) AllExpression() []IExpressionContext {
}
func (s *SetExpressionContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@ -2261,6 +2507,9 @@ func (s *SetExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{
}
func (p *CESQLParserParser) SetExpression() (localctx ISetExpressionContext) {
this := p
_ = this
localctx = NewSetExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 18, CESQLParserParserRULE_setExpression)
var _la int
@ -2331,6 +2580,9 @@ func (p *CESQLParserParser) Sempred(localctx antlr.RuleContext, ruleIndex, predI
}
func (p *CESQLParserParser) Expression_Sempred(localctx antlr.RuleContext, predIndex int) bool {
this := p
_ = this
switch predIndex {
case 0:
return p.Precpred(p.GetParserRuleContext(), 6)

View File

@ -1,9 +1,4 @@
/*
Copyright 2021 The CloudEvents Authors
SPDX-License-Identifier: Apache-2.0
*/
// Code generated from CESQLParser.g4 by ANTLR 4.9. DO NOT EDIT.
// Code generated from CESQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CESQLParser
import "github.com/antlr/antlr4/runtime/Go/antlr"

View File

@ -134,6 +134,10 @@ var (
ToMessage = binding.ToMessage
// Event Creation
NewEventFromHTTPRequest = http.NewEventFromHTTPRequest
NewEventFromHTTPResponse = http.NewEventFromHTTPResponse
// HTTP Messages
WriteHTTPRequest = http.WriteRequest

View File

@ -370,6 +370,9 @@ func consumeDataAsBytes(e *Event, isBase64 bool, b []byte) error {
iter := jsoniter.ParseBytes(jsoniter.ConfigFastest, b)
src := iter.ReadString() // handles escaping
e.DataEncoded = []byte(src)
if iter.Error != nil {
return fmt.Errorf("unexpected data payload for media type %q, expected a string: %w", mt, iter.Error)
}
return nil
}
@ -397,6 +400,9 @@ func consumeData(e *Event, isBase64 bool, iter *jsoniter.Iterator) error {
// If not json, then data is encoded as string
src := iter.ReadString() // handles escaping
e.DataEncoded = []byte(src)
if iter.Error != nil {
return fmt.Errorf("unexpected data payload for media type %q, expected a string: %w", mt, iter.Error)
}
return nil
}
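
These two new iter.Error checks close a silent-failure path: when the declared media type is not JSON, "data" must decode as a JSON string, and any other JSON type in that position now fails the whole unmarshal instead of yielding an empty body. A hedged sketch of the observable behavior (field values are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/cloudevents/sdk-go/v2/event"
)

func main() {
	// "data" is an integer while datacontenttype is text/plain, so the
	// string read fails and the new checks surface the error.
	raw := []byte(`{"specversion":"1.0","id":"1","type":"demo",` +
		`"source":"example","datacontenttype":"text/plain","data":42}`)

	var e event.Event
	if err := json.Unmarshal(raw, &e); err != nil {
		fmt.Println("decode failed:", err)
	}
}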

View File

@ -359,6 +359,7 @@ func (p *Protocol) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
}
status := http.StatusOK
var errMsg string
if res != nil {
var result *Result
switch {
@ -366,7 +367,7 @@ func (p *Protocol) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
if result.StatusCode > 100 && result.StatusCode < 600 {
status = result.StatusCode
}
errMsg = fmt.Errorf(result.Format, result.Args...).Error()
case !protocol.IsACK(res):
// Map client errors to http status code
validationError := event.ValidationError{}
@ -390,6 +391,9 @@ func (p *Protocol) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
}
rw.WriteHeader(status)
if _, err := rw.Write([]byte(errMsg)); err != nil {
return err
}
return nil
}
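
The rewritten tail of ServeHTTP turns the result message into the response body (and now propagates write errors) instead of discarding it. A sketch of a receiver whose non-ACK result exercises that mapping, assuming the sdk's public receiver API; the handler logic is illustrative:

import (
	"context"

	cloudevents "github.com/cloudevents/sdk-go/v2"
	cehttp "github.com/cloudevents/sdk-go/v2/protocol/http"
)

// receive is meant to be passed to client.StartReceiver; returning this
// result makes ServeHTTP reply 400 with the message as the body.
func receive(ctx context.Context, e cloudevents.Event) cloudevents.Result {
	if e.Type() != "demo.type" {
		return cehttp.NewResult(400, "unsupported event type %q", e.Type())
	}
	return cloudevents.ResultACK
}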

View File

@ -0,0 +1,26 @@
/*
Copyright 2022 The CloudEvents Authors
SPDX-License-Identifier: Apache-2.0
*/
package http
import (
"context"
nethttp "net/http"
"github.com/cloudevents/sdk-go/v2/binding"
"github.com/cloudevents/sdk-go/v2/event"
)
// NewEventFromHTTPRequest returns an Event constructed from the given HTTP request.
func NewEventFromHTTPRequest(req *nethttp.Request) (*event.Event, error) {
msg := NewMessageFromHttpRequest(req)
return binding.ToEvent(context.Background(), msg)
}
// NewEventFromHTTPResponse returns an Event constructed from the given HTTP response.
func NewEventFromHTTPResponse(resp *nethttp.Response) (*event.Event, error) {
msg := NewMessageFromHttpResponse(resp)
return binding.ToEvent(context.Background(), msg)
}
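
These helpers convert a raw net/http request or response into an Event in one call, without a full protocol client in between. A minimal sketch inside a plain HTTP handler, assuming this file lands in the sdk's protocol/http package as its imports suggest:

package main

import (
	"fmt"
	nethttp "net/http"

	cehttp "github.com/cloudevents/sdk-go/v2/protocol/http"
)

func handler(w nethttp.ResponseWriter, r *nethttp.Request) {
	e, err := cehttp.NewEventFromHTTPRequest(r)
	if err != nil {
		nethttp.Error(w, err.Error(), nethttp.StatusBadRequest)
		return
	}
	fmt.Fprintf(w, "received event %s\n", e.ID())
}

func main() {
	_ = nethttp.ListenAndServe(":8080", nethttp.HandlerFunc(handler))
}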

View File

@ -104,6 +104,9 @@ ARTIFACTS_TO_PUBLISH=""
FROM_NIGHTLY_RELEASE=""
FROM_NIGHTLY_RELEASE_GCS=""
SIGNING_IDENTITY=""
APPLE_CODESIGN_KEY=""
APPLE_NOTARY_API_KEY=""
APPLE_CODESIGN_PASSWORD_FILE=""
export KO_DOCKER_REPO="gcr.io/knative-nightly"
# Build stripped binary to reduce size
export GOFLAGS="-ldflags=-s -ldflags=-w"
@ -314,6 +317,22 @@ function sign_release() {
if [ -z "${SIGN_IMAGES:-}" ]; then # Temporary Feature Gate
return 0
fi
# Notarizing mac binaries needs to happen before cosign signs the checksums, since
# notarization changes the checksum values of the darwin binaries
if [ -n "${APPLE_CODESIGN_KEY}" ] && [ -n "${APPLE_CODESIGN_PASSWORD_FILE}" ] && [ -n "${APPLE_NOTARY_API_KEY}" ]; then
banner "Notarizing macOS Binaries for the release"
FILES=$(find -- * -type f -name "*darwin*")
for file in $FILES; do
rcodesign sign "${file}" --p12-file="${APPLE_CODESIGN_KEY}" \
--code-signature-flags=runtime \
--p12-password-file="${APPLE_CODESIGN_PASSWORD_FILE}"
done
zip files.zip ${FILES}
rcodesign notary-submit files.zip --api-key-path="${APPLE_NOTARY_API_KEY}" --wait
sha256sum ${ARTIFACTS_TO_PUBLISH//checksums.txt/} > checksums.txt
fi
## Sign the images with cosign
## For now, check if ko has created imagerefs.txt file. In the future, missing image refs will break
## the release for all jobs that publish images.
@ -328,11 +347,11 @@ function sign_release() {
## Check if there is checksums.txt file. If so, sign the checksum file
if [[ -f "checksums.txt" ]]; then
echo "Signing Images with the identity ${SIGNING_IDENTITY}"
COSIGN_EXPERIMENTAL=1 cosign sign-blob checksums.txt --output-signature checksums.txt.sig --identity-token="$(
COSIGN_EXPERIMENTAL=1 cosign sign-blob checksums.txt --output-signature=checksums.txt.sig --output-certificate=checksums.txt.pem --identity-token="$(
gcloud auth print-identity-token --audiences=sigstore \
--include-email \
--impersonate-service-account="${SIGNING_IDENTITY}")"
ARTIFACTS_TO_PUBLISH="${ARTIFACTS_TO_PUBLISH} checksums.txt.sig"
ARTIFACTS_TO_PUBLISH="${ARTIFACTS_TO_PUBLISH} checksums.txt.sig checksums.txt.pem"
fi
}
@ -438,6 +457,15 @@ function parse_flags() {
[[ $1 =~ ^v[0-9]+-[0-9a-f]+$ ]] || abort "nightly tag must be 'vYYYYMMDD-commithash'"
FROM_NIGHTLY_RELEASE=$1
;;
--apple-codesign-key)
APPLE_CODESIGN_KEY=$1
;;
--apple-codesign-password-file)
APPLE_CODESIGN_PASSWORD_FILE=$1
;;
--apple-notary-api-key)
APPLE_NOTARY_API_KEY=$1
;;
*) abort "unknown option ${parameter}" ;;
esac
esac

20
vendor/modules.txt vendored
View File

@ -26,7 +26,7 @@ github.com/Azure/go-autorest/logger
# github.com/Azure/go-autorest/tracing v0.6.0
## explicit; go 1.12
github.com/Azure/go-autorest/tracing
# github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20211221011931-643d94fcab96
# github.com/antlr/antlr4/runtime/Go/antlr v1.4.10
## explicit; go 1.16
github.com/antlr/antlr4/runtime/Go/antlr
# github.com/beorn7/perks v1.0.1
@ -46,8 +46,8 @@ github.com/census-instrumentation/opencensus-proto/gen-go/trace/v1
# github.com/cespare/xxhash/v2 v2.1.2
## explicit; go 1.11
github.com/cespare/xxhash/v2
# github.com/cloudevents/sdk-go/sql/v2 v2.8.0
## explicit; go 1.14
# github.com/cloudevents/sdk-go/sql/v2 v2.0.0-20220930150014-52b12276cc4a
## explicit; go 1.17
github.com/cloudevents/sdk-go/sql/v2
github.com/cloudevents/sdk-go/sql/v2/expression
github.com/cloudevents/sdk-go/sql/v2/function
@ -55,8 +55,8 @@ github.com/cloudevents/sdk-go/sql/v2/gen
github.com/cloudevents/sdk-go/sql/v2/parser
github.com/cloudevents/sdk-go/sql/v2/runtime
github.com/cloudevents/sdk-go/sql/v2/utils
# github.com/cloudevents/sdk-go/v2 v2.10.1
## explicit; go 1.14
# github.com/cloudevents/sdk-go/v2 v2.12.0
## explicit; go 1.17
github.com/cloudevents/sdk-go/v2
github.com/cloudevents/sdk-go/v2/binding
github.com/cloudevents/sdk-go/v2/binding/format
@ -922,7 +922,7 @@ k8s.io/utils/net
k8s.io/utils/pointer
k8s.io/utils/strings/slices
k8s.io/utils/trace
# knative.dev/eventing v0.34.1-0.20220921104109-54174e3cb963
# knative.dev/eventing v0.34.1-0.20221005061829-af2298ff121a
## explicit; go 1.18
knative.dev/eventing/pkg/apis/config
knative.dev/eventing/pkg/apis/duck
@ -950,10 +950,10 @@ knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1
knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1/fake
knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1beta2
knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1beta2/fake
# knative.dev/hack v0.0.0-20220914183605-d1317b08c0c3
# knative.dev/hack v0.0.0-20221004153928-92a65f105c37
## explicit; go 1.18
knative.dev/hack
# knative.dev/networking v0.0.0-20220914020748-cefed20d561c
# knative.dev/networking v0.0.0-20221003195429-a5c26fe64325
## explicit; go 1.18
knative.dev/networking/pkg
knative.dev/networking/pkg/apis/networking
@ -968,7 +968,7 @@ knative.dev/networking/pkg/http/probe
knative.dev/networking/pkg/http/proxy
knative.dev/networking/pkg/http/stats
knative.dev/networking/pkg/k8s
# knative.dev/pkg v0.0.0-20220921024409-d1d5c849073b
# knative.dev/pkg v0.0.0-20221003153827-158538cc46ec
## explicit; go 1.18
knative.dev/pkg/apis
knative.dev/pkg/apis/duck
@ -1018,7 +1018,7 @@ knative.dev/pkg/tracing/config
knative.dev/pkg/tracing/propagation
knative.dev/pkg/tracing/propagation/tracecontextb3
knative.dev/pkg/tracker
# knative.dev/serving v0.34.1-0.20220921150110-2332731db1b9
# knative.dev/serving v0.34.1-0.20221005094629-080aaa5c6241
## explicit; go 1.18
knative.dev/serving/pkg/apis/autoscaling
knative.dev/serving/pkg/apis/autoscaling/v1alpha1