Conformance tests: always enable required operations (#2851)

Signed-off-by: ItalyPaleAle <43508+ItalyPaleAle@users.noreply.github.com>
Alessandro (Ale) Segala 2023-05-22 12:43:20 -07:00 committed by GitHub
parent 99ce411ef8
commit abaf74150e
19 changed files with 204 additions and 268 deletions

View File

@ -1,4 +1,4 @@
# Supported operations: create, operations and read # Supported operations: create, operations, read
# Config map: # Config map:
## output: A map of strings that will be part of the request for the output binding ## output: A map of strings that will be part of the request for the output binding
## readBindingTimeout : timeout to wait to receive test event ## readBindingTimeout : timeout to wait to receive test event
@ -74,7 +74,6 @@ components:
- component: kubemq - component: kubemq
operations: [ "create", "operations", "read" ] operations: [ "create", "operations", "read" ]
- component: postgres - component: postgres
allOperations: false
operations: [ "exec", "query", "close", "operations" ] operations: [ "exec", "query", "close", "operations" ]
- component: aws.s3.docker - component: aws.s3.docker
operations: ["create", "operations", "get", "list"] operations: ["create", "operations", "get", "list"]

View File

@ -1,9 +1,9 @@
# Supported operation: get, subscribe, unsubscribe # Supported additional operation: (none)
componentType: configuration componentType: configuration
components: components:
- component: redis.v6 - component: redis.v6
allOperations: true operations: []
- component: redis.v7 - component: redis.v7
allOperations: true operations: []
- component: postgres - component: postgres
allOperations: true operations: []

View File

@ -4,7 +4,7 @@
componentType: crypto componentType: crypto
components: components:
- component: localstorage - component: localstorage
allOperations: true operations: ["public", "symmetric"]
config: config:
keys: keys:
- algorithms: ["EdDSA"] - algorithms: ["EdDSA"]
@ -29,7 +29,7 @@ components:
type: symmetric type: symmetric
name: symmetric-256.b64 name: symmetric-256.b64
- component: jwks - component: jwks
allOperations: true operations: ["public", "symmetric"]
config: config:
keys: keys:
- algorithms: ["EdDSA"] - algorithms: ["EdDSA"]
@ -55,7 +55,6 @@ components:
name: symmetric-256 name: symmetric-256
- component: azure.keyvault - component: azure.keyvault
# Although Azure Key Vault supports symmetric keys, those are only available in "Managed HSMs", which are too impractical for our tests # Although Azure Key Vault supports symmetric keys, those are only available in "Managed HSMs", which are too impractical for our tests
allOperations: false
operations: [] operations: []
config: config:
keys: keys:

View File

@ -1,18 +1,18 @@
# Supported operation: publish, subscribe, multiplehandlers, bulkpublish, bulksubscribe # Supported additional operation:
# bulkpublish should only be run for components that implement pubsub.BulkPublisher interface # - bulkpublish (should only be run for components that implement pubsub.BulkPublisher interface)
# bulksubscribe should only be run for components that implement pubsub.BulkSubscriber interface # - bulksubscribe (should only be run for components that implement pubsub.BulkSubscriber interface)
# Config map: # Config map:
## pubsubName : name of the pubsub # - pubsubName : name of the pubsub
## testTopicName: name of the test topic to use # - testTopicName: name of the test topic to use
## publishMetadata: A map of strings that will be part of the publish metadata in the Publish call # - publishMetadata: A map of strings that will be part of the publish metadata in the Publish call
## subscribeMetadata: A map of strings that will be part of the subscribe metadata in the Subscribe call # - subscribeMetadata: A map of strings that will be part of the subscribe metadata in the Subscribe call
## maxReadDuration: duration to wait for read to complete # - maxReadDuration: duration to wait for read to complete
## messageCount: no. of messages to publish # - messageCount: no. of messages to publish
## checkInOrderProcessing: false disables in-order message processing checking # - checkInOrderProcessing: false disables in-order message processing checking
componentType: pubsub componentType: pubsub
components: components:
- component: azure.eventhubs - component: azure.eventhubs
operations: ['publish', 'subscribe', 'multiplehandlers', 'bulkpublish'] operations: ['bulkpublish']
config: config:
pubsubName: azure-eventhubs pubsubName: azure-eventhubs
testTopicName: eventhubs-pubsub-topic testTopicName: eventhubs-pubsub-topic
@ -24,7 +24,7 @@ components:
publishMetadata: publishMetadata:
partitionKey: abcd partitionKey: abcd
- component: azure.servicebus.topics - component: azure.servicebus.topics
allOperations: true operations: ['bulkpublish', 'bulksubscribe']
config: config:
pubsubName: azure-servicebus pubsubName: azure-servicebus
testTopicName: dapr-conf-test testTopicName: dapr-conf-test
@ -33,7 +33,7 @@ components:
testMultiTopic2Name: dapr-conf-test-multi2 testMultiTopic2Name: dapr-conf-test-multi2
checkInOrderProcessing: false checkInOrderProcessing: false
- component: azure.servicebus.queues - component: azure.servicebus.queues
allOperations: true operations: ['bulkpublish', 'bulksubscribe']
config: config:
pubsubName: azure-servicebus pubsubName: azure-servicebus
testTopicName: dapr-conf-queue testTopicName: dapr-conf-queue
@ -42,43 +42,43 @@ components:
testMultiTopic2Name: dapr-conf-queue-multi2 testMultiTopic2Name: dapr-conf-queue-multi2
checkInOrderProcessing: false checkInOrderProcessing: false
- component: redis.v6 - component: redis.v6
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
config: config:
checkInOrderProcessing: false checkInOrderProcessing: false
- component: redis.v7 - component: redis.v7
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
config: config:
checkInOrderProcessing: false checkInOrderProcessing: false
- component: natsstreaming - component: natsstreaming
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
- component: jetstream - component: jetstream
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
- component: kafka - component: kafka
allOperations: true operations: ['bulkpublish', 'bulksubscribe']
- component: kafka - component: kafka
profile: wurstmeister profile: wurstmeister
allOperations: true operations: ['bulkpublish', 'bulksubscribe']
- component: kafka - component: kafka
profile: confluent profile: confluent
allOperations: true operations: ['bulkpublish', 'bulksubscribe']
- component: pulsar - component: pulsar
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
- component: solace.amqp - component: solace.amqp
operations: ['publish', 'subscribe'] operations: []
- component: mqtt3 - component: mqtt3
profile: emqx profile: emqx
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
- component: mqtt3 - component: mqtt3
profile: vernemq profile: vernemq
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
- component: rabbitmq - component: rabbitmq
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
config: config:
checkInOrderProcessing: false checkInOrderProcessing: false
- component: in-memory - component: in-memory
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
- component: aws.snssqs.terraform - component: aws.snssqs.terraform
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
config: config:
pubsubName: aws-snssqs pubsubName: aws-snssqs
testTopicName: ${{PUBSUB_AWS_SNSSQS_TOPIC}} testTopicName: ${{PUBSUB_AWS_SNSSQS_TOPIC}}
@ -86,14 +86,14 @@ components:
testMultiTopic2Name: ${{PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2}} testMultiTopic2Name: ${{PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2}}
checkInOrderProcessing: false checkInOrderProcessing: false
- component: aws.snssqs.docker - component: aws.snssqs.docker
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
config: config:
pubsubName: aws-snssqs pubsubName: aws-snssqs
checkInOrderProcessing: false checkInOrderProcessing: false
- component: kubemq - component: kubemq
operations: ['publish', 'subscribe', 'multiplehandlers'] operations: []
- component: gcp.pubsub.terraform - component: gcp.pubsub.terraform
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
config: config:
pubsubName: gcp-pubsub pubsubName: gcp-pubsub
testTopicName: ${{PUBSUB_GCP_TOPIC}} testTopicName: ${{PUBSUB_GCP_TOPIC}}
@ -102,7 +102,7 @@ components:
testMultiTopic2Name: ${{PUBSUB_GCP_TOPIC_MULTI_2}} testMultiTopic2Name: ${{PUBSUB_GCP_TOPIC_MULTI_2}}
checkInOrderProcessing: false checkInOrderProcessing: false
- component: gcp.pubsub.docker - component: gcp.pubsub.docker
operations: ["publish", "subscribe", "multiplehandlers"] operations: []
config: config:
pubsubName: gcp-pubsub pubsubName: gcp-pubsub
checkInOrderProcessing: false checkInOrderProcessing: false

View File

@ -1,16 +1,16 @@
# Supported operations: get, bulkget # Supported additional operations: (none)
componentType: secretstores componentType: secretstores
components: components:
- component: local.env - component: local.env
allOperations: true operations: []
- component: local.file - component: local.file
allOperations: true operations: []
- component: azure.keyvault.certificate - component: azure.keyvault.certificate
allOperations: true operations: []
- component: azure.keyvault.serviceprincipal - component: azure.keyvault.serviceprincipal
allOperations: true operations: []
- component: kubernetes - component: kubernetes
allOperations: true operations: []
- component: hashicorp.vault - component: hashicorp.vault
allOperations: true operations: []

View File

@ -4,103 +4,78 @@
componentType: state componentType: state
components: components:
- component: redis.v6 - component: redis.v6
allOperations: false
operations: [ "transaction", "etag", "first-write", "query", "ttl" ] operations: [ "transaction", "etag", "first-write", "query", "ttl" ]
config: config:
# This component requires etags to be numeric # This component requires etags to be numeric
badEtag: "9999999" badEtag: "9999999"
- component: redis.v7 - component: redis.v7
allOperations: false
# "query" is not included because redisjson hasn't been updated to Redis v7 yet # "query" is not included because redisjson hasn't been updated to Redis v7 yet
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
config: config:
# This component requires etags to be numeric # This component requires etags to be numeric
badEtag: "9999999" badEtag: "9999999"
- component: mongodb - component: mongodb
allOperations: false
operations: [ "transaction", "etag", "first-write", "query", "ttl" ] operations: [ "transaction", "etag", "first-write", "query", "ttl" ]
- component: memcached - component: memcached
allOperations: false
operations: [ "ttl" ] operations: [ "ttl" ]
- component: azure.cosmosdb - component: azure.cosmosdb
allOperations: false
operations: [ "transaction", "etag", "first-write", "query", "ttl" ] operations: [ "transaction", "etag", "first-write", "query", "ttl" ]
- component: azure.blobstorage - component: azure.blobstorage
allOperations: false
operations: [ "etag", "first-write" ] operations: [ "etag", "first-write" ]
- component: azure.sql - component: azure.sql
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
config: config:
# This component requires etags to be hex-encoded numbers # This component requires etags to be hex-encoded numbers
badEtag: "FFFF" badEtag: "FFFF"
- component: sqlserver - component: sqlserver
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
config: config:
# This component requires etags to be hex-encoded numbers # This component requires etags to be hex-encoded numbers
badEtag: "FFFF" badEtag: "FFFF"
- component: postgresql - component: postgresql
allOperations: false
operations: [ "transaction", "etag", "first-write", "query", "ttl" ] operations: [ "transaction", "etag", "first-write", "query", "ttl" ]
config: config:
# This component requires etags to be numeric # This component requires etags to be numeric
badEtag: "1" badEtag: "1"
- component: sqlite - component: sqlite
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
- component: mysql.mysql - component: mysql.mysql
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
- component: mysql.mariadb - component: mysql.mariadb
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
- component: azure.tablestorage.storage - component: azure.tablestorage.storage
allOperations: false
operations: [ "etag", "first-write"] operations: [ "etag", "first-write"]
config: config:
# This component requires etags to be in this format # This component requires etags to be in this format
badEtag: "W/\"datetime'2023-05-09T12%3A28%3A54.1442151Z'\"" badEtag: "W/\"datetime'2023-05-09T12%3A28%3A54.1442151Z'\""
- component: azure.tablestorage.cosmosdb - component: azure.tablestorage.cosmosdb
allOperations: false
operations: [ "etag", "first-write"] operations: [ "etag", "first-write"]
config: config:
# This component requires etags to be in this format # This component requires etags to be in this format
badEtag: "W/\"datetime'2023-05-09T12%3A28%3A54.1442151Z'\"" badEtag: "W/\"datetime'2023-05-09T12%3A28%3A54.1442151Z'\""
- component: oracledatabase - component: oracledatabase
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
- component: cassandra - component: cassandra
allOperations: false
operations: [ "ttl" ] operations: [ "ttl" ]
- component: cloudflare.workerskv - component: cloudflare.workerskv
allOperations: false
# Although this component supports TTLs, the minimum TTL is 60s, which makes it not suitable for our conformance tests # Although this component supports TTLs, the minimum TTL is 60s, which makes it not suitable for our conformance tests
operations: [] operations: []
- component: cockroachdb - component: cockroachdb
allOperations: false
operations: [ "transaction", "etag", "first-write", "query", "ttl" ] operations: [ "transaction", "etag", "first-write", "query", "ttl" ]
config: config:
# This component requires etags to be numeric # This component requires etags to be numeric
badEtag: "9999999" badEtag: "9999999"
- component: rethinkdb - component: rethinkdb
allOperations: false
operations: [] operations: []
- component: in-memory - component: in-memory
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
- component: aws.dynamodb.docker - component: aws.dynamodb.docker
allOperations: false
operations: [ "transaction", "etag", "first-write" ] operations: [ "transaction", "etag", "first-write" ]
- component: aws.dynamodb.terraform - component: aws.dynamodb.terraform
allOperations: false
operations: [ "transaction", "etag", "first-write" ] operations: [ "transaction", "etag", "first-write" ]
- component: etcd - component: etcd
allOperations: false
operations: [ "transaction", "etag", "first-write", "ttl" ] operations: [ "transaction", "etag", "first-write", "ttl" ]
- component: gcp.firestore.docker - component: gcp.firestore.docker
allOperations: false
operations: [] operations: []
- component: gcp.firestore.cloud - component: gcp.firestore.cloud
allOperations: false
operations: [] operations: []

View File

@ -1,6 +1,5 @@
# Supported operations: start, get, terminate # Supported additional operations: (none)
componentType: workflows componentType: workflows
components: components:
- component: temporal - component: temporal
allOperations: false operations: []
operations: [ "start", "get", "terminate"]

View File

@ -5,12 +5,14 @@
1. `tests/` directory contains the configuration and the test definition for conformance tests. 1. `tests/` directory contains the configuration and the test definition for conformance tests.
2. All the conformance tests are within the `tests/conformance` directory. 2. All the conformance tests are within the `tests/conformance` directory.
3. All the configurations are in the `tests/config` directory. 3. All the configurations are in the `tests/config` directory.
4. Each of the component specific `component` definition are in their specific `component type` folder in the `tests/config` folder. E.g. `redis` statestore component definition within `state` directory. The component types are `bindings`, `state`, `secretstores`, `pubsub`. Cloud specific components will be within their own `cloud` directory within the `component type` folder, e.g. `pubsub/azure/servicebus`. 4. Each component-specific `component` definition is in its `component type` folder under `tests/config`. For example, the `redis` statestore component definition is within the `state` directory.
- The component types are: `bindings`, `configuration`, `crypto`, `pubsub`, `state`, `secretstores`, `workflows`.
- Cloud specific components will be within their own `cloud` directory within the `component type` folder, e.g. `pubsub/azure/servicebus`.
5. Similar to the component definitions, each component type has its own set of the conformance tests definitions. 5. Similar to the component definitions, each component type has its own set of the conformance tests definitions.
6. Each `component type` contains a `tests.yml` definition that defines the component to be tested along with component specific test configuration. Nested folder names have their `/` in path replaced by `.` in the component name in `tests.yml`, e.g. `azure/servicebus` should be `azure.servicebus` 6. Each `component type` contains a `tests.yml` definition that defines the component to be tested along with component specific test configuration. Nested folder names have their `/` in path replaced by `.` in the component name in `tests.yml`, e.g. `azure/servicebus/topics` should be `azure.servicebus.topics`
7. All the tests configurations are defined in `common.go` file. 7. All the tests configurations are defined in `common.go` file.
8. Each `component type` has its own `_test` file to trigger the conformance tests. E.g. `bindings_test.go`. 8. Each `component type` has its own `_test` file to trigger the conformance tests. E.g. `bindings_test.go`.
9. Each test added will also need to be added to the `conformance.yml` workflow file. 9. Each test added will also need to be added to the `component type/tests.yml` workflow file.
## Conformance test workflow ## Conformance test workflow
@ -48,10 +50,12 @@
```yaml ```yaml
componentType: binding componentType: binding
components: components:
## All other components # For each component
- component: <COMPONENT> - component: <COMPONENT>
allOperations: <true/false> # If the component supports additional (optional) operations
operations: <List of operations if needed> operations: [ '<operation1>', '<operation2>' ]
# If the component does NOT support additional operations
operations: []
``` ```
5. Any UUID generation for keys can be specified using `$((uuid))`. E.g. see [/tests/config/bindings/tests.yml](../config/bindings/tests.yml) 5. Any UUID generation for keys can be specified using `$((uuid))`. E.g. see [/tests/config/bindings/tests.yml](../config/bindings/tests.yml)
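
For illustration, here is a minimal, self-contained sketch of the contract this README now describes (the helper names `opSet` and `runPubSubConformance` are illustrative, not the actual test code): required operations always run, and only the additional operations listed in `tests.yml` are gated.

```go
package conformance

import "testing"

// opSet is the set built from the "operations" list in tests.yml.
type opSet map[string]struct{}

func (s opSet) has(op string) bool { _, ok := s[op]; return ok }

// runPubSubConformance is an illustrative outline only: required operations
// run unconditionally, while additional (optional) operations run only when
// the component opts in through tests.yml.
func runPubSubConformance(t *testing.T, extra opSet) {
	// Required operations: always exercised.
	t.Run("publish", func(t *testing.T) { /* ... */ })
	t.Run("subscribe", func(t *testing.T) { /* ... */ })
	t.Run("multiplehandlers", func(t *testing.T) { /* ... */ })

	// Additional operations: gated on the tests.yml list,
	// e.g. operations: ['bulkpublish', 'bulksubscribe'] for kafka.
	if extra.has("bulkpublish") {
		t.Run("bulkpublish", func(t *testing.T) { /* ... */ })
	}
	if extra.has("bulksubscribe") {
		t.Run("bulksubscribe", func(t *testing.T) { /* ... */ })
	}
}
```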

View File

@ -59,13 +59,12 @@ type TestConfig struct {
ReadBindingWait time.Duration `mapstructure:"readBindingWait"` ReadBindingWait time.Duration `mapstructure:"readBindingWait"`
} }
func NewTestConfig(name string, allOperations bool, operations []string, configMap map[string]interface{}) (TestConfig, error) { func NewTestConfig(name string, operations []string, configMap map[string]interface{}) (TestConfig, error) {
waitForSetup = false waitForSetup = false
testConfig := TestConfig{ testConfig := TestConfig{
CommonConfig: utils.CommonConfig{ CommonConfig: utils.CommonConfig{
ComponentType: "bindings", ComponentType: "bindings",
ComponentName: name, ComponentName: name,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
InputMetadata: make(map[string]string), InputMetadata: make(map[string]string),

View File

@ -139,11 +139,10 @@ type TestConfiguration struct {
} }
type TestComponent struct { type TestComponent struct {
Component string `yaml:"component,omitempty"` Component string `yaml:"component,omitempty"`
Profile string `yaml:"profile,omitempty"` Profile string `yaml:"profile,omitempty"`
AllOperations bool `yaml:"allOperations,omitempty"` Operations []string `yaml:"operations,omitempty"`
Operations []string `yaml:"operations,omitempty"` Config map[string]interface{} `yaml:"config,omitempty"`
Config map[string]interface{} `yaml:"config,omitempty"`
} }
// NewTestConfiguration reads the tests.yml and loads the TestConfiguration. // NewTestConfiguration reads the tests.yml and loads the TestConfiguration.
@ -368,7 +367,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
store := loadStateStore(comp) store := loadStateStore(comp)
require.NotNilf(t, store, "error running conformance test for component %s", comp.Component) require.NotNilf(t, store, "error running conformance test for component %s", comp.Component)
storeConfig, err := conf_state.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations, comp.Config) storeConfig, err := conf_state.NewTestConfig(comp.Component, comp.Operations, comp.Config)
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
conf_state.ConformanceTests(t, props, store, storeConfig) conf_state.ConformanceTests(t, props, store, storeConfig)
case "secretstores": case "secretstores":
@ -377,7 +376,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
store := loadSecretStore(comp) store := loadSecretStore(comp)
require.NotNilf(t, store, "error running conformance test for component %s", comp.Component) require.NotNilf(t, store, "error running conformance test for component %s", comp.Component)
storeConfig := conf_secret.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations) storeConfig := conf_secret.NewTestConfig(comp.Component, comp.Operations)
conf_secret.ConformanceTests(t, props, store, storeConfig) conf_secret.ConformanceTests(t, props, store, storeConfig)
case "pubsub": case "pubsub":
filepath := fmt.Sprintf("../config/pubsub/%s", componentConfigPath) filepath := fmt.Sprintf("../config/pubsub/%s", componentConfigPath)
@ -385,7 +384,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
pubsub := loadPubSub(comp) pubsub := loadPubSub(comp)
require.NotNil(t, pubsub, "error running conformance test for component %s", comp.Component) require.NotNil(t, pubsub, "error running conformance test for component %s", comp.Component)
pubsubConfig, err := conf_pubsub.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations, comp.Config) pubsubConfig, err := conf_pubsub.NewTestConfig(comp.Component, comp.Operations, comp.Config)
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
conf_pubsub.ConformanceTests(t, props, pubsub, pubsubConfig) conf_pubsub.ConformanceTests(t, props, pubsub, pubsubConfig)
case "bindings": case "bindings":
@ -395,7 +394,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
inputBinding := loadInputBindings(comp) inputBinding := loadInputBindings(comp)
outputBinding := loadOutputBindings(comp) outputBinding := loadOutputBindings(comp)
require.True(t, inputBinding != nil || outputBinding != nil) require.True(t, inputBinding != nil || outputBinding != nil)
bindingsConfig, err := conf_bindings.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations, comp.Config) bindingsConfig, err := conf_bindings.NewTestConfig(comp.Component, comp.Operations, comp.Config)
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
conf_bindings.ConformanceTests(t, props, inputBinding, outputBinding, bindingsConfig) conf_bindings.ConformanceTests(t, props, inputBinding, outputBinding, bindingsConfig)
case "workflows": case "workflows":
@ -403,7 +402,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
props, err := tc.loadComponentsAndProperties(t, filepath) props, err := tc.loadComponentsAndProperties(t, filepath)
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
wf := loadWorkflow(comp) wf := loadWorkflow(comp)
wfConfig := conf_workflows.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations, comp.Config) wfConfig := conf_workflows.NewTestConfig(comp.Component, comp.Operations, comp.Config)
conf_workflows.ConformanceTests(t, props, wf, wfConfig) conf_workflows.ConformanceTests(t, props, wf, wfConfig)
case "crypto": case "crypto":
filepath := fmt.Sprintf("../config/crypto/%s", componentConfigPath) filepath := fmt.Sprintf("../config/crypto/%s", componentConfigPath)
@ -411,7 +410,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
component := loadCryptoProvider(comp) component := loadCryptoProvider(comp)
require.NotNil(t, component, "error running conformance test for component %s", comp.Component) require.NotNil(t, component, "error running conformance test for component %s", comp.Component)
cryptoConfig, err := conf_crypto.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations, comp.Config) cryptoConfig, err := conf_crypto.NewTestConfig(comp.Component, comp.Operations, comp.Config)
require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component) require.NoErrorf(t, err, "error running conformance test for component %s", comp.Component)
conf_crypto.ConformanceTests(t, props, component, cryptoConfig) conf_crypto.ConformanceTests(t, props, component, cryptoConfig)
case "configuration": case "configuration":
@ -421,7 +420,7 @@ func (tc *TestConfiguration) Run(t *testing.T) {
store, updater := loadConfigurationStore(comp) store, updater := loadConfigurationStore(comp)
require.NotNil(t, store, "error running conformance test for component %s", comp.Component) require.NotNil(t, store, "error running conformance test for component %s", comp.Component)
require.NotNil(t, updater, "error running conformance test for component %s", comp.Component) require.NotNil(t, updater, "error running conformance test for component %s", comp.Component)
configurationConfig := conf_configuration.NewTestConfig(comp.Component, comp.AllOperations, comp.Operations, comp.Config) configurationConfig := conf_configuration.NewTestConfig(comp.Component, comp.Operations, comp.Config)
conf_configuration.ConformanceTests(t, props, store, updater, configurationConfig, comp.Component) conf_configuration.ConformanceTests(t, props, store, updater, configurationConfig, comp.Component)
default: default:
t.Fatalf("unknown component type %s", tc.ComponentType) t.Fatalf("unknown component type %s", tc.ComponentType)

View File

@ -29,8 +29,7 @@ func TestDecodeYaml(t *testing.T) {
yam := `componentType: state yam := `componentType: state
components: components:
- component: redis - component: redis
allOperations: false operations: ["foo", "bar"]
operations: ["init", "set"]
config: config:
maxInitDurationInMs: 20 maxInitDurationInMs: 20
maxSetDurationInMs: 20 maxSetDurationInMs: 20
@ -41,9 +40,8 @@ components:
assert.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, config) assert.NotNil(t, config)
assert.Equal(t, 1, len(config.Components)) assert.Equal(t, 1, len(config.Components))
assert.False(t, config.Components[0].AllOperations)
assert.Equal(t, "state", config.ComponentType) assert.Equal(t, "state", config.ComponentType)
assert.Equal(t, 2, len(config.Components[0].Operations)) assert.Equal(t, []string{"foo", "bar"}, config.Components[0].Operations)
assert.Equal(t, 5, len(config.Components[0].Config)) assert.Equal(t, 5, len(config.Components[0].Config))
}) })
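
As a reference for the shape being decoded, here is a self-contained sketch of how an entry like the one in this test maps onto the trimmed structs (assuming `gopkg.in/yaml.v3`; the repository's `decodeYaml` helper and the exact `TestConfiguration` field tags are not shown in this diff, so they are inferred here):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3" // assumption: the repo may wrap a different YAML package
)

// TestComponent mirrors the struct after this change: no AllOperations field.
type TestComponent struct {
	Component  string                 `yaml:"component,omitempty"`
	Profile    string                 `yaml:"profile,omitempty"`
	Operations []string               `yaml:"operations,omitempty"`
	Config     map[string]interface{} `yaml:"config,omitempty"`
}

// TestConfiguration fields are inferred from the assertions in the test above.
type TestConfiguration struct {
	ComponentType string          `yaml:"componentType,omitempty"`
	Components    []TestComponent `yaml:"components,omitempty"`
}

func main() {
	doc := `componentType: state
components:
  - component: redis
    operations: ["foo", "bar"]
    config:
      maxInitDurationInMs: 20
`
	var cfg TestConfiguration
	if err := yaml.Unmarshal([]byte(doc), &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.ComponentType)            // state
	fmt.Println(cfg.Components[0].Operations) // [foo bar]
}
```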

View File

@ -46,12 +46,11 @@ type TestConfig struct {
utils.CommonConfig utils.CommonConfig
} }
func NewTestConfig(componentName string, allOperations bool, operations []string, configMap map[string]interface{}) TestConfig { func NewTestConfig(componentName string, operations []string, configMap map[string]interface{}) TestConfig {
tc := TestConfig{ tc := TestConfig{
utils.CommonConfig{ utils.CommonConfig{
ComponentType: "configuration", ComponentType: "configuration",
ComponentName: componentName, ComponentName: componentName,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
} }
@ -177,7 +176,7 @@ func ConformanceTests(t *testing.T, props map[string]string, store configuration
require.NoError(t, err, "expected no error on adding keys") require.NoError(t, err, "expected no error on adding keys")
}) })
if config.HasOperation("get") { t.Run("get", func(t *testing.T) {
t.Run("get with non-empty key list", func(t *testing.T) { t.Run("get with non-empty key list", func(t *testing.T) {
keys := getKeys(initValues1) keys := getKeys(initValues1)
@ -218,9 +217,9 @@ func ConformanceTests(t *testing.T, props map[string]string, store configuration
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, expectedResponse, resp.Items) assert.Equal(t, expectedResponse, resp.Items)
}) })
} })
if config.HasOperation("subscribe") { t.Run("subscribe", func(t *testing.T) {
subscribeMetadata := make(map[string]string) subscribeMetadata := make(map[string]string)
if component == postgresComponent { if component == postgresComponent {
subscribeMetadata[pgNotifyChannelKey] = pgNotifyChannel subscribeMetadata[pgNotifyChannelKey] = pgNotifyChannel
@ -318,9 +317,9 @@ func ConformanceTests(t *testing.T, props map[string]string, store configuration
verifyMessagesReceived(t, processedC2, awaitingMessages2) verifyMessagesReceived(t, processedC2, awaitingMessages2)
verifyMessagesReceived(t, processedC3, awaitingMessages3) verifyMessagesReceived(t, processedC3, awaitingMessages3)
}) })
} })
if config.HasOperation("unsubscribe") { t.Run("unsubscribe", func(t *testing.T) {
t.Run("unsubscribe subscriber 1", func(t *testing.T) { t.Run("unsubscribe subscriber 1", func(t *testing.T) {
ID1 := subscribeIDs[0] ID1 := subscribeIDs[0]
err := store.Unsubscribe(context.Background(), err := store.Unsubscribe(context.Background(),
@ -382,7 +381,7 @@ func ConformanceTests(t *testing.T, props map[string]string, store configuration
verifyNoMessagesReceived(t, processedC3) verifyNoMessagesReceived(t, processedC3)
}) })
} })
} }
func verifyNoMessagesReceived(t *testing.T, processedChan chan *configuration.UpdateEvent) { func verifyNoMessagesReceived(t *testing.T, processedChan chan *configuration.UpdateEvent) {

View File

@ -71,12 +71,11 @@ type TestConfig struct {
Keys []testConfigKey `mapstructure:"keys"` Keys []testConfigKey `mapstructure:"keys"`
} }
func NewTestConfig(name string, allOperations bool, operations []string, configMap map[string]interface{}) (TestConfig, error) { func NewTestConfig(name string, operations []string, configMap map[string]interface{}) (TestConfig, error) {
testConfig := TestConfig{ testConfig := TestConfig{
CommonConfig: utils.CommonConfig{ CommonConfig: utils.CommonConfig{
ComponentType: "crypto", ComponentType: "crypto",
ComponentName: name, ComponentName: name,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
} }

View File

@ -69,13 +69,12 @@ type TestConfig struct {
TestProjectID string `mapstructure:"testProjectID"` TestProjectID string `mapstructure:"testProjectID"`
} }
func NewTestConfig(componentName string, allOperations bool, operations []string, configMap map[string]interface{}) (TestConfig, error) { func NewTestConfig(componentName string, operations []string, configMap map[string]interface{}) (TestConfig, error) {
// Populate defaults // Populate defaults
tc := TestConfig{ tc := TestConfig{
CommonConfig: utils.CommonConfig{ CommonConfig: utils.CommonConfig{
ComponentType: "pubsub", ComponentType: "pubsub",
ComponentName: componentName, ComponentName: componentName,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
PubsubName: defaultPubsubName, PubsubName: defaultPubsubName,
@ -143,79 +142,77 @@ func ConformanceTests(t *testing.T, props map[string]string, ps pubsub.PubSub, c
var muBulk sync.Mutex var muBulk sync.Mutex
// Subscribe // Subscribe
if config.HasOperation("subscribe") { //nolint:nestif t.Run("subscribe", func(t *testing.T) {
t.Run("subscribe", func(t *testing.T) { var counter int
var counter int var lastSequence int
var lastSequence int err := ps.Subscribe(ctx, pubsub.SubscribeRequest{
err := ps.Subscribe(ctx, pubsub.SubscribeRequest{ Topic: config.TestTopicName,
Topic: config.TestTopicName, Metadata: config.SubscribeMetadata,
Metadata: config.SubscribeMetadata, }, func(ctx context.Context, msg *pubsub.NewMessage) error {
}, func(ctx context.Context, msg *pubsub.NewMessage) error { dataString := string(msg.Data)
dataString := string(msg.Data) if !strings.HasPrefix(dataString, dataPrefix) {
if !strings.HasPrefix(dataString, dataPrefix) { t.Logf("Ignoring message without expected prefix")
t.Logf("Ignoring message without expected prefix")
return nil
}
sequence, err := strconv.Atoi(dataString[len(dataPrefix):])
if err != nil {
t.Logf("Message did not contain a sequence number")
assert.Fail(t, "message did not contain a sequence number")
return err
}
// Ignore already processed messages
// in case we receive a redelivery from the broker
// during retries.
mu.Lock()
_, alreadyProcessed := processedMessages[sequence]
mu.Unlock()
if alreadyProcessed {
t.Logf("Message was already processed: %d", sequence)
return nil
}
counter++
// Only consider order when we receive a message for the first time
// Messages that fail and are re-queued will naturally come out of order
if errorCount == 0 {
if sequence < lastSequence {
outOfOrder = true
t.Logf("Message received out of order: expected sequence >= %d, got %d", lastSequence, sequence)
}
lastSequence = sequence
}
// This behavior is standard to repro a failure of one message in a batch.
if errorCount < 2 || counter%5 == 0 {
// First message errors just to give time for more messages to pile up.
// Second error is to force an error in a batch.
errorCount++
// Sleep to allow messages to pile up and be delivered as a batch.
time.Sleep(1 * time.Second)
t.Logf("Simulating subscriber error")
return errors.New("conf test simulated error")
}
t.Logf("Simulating subscriber success")
actualReadCount++
mu.Lock()
processedMessages[sequence] = struct{}{}
mu.Unlock()
processedC <- dataString
return nil return nil
}) }
assert.NoError(t, err, "expected no error on subscribe")
sequence, err := strconv.Atoi(dataString[len(dataPrefix):])
if err != nil {
t.Logf("Message did not contain a sequence number")
assert.Fail(t, "message did not contain a sequence number")
return err
}
// Ignore already processed messages
// in case we receive a redelivery from the broker
// during retries.
mu.Lock()
_, alreadyProcessed := processedMessages[sequence]
mu.Unlock()
if alreadyProcessed {
t.Logf("Message was already processed: %d", sequence)
return nil
}
counter++
// Only consider order when we receive a message for the first time
// Messages that fail and are re-queued will naturally come out of order
if errorCount == 0 {
if sequence < lastSequence {
outOfOrder = true
t.Logf("Message received out of order: expected sequence >= %d, got %d", lastSequence, sequence)
}
lastSequence = sequence
}
// This behavior is standard to repro a failure of one message in a batch.
if errorCount < 2 || counter%5 == 0 {
// First message errors just to give time for more messages to pile up.
// Second error is to force an error in a batch.
errorCount++
// Sleep to allow messages to pile up and be delivered as a batch.
time.Sleep(1 * time.Second)
t.Logf("Simulating subscriber error")
return errors.New("conf test simulated error")
}
t.Logf("Simulating subscriber success")
actualReadCount++
mu.Lock()
processedMessages[sequence] = struct{}{}
mu.Unlock()
processedC <- dataString
return nil
}) })
} assert.NoError(t, err, "expected no error on subscribe")
})
// Bulk Subscribe // Bulk Subscribe
if config.HasOperation("bulksubscribe") { //nolint:nestif if config.HasOperation("bulksubscribe") { //nolint:nestif
@ -316,45 +313,44 @@ func ConformanceTests(t *testing.T, props map[string]string, ps pubsub.PubSub, c
} }
// Publish // Publish
if config.HasOperation("publish") { t.Run("publish", func(t *testing.T) {
// Some pubsub, like Kafka need to wait for Subscriber to be up before messages can be consumed. // Some pubsub, like Kafka need to wait for Subscriber to be up before messages can be consumed.
// So, wait for some time here. // So, wait for some time here.
time.Sleep(config.WaitDurationToPublish) time.Sleep(config.WaitDurationToPublish)
t.Run("publish", func(t *testing.T) {
for k := 1; k <= config.MessageCount; k++ { for k := 1; k <= config.MessageCount; k++ {
data := []byte(fmt.Sprintf("%s%d", dataPrefix, k))
err := ps.Publish(ctx, &pubsub.PublishRequest{
Data: data,
PubsubName: config.PubsubName,
Topic: config.TestTopicName,
Metadata: config.PublishMetadata,
})
if err == nil {
awaitingMessages[string(data)] = struct{}{}
}
assert.NoError(t, err, "expected no error on publishing data %s on topic %s", data, config.TestTopicName)
}
if config.HasOperation("bulksubscribe") {
_, ok := ps.(pubsub.BulkSubscriber)
if !ok {
t.Fatalf("cannot run bulkSubscribe conformance, BulkSubscriber interface not implemented by the component %s", config.ComponentName)
}
for k := bulkSubStartingKey; k <= (bulkSubStartingKey + config.MessageCount); k++ {
data := []byte(fmt.Sprintf("%s%d", dataPrefix, k)) data := []byte(fmt.Sprintf("%s%d", dataPrefix, k))
err := ps.Publish(ctx, &pubsub.PublishRequest{ err := ps.Publish(ctx, &pubsub.PublishRequest{
Data: data, Data: data,
PubsubName: config.PubsubName, PubsubName: config.PubsubName,
Topic: config.TestTopicName, Topic: config.TestTopicForBulkSub,
Metadata: config.PublishMetadata, Metadata: config.PublishMetadata,
}) })
if err == nil { if err == nil {
awaitingMessages[string(data)] = struct{}{} awaitingMessagesBulk[string(data)] = struct{}{}
} }
assert.NoError(t, err, "expected no error on publishing data %s on topic %s", data, config.TestTopicName) assert.NoError(t, err, "expected no error on publishing data %s on topic %s", data, config.TestTopicForBulkSub)
} }
if config.HasOperation("bulksubscribe") { }
_, ok := ps.(pubsub.BulkSubscriber) })
if !ok {
t.Fatalf("cannot run bulkSubscribe conformance, BulkSubscriber interface not implemented by the component %s", config.ComponentName)
}
for k := bulkSubStartingKey; k <= (bulkSubStartingKey + config.MessageCount); k++ {
data := []byte(fmt.Sprintf("%s%d", dataPrefix, k))
err := ps.Publish(ctx, &pubsub.PublishRequest{
Data: data,
PubsubName: config.PubsubName,
Topic: config.TestTopicForBulkSub,
Metadata: config.PublishMetadata,
})
if err == nil {
awaitingMessagesBulk[string(data)] = struct{}{}
}
assert.NoError(t, err, "expected no error on publishing data %s on topic %s", data, config.TestTopicForBulkSub)
}
}
})
}
// assumes that publish operation is run only once for publishing config.MessageCount number of events // assumes that publish operation is run only once for publishing config.MessageCount number of events
// bulkpublish needs to be run after publish operation // bulkpublish needs to be run after publish operation
@ -410,29 +406,27 @@ func ConformanceTests(t *testing.T, props map[string]string, ps pubsub.PubSub, c
} }
// Verify read // Verify read
if (config.HasOperation("publish") || config.HasOperation("bulkpublish")) && config.HasOperation("subscribe") { t.Run("verify read", func(t *testing.T) {
t.Run("verify read", func(t *testing.T) { t.Logf("waiting for %v to complete read", config.MaxReadDuration)
t.Logf("waiting for %v to complete read", config.MaxReadDuration) timeout := time.After(config.MaxReadDuration)
timeout := time.After(config.MaxReadDuration) waiting := true
waiting := true for waiting {
for waiting { select {
select { case processed := <-processedC:
case processed := <-processedC: t.Logf("deleting %s processed message", processed)
t.Logf("deleting %s processed message", processed) delete(awaitingMessages, processed)
delete(awaitingMessages, processed) waiting = len(awaitingMessages) > 0
waiting = len(awaitingMessages) > 0 case <-timeout:
case <-timeout: // Break out after the maximum read duration has elapsed
// Break out after the maximum read duration has elapsed waiting = false
waiting = false
}
} }
assert.False(t, config.CheckInOrderProcessing && outOfOrder, "received messages out of order") }
assert.Empty(t, awaitingMessages, "expected to read %v messages", config.MessageCount) assert.False(t, config.CheckInOrderProcessing && outOfOrder, "received messages out of order")
}) assert.Empty(t, awaitingMessages, "expected to read %v messages", config.MessageCount)
} })
// Verify read on bulk subscription // Verify read on bulk subscription
if config.HasOperation("publish") && config.HasOperation("bulksubscribe") { if config.HasOperation("bulksubscribe") {
t.Run("verify read on bulk subscription", func(t *testing.T) { t.Run("verify read on bulk subscription", func(t *testing.T) {
_, ok := ps.(pubsub.BulkSubscriber) _, ok := ps.(pubsub.BulkSubscriber)
if !ok { if !ok {
@ -457,7 +451,7 @@ func ConformanceTests(t *testing.T, props map[string]string, ps pubsub.PubSub, c
} }
// Multiple handlers // Multiple handlers
if config.HasOperation("multiplehandlers") { t.Run("multiple handlers", func(t *testing.T) {
received1Ch := make(chan string) received1Ch := make(chan string)
received2Ch := make(chan string) received2Ch := make(chan string)
subscribe1Ctx, subscribe1Cancel := context.WithCancel(context.Background()) subscribe1Ctx, subscribe1Cancel := context.WithCancel(context.Background())
@ -560,7 +554,7 @@ func ConformanceTests(t *testing.T, props map[string]string, ps pubsub.PubSub, c
<-wait <-wait
} }
}) })
} })
} }
func receiveInBackground(t *testing.T, timeout time.Duration, received1Ch <-chan string, received2Ch <-chan string, sent1Ch <-chan string, sent2Ch <-chan string, allSentCh <-chan bool) <-chan struct{} { func receiveInBackground(t *testing.T, timeout time.Duration, received1Ch <-chan string, received2Ch <-chan string, sent1Ch <-chan string, sent2Ch <-chan string, allSentCh <-chan bool) <-chan struct{} {

View File

@ -29,12 +29,11 @@ type TestConfig struct {
utils.CommonConfig utils.CommonConfig
} }
func NewTestConfig(name string, allOperations bool, operations []string) TestConfig { func NewTestConfig(name string, operations []string) TestConfig {
tc := TestConfig{ tc := TestConfig{
CommonConfig: utils.CommonConfig{ CommonConfig: utils.CommonConfig{
ComponentType: "secretstores", ComponentType: "secretstores",
ComponentName: name, ComponentName: name,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
} }
@ -69,7 +68,7 @@ func ConformanceTests(t *testing.T, props map[string]string, store secretstores.
}) })
// Get // Get
if config.HasOperation("get") { t.Run("get", func(t *testing.T) {
getSecretRequest := secretstores.GetSecretRequest{ getSecretRequest := secretstores.GetSecretRequest{
Name: "conftestsecret", Name: "conftestsecret",
} }
@ -86,10 +85,10 @@ func ConformanceTests(t *testing.T, props map[string]string, store secretstores.
assert.NotNil(t, resp.Data, "expected value to be returned") assert.NotNil(t, resp.Data, "expected value to be returned")
assert.Equal(t, getSecretResponse.Data, resp.Data, "expected values to be equal") assert.Equal(t, getSecretResponse.Data, resp.Data, "expected values to be equal")
}) })
} })
// Bulkget // Bulkget
if config.HasOperation("bulkget") { t.Run("bulkGet", func(t *testing.T) {
bulkReq := secretstores.BulkGetSecretRequest{} bulkReq := secretstores.BulkGetSecretRequest{}
expectedData := map[string]map[string]string{ expectedData := map[string]map[string]string{
"conftestsecret": { "conftestsecret": {
@ -117,5 +116,5 @@ func ConformanceTests(t *testing.T, props map[string]string, store secretstores.
assert.Equal(t, m, resp.Data[k], "expected values to be equal") assert.Equal(t, m, resp.Data[k], "expected values to be equal")
} }
}) })
} })
} }

View File

@ -65,12 +65,11 @@ type TestConfig struct {
BadEtag string `mapstructure:"badEtag"` BadEtag string `mapstructure:"badEtag"`
} }
func NewTestConfig(component string, allOperations bool, operations []string, configMap map[string]interface{}) (TestConfig, error) { func NewTestConfig(component string, operations []string, configMap map[string]interface{}) (TestConfig, error) {
testConfig := TestConfig{ testConfig := TestConfig{
CommonConfig: utils.CommonConfig{ CommonConfig: utils.CommonConfig{
ComponentType: "state", ComponentType: "state",
ComponentName: component, ComponentName: component,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
BadEtag: "bad-etag", BadEtag: "bad-etag",

View File

@ -30,7 +30,6 @@ import (
type CommonConfig struct { type CommonConfig struct {
ComponentType string ComponentType string
ComponentName string ComponentName string
AllOperations bool
Operations map[string]struct{} Operations map[string]struct{}
} }
@ -45,11 +44,7 @@ var (
) )
func (cc CommonConfig) HasOperation(operation string) bool { func (cc CommonConfig) HasOperation(operation string) bool {
if cc.AllOperations {
return true
}
_, exists := cc.Operations[operation] _, exists := cc.Operations[operation]
return exists return exists
} }
@ -58,7 +53,6 @@ func (cc CommonConfig) CopyMap(config map[string]string) map[string]string {
for k, v := range config { for k, v := range config {
m[k] = v m[k] = v
} }
return m return m
} }
@ -134,13 +128,3 @@ func NewStringSet(values ...string) map[string]struct{} {
return set return set
} }
func Contains[V comparable](arr []V, str V) bool {
for _, a := range arr {
if a == str {
return true
}
}
return false
}
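
Pulled out as a runnable paraphrase of the helpers above (trimmed to what this commit touches, not the full `utils` package), the check is now pure set membership: with `AllOperations` gone, listing an operation in `tests.yml` is the only way to opt a component into an additional test.

```go
package main

import "fmt"

// CommonConfig mirrors the struct above after this change: the AllOperations
// flag is removed and only the additional operations from tests.yml remain.
type CommonConfig struct {
	ComponentType string
	ComponentName string
	Operations    map[string]struct{}
}

// NewStringSet converts the operations list into a set.
func NewStringSet(values ...string) map[string]struct{} {
	set := make(map[string]struct{}, len(values))
	for _, v := range values {
		set[v] = struct{}{}
	}
	return set
}

// HasOperation now only checks set membership.
func (cc CommonConfig) HasOperation(operation string) bool {
	_, exists := cc.Operations[operation]
	return exists
}

func main() {
	// e.g. the kafka entry: operations: ['bulkpublish', 'bulksubscribe']
	cc := CommonConfig{
		ComponentType: "pubsub",
		ComponentName: "kafka",
		Operations:    NewStringSet("bulkpublish", "bulksubscribe"),
	}
	fmt.Println(cc.HasOperation("bulkpublish")) // true
	fmt.Println(cc.HasOperation("publish"))     // false: required ops are no longer listed here
}
```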

View File

@ -21,14 +21,6 @@ import (
) )
func TestHasOperation(t *testing.T) { func TestHasOperation(t *testing.T) {
t.Run("all operations", func(t *testing.T) {
cc := CommonConfig{
ComponentType: "state",
ComponentName: "redis",
AllOperations: true,
}
assert.True(t, cc.HasOperation("op"))
})
t.Run("operations list", func(t *testing.T) { t.Run("operations list", func(t *testing.T) {
cc := CommonConfig{ cc := CommonConfig{
ComponentType: "state", ComponentType: "state",
@ -45,7 +37,6 @@ func TestCopyMap(t *testing.T) {
cc := CommonConfig{ cc := CommonConfig{
ComponentType: "state", ComponentType: "state",
ComponentName: "redis", ComponentName: "redis",
AllOperations: true,
} }
in := map[string]string{ in := map[string]string{
"k": "v", "k": "v",

View File

@ -35,12 +35,11 @@ type TestConfig struct {
utils.CommonConfig utils.CommonConfig
} }
func NewTestConfig(component string, allOperations bool, operations []string, conf map[string]interface{}) TestConfig { func NewTestConfig(component string, operations []string, conf map[string]interface{}) TestConfig {
tc := TestConfig{ tc := TestConfig{
CommonConfig: utils.CommonConfig{ CommonConfig: utils.CommonConfig{
ComponentType: "workflows", ComponentType: "workflows",
ComponentName: component, ComponentName: component,
AllOperations: allOperations,
Operations: utils.NewStringSet(operations...), Operations: utils.NewStringSet(operations...),
}, },
} }
@ -59,7 +58,7 @@ func ConformanceTests(t *testing.T, props map[string]string, workflowItem workfl
}) })
// Everything is within the same task since the workflow needs to persist between operations // Everything is within the same task since the workflow needs to persist between operations
if config.HasOperation("start") { t.Run("start", func(t *testing.T) {
testLogger.Info("Start test running...") testLogger.Info("Start test running...")
inputBytes, _ := json.Marshal(10) // Time that the activity within the workflow runs for inputBytes, _ := json.Marshal(10) // Time that the activity within the workflow runs for
@ -111,5 +110,5 @@ func ConformanceTests(t *testing.T, props map[string]string, workflowItem workfl
assert.Equal(t, "TestID", resp.Workflow.InstanceID) assert.Equal(t, "TestID", resp.Workflow.InstanceID)
}) })
testLogger.Info("Start test done.") testLogger.Info("Start test done.")
} })
} }