Signed-off-by: ItalyPaleAle <43508+ItalyPaleAle@users.noreply.github.com>
ItalyPaleAle 2023-02-14 23:33:17 +00:00
parent 210c8c3c59
commit 5cb6f4d2d7
17 changed files with 787 additions and 457 deletions

.github/scripts/.prettierrc.json (vendored, new file, 6 lines added)
View File

@@ -0,0 +1,6 @@
{
"trailingComma": "es5",
"tabWidth": 4,
"semi": false,
"singleQuote": true
}
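For reference, the settings above only take effect when Prettier is actually run over the scripts. A minimal check might look like the command below; the npx invocation and the glob are illustrative assumptions, not part of this commit:

# Illustrative only: check the bot scripts against the vendored Prettier config
npx prettier --config .github/scripts/.prettierrc.json --check ".github/scripts/**/*.{js,mjs}"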

View File

@@ -0,0 +1,7 @@
#!/bin/sh
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/certification/pubsub/aws/dynamodb"
# Run Terraform
terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@@ -0,0 +1,15 @@
#!/bin/sh
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/certification/pubsub/aws/dynamodb"
# Run Terraform
terraform init
terraform validate -no-color
terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
# Set variables for GitHub Actions
echo "AWS_REGION=us-east-1" >> $GITHUB_ENV
echo "STATE_AWS_DYNAMODB_TABLE_1=certification-test-terraform-basic-$UNIQUE_ID" >> $GITHUB_ENV
echo "STATE_AWS_DYNAMODB_TABLE_2=certification-test-terraform-partition-key-$UNIQUE_ID" >> $GITHUB_ENV

View File

@@ -0,0 +1,7 @@
#!/bin/sh
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/certification/pubsub/aws/snssqs"
# Run Terraform
terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@@ -0,0 +1,27 @@
#!/bin/sh
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/certification/pubsub/aws/snssqs"
# Run Terraform
terraform init
terraform validate -no-color
terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
# Set variables for GitHub Actions
echo "AWS_REGION=us-east-1" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_1=sqssnscerttest-q1-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_2=sqssnscerttest-q2-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_3=sqssnscerttest-q3-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_3=sqssnscerttest-t3-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_MVT=sqssnscerttest-q-mvt-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_MVT=sqssnscerttest-tp-mvt-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_DLIN=sqssnscerttest-dlq-in-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_DLOUT=sqssnscerttest-dlq-out-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_DLIN=sqssnscerttest-dlt-in-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_FIFO=sqssnscerttest-q-fifo-$UNIQUE_ID.fifo" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_FIFO=sqssnscerttest-t-fifo-$UNIQUE_ID.fifo" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID=sqssnscerttest-q-fifo-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_QUEUE_NODRT=sqssnscerttest-q-nodrt-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_NODRT=sqssnscerttest-t-nodrt-$UNIQUE_ID" >> $GITHUB_ENV

View File

@@ -0,0 +1,7 @@
#!/bin/sh
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/conformance/pubsub/aws/snssqs"
# Run Terraform
terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@@ -0,0 +1,16 @@
#!/bin/sh
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/conformance/pubsub/aws/snssqs"
# Run Terraform
terraform init
terraform validate -no-color
terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
# Set variables for GitHub Actions
echo "PUBSUB_AWS_SNSSQS_QUEUE=testQueue-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC=testTopic-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1=multiTopic1-$UNIQUE_ID" >> $GITHUB_ENV
echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2=multiTopic2-$UNIQUE_ID" >> $GITHUB_ENV

View File

@@ -0,0 +1,3 @@
#!/bin/sh
docker-compose -f .github/infrastructure/docker-compose-memcached.yml -p memcached up -d

View File

@@ -0,0 +1,3 @@
#!/bin/sh
docker-compose -f .github/infrastructure/docker-compose-temporal.yml -p temporal up -d

View File

@@ -1,49 +1,59 @@
// list of owners who can control the dapr-bot workflow
// TODO: Read owners from OWNERS file.
const owners = [
"yaron2",
"berndverst",
"artursouza",
"mukundansundar",
"halspang",
"tanvigour",
"pkedy",
"amuluyavarote",
"daixiang0",
"ItalyPaleAle",
"jjcollinge",
"pravinpushkar",
"shivamkm07",
"shubham1172",
"skyao",
"msfussell",
"Taction",
"RyanLettieri",
"DeepanshuA",
"yash-nisar",
"addjuarez",
"tmacam",
];
'addjuarez',
'amuluyavarote',
'artursouza',
'berndverst',
'daixiang0',
'DeepanshuA',
'halspang',
'ItalyPaleAle',
'jjcollinge',
'msfussell',
'mukundansundar',
'pkedy',
'pravinpushkar',
'RyanLettieri',
'shivamkm07',
'shubham1172',
'skyao',
'Taction',
'tmacam',
'yaron2',
'yash-nisar',
]
const docsIssueBodyTpl = (issueNumber) => `This issue was automatically created by \
const docsIssueBodyTpl = (
issueNumber
) => `This issue was automatically created by \
[Dapr Bot](https://github.com/dapr/dapr/blob/master/.github/workflows/dapr-bot.yml) because a \"documentation required\" label \
was added to dapr/components-contrib#${issueNumber}. \n\n\
TODO: Add more details as per [this template](.github/ISSUE_TEMPLATE/new-content-needed.md).`;
TODO: Add more details as per [this template](.github/ISSUE_TEMPLATE/new-content-needed.md).`
const newComponentBodyTpl = (issueNumber) => `This issue was automatically created by \
const newComponentBodyTpl = (
issueNumber
) => `This issue was automatically created by \
[Dapr Bot](https://github.com/dapr/dapr/blob/master/.github/workflows/dapr-bot.yml) because a \"new component\" label \
was added to dapr/components-contrib#${issueNumber}. \n\n\
Please register the component in [cmd/daprd/components](https://github.com/dapr/dapr/tree/master/cmd/daprd/components), \
similar to the ones in the folder (one file per component).`;
similar to the ones in the folder (one file per component).`
module.exports = async ({ github, context }) => {
if (context.eventName == "issue_comment" && context.payload.action == "created") {
await handleIssueCommentCreate({ github, context });
} else if ((context.eventName == "issues" || context.eventName == "pull_request") && context.payload.action == "labeled") {
await handleIssueOrPrLabeled({ github, context });
if (
context.eventName == 'issue_comment' &&
context.payload.action == 'created'
) {
await handleIssueCommentCreate({ github, context })
} else if (
(context.eventName == 'issues' ||
context.eventName == 'pull_request') &&
context.payload.action == 'labeled'
) {
await handleIssueOrPrLabeled({ github, context })
} else {
console.log(`[main] event ${context.eventName} not supported, exiting.`);
console.log(`[main] event ${context.eventName} not supported, exiting.`)
}
}
@@ -51,78 +61,86 @@ module.exports = async ({ github, context }) => {
* Handle issue comment create event.
*/
async function handleIssueCommentCreate({ github, context }) {
const payload = context.payload;
const issue = context.issue;
const username = (context.actor || "").toLowerCase();
const isFromPulls = !!payload.issue.pull_request;
const commentBody = payload.comment.body;
const payload = context.payload
const issue = context.issue
const username = (context.actor || '').toLowerCase()
const isFromPulls = !!payload.issue.pull_request
const commentBody = payload.comment.body
if (!commentBody) {
console.log("[handleIssueCommentCreate] comment body not found, exiting.");
return;
console.log(
'[handleIssueCommentCreate] comment body not found, exiting.'
)
return
}
const command = commentBody.split(" ")[0];
const command = commentBody.split(' ')[0]
// Commands that can be executed by anyone.
if (command === "/assign") {
await cmdAssign(github, issue, username, isFromPulls);
return;
if (command === '/assign') {
await cmdAssign(github, issue, username, isFromPulls)
return
}
// Commands that can only be executed by owners.
if (owners.map((v) => v.toLowerCase()).indexOf(username) < 0) {
console.log(`[handleIssueCommentCreate] user ${username} is not an owner, exiting.`);
return;
console.log(
`[handleIssueCommentCreate] user ${username} is not an owner, exiting.`
)
return
}
switch (command) {
case "/ok-to-test":
await cmdOkToTest(github, issue, isFromPulls);
break;
case '/ok-to-test':
await cmdOkToTest(github, issue, isFromPulls)
break
default:
console.log(`[handleIssueCommentCreate] command ${command} not found, exiting.`);
break;
console.log(
`[handleIssueCommentCreate] command ${command} not found, exiting.`
)
break
}
}
/**
* Handle issue or PR labeled event.
*/
async function handleIssueOrPrLabeled({ github, context }) {
const payload = context.payload;
const label = payload.label.name;
const issueNumber = payload.issue.number;
const payload = context.payload
const label = payload.label.name
const issueNumber = payload.issue.number
// This should not run in forks.
if (context.repo.owner !== "dapr") {
console.log("[handleIssueOrPrLabeled] not running in dapr repo, exiting.");
return;
if (context.repo.owner !== 'dapr') {
console.log(
'[handleIssueOrPrLabeled] not running in dapr repo, exiting.'
)
return
}
// Authorization is not required here because it's triggered by an issue label event.
// Only authorized users can add labels to issues.
if (label == "documentation required") {
if (label == 'documentation required') {
// Open a new docs issue
await github.rest.issues.create({
owner: "dapr",
repo: "docs",
owner: 'dapr',
repo: 'docs',
title: `New content needed for dapr/components-contrib#${issueNumber}`,
labels: ["content/missing-information", "created-by/dapr-bot"],
labels: ['content/missing-information', 'created-by/dapr-bot'],
body: docsIssueBodyTpl(issueNumber),
});
} else if (label == "new component") {
})
} else if (label == 'new component') {
// Open a new dapr issue
await github.rest.issues.create({
owner: "dapr",
repo: "dapr",
owner: 'dapr',
repo: 'dapr',
title: `Component registration for dapr/components-contrib#${issueNumber}`,
labels: ["area/components", "created-by/dapr-bot"],
labels: ['area/components', 'created-by/dapr-bot'],
body: newComponentBodyTpl(issueNumber),
});
})
} else {
console.log(`[handleIssueOrPrLabeled] label ${label} not supported, exiting.`);
console.log(
`[handleIssueOrPrLabeled] label ${label} not supported, exiting.`
)
}
}
@@ -135,11 +153,15 @@ async function handleIssueOrPrLabeled({ github, context }) {
*/
async function cmdAssign(github, issue, username, isFromPulls) {
if (isFromPulls) {
console.log("[cmdAssign] pull requests unsupported, skipping command execution.");
return;
console.log(
'[cmdAssign] pull requests unsupported, skipping command execution.'
)
return
} else if (issue.assignees && issue.assignees.length !== 0) {
console.log("[cmdAssign] issue already has assignees, skipping command execution.");
return;
console.log(
'[cmdAssign] issue already has assignees, skipping command execution.'
)
return
}
await github.rest.issues.addAssignees({
@@ -147,10 +169,9 @@ async function cmdAssign(github, issue, username, isFromPulls) {
repo: issue.repo,
issue_number: issue.number,
assignees: [username],
});
})
}
/**
* Trigger e2e test for the pull request.
* @param {*} github GitHub object reference
@@ -159,50 +180,56 @@ async function cmdAssign(github, issue, username, isFromPulls) {
*/
async function cmdOkToTest(github, issue, isFromPulls) {
if (!isFromPulls) {
console.log("[cmdOkToTest] only pull requests supported, skipping command execution.");
return;
console.log(
'[cmdOkToTest] only pull requests supported, skipping command execution.'
)
return
}
// Get pull request
const pull = await github.rest.pulls.get({
owner: issue.owner,
repo: issue.repo,
pull_number: issue.number
});
pull_number: issue.number,
})
if (pull && pull.data) {
// Get commit id and repo from pull head
const testPayload = {
pull_head_ref: pull.data.head.sha,
pull_head_repo: pull.data.head.repo.full_name,
command: "ok-to-test",
command: 'ok-to-test',
issue: issue,
};
}
// Fire repository_dispatch event to trigger certification test
await github.rest.repos.createDispatchEvent({
owner: issue.owner,
repo: issue.repo,
event_type: "certification-test",
event_type: 'certification-test',
client_payload: testPayload,
});
})
// Fire repository_dispatch event to trigger conformance test
await github.rest.repos.createDispatchEvent({
owner: issue.owner,
repo: issue.repo,
event_type: "conformance-test",
event_type: 'conformance-test',
client_payload: testPayload,
});
})
// Fire repository_dispatch event to trigger unit tests for other architectures and OS
await github.rest.repos.createDispatchEvent({
owner: issue.owner,
repo: issue.repo,
event_type: "build-all",
event_type: 'build-all',
client_payload: testPayload,
});
})
console.log(`[cmdOkToTest] triggered certification and conformance tests for ${JSON.stringify(testPayload)}`);
console.log(
`[cmdOkToTest] triggered certification and conformance tests for ${JSON.stringify(
testPayload
)}`
)
}
}

.github/scripts/test-info.mjs (vendored, new file, 504 lines added)
View File

@@ -0,0 +1,504 @@
import { argv, env, exit } from 'node:process'
import { writeFileSync } from 'node:fs'
/**
* List of all components
* @type {Record<string,ComponentTestProperties>}
*/
const components = {
'bindings.azure.blobstorage': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureBlobStorageAccount',
'AzureBlobStorageAccessKey',
'AzureBlobStorageContainer',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
],
},
'bindings.azure.cosmosdb': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureCosmosDB',
'AzureCosmosDBMasterKey',
'AzureCosmosDBUrl',
'AzureCosmosDB',
'AzureCosmosDBCollection',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
],
},
'bindings.azure.eventgrid': {
conformance: true,
requiredSecrets: [
'AzureEventGridNgrokToken',
'AzureEventGridAccessKey',
'AzureEventGridTopicEndpoint',
'AzureEventGridScope',
'AzureEventGridClientSecret',
'AzureEventGridClientId',
'AzureEventGridTenantId',
'AzureEventGridSubscriptionId',
],
},
'bindings.azure.eventhubs': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureEventHubsBindingsConnectionString',
'AzureBlobStorageAccount',
'AzureBlobStorageAccessKey',
'AzureEventHubsBindingsHub',
'AzureEventHubsBindingsNamespace',
'AzureEventHubsBindingsConsumerGroup',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientSecret',
'AzureResourceGroupName',
'AzureCertificationSubscriptionId',
'AzureEventHubsBindingsContainer',
'AzureIotHubEventHubConnectionString',
'AzureIotHubName',
'AzureIotHubBindingsConsumerGroup',
],
},
'bindings.azure.servicebusqueues': {
conformance: true,
certification: true,
requiredSecrets: ['AzureServiceBusConnectionString'],
},
'bindings.azure.storagequeues': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureBlobStorageAccessKey',
'AzureBlobStorageAccount',
'AzureBlobStorageQueue',
],
},
'bindings.cron': {
conformance: true,
certification: true,
},
'bindings.dubbo': {
certification: true,
},
'bindings.http': {
conformance: true,
},
'bindings.influx': {
conformance: true,
},
'bindings.kafka': {
certification: true,
},
'bindings.kafka-confluent': {
conformance: true,
},
'bindings.kafka-wurstmeister': {
conformance: true,
},
'bindings.kubemq': {
conformance: true,
},
'bindings.localstorage': {
certification: true,
},
'bindings.mqtt3-emqx': {
conformance: true,
},
'bindings.mqtt3-mosquitto': {
conformance: true,
},
'bindings.mqtt3-vernemq': {
conformance: true,
},
'bindings.postgres': {
conformance: true,
certification: true,
},
'bindings.rabbitmq': {
conformance: true,
certification: true,
},
'bindings.redis': {
certification: true,
},
'bindings.redis.v6': {
conformance: true,
},
'bindings.redis.v7': {
conformance: true,
},
'configuration.redis.v6': {
conformance: true,
},
'configuration.redis.v7': {
conformance: true,
},
'pubsub.aws.snssqs': {
certification: true,
requireAWSCredentials: true,
requireTerraform: true,
certificationSetup: 'certification-state.aws.snssqs-setup.sh',
certificationDestroy: 'certification-state.aws.snssqs-destroy.sh',
},
'pubsub.aws.snssqs.docker': {
conformance: true,
},
'pubsub.aws.snssqs.terraform': {
conformance: true,
requireAWSCredentials: true,
requireTerraform: true,
conformanceSetup: 'conformance-state.aws.snssqs-setup.sh',
conformanceDestroy: 'conformance-state.aws.snssqs-destroy.sh',
},
'pubsub.azure.eventhubs': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureEventHubsPubsubTopicActiveConnectionString',
'AzureEventHubsPubsubNamespace',
'AzureEventHubsPubsubConsumerGroup',
'AzureEventHubsPubsubNamespaceConnectionString',
'AzureBlobStorageAccount',
'AzureBlobStorageAccessKey',
'AzureEventHubsPubsubContainer',
'AzureIotHubName',
'AzureIotHubEventHubConnectionString',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
'AzureResourceGroupName',
'AzureCertificationSubscriptionId',
],
},
'pubsub.azure.servicebus.queues': {
conformance: true,
requiredSecrets: ['AzureServiceBusConnectionString'],
},
'pubsub.azure.servicebus.topics': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureServiceBusConnectionString',
'AzureServiceBusNamespace',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
],
},
'pubsub.hazelcast': {
conformance: true,
},
'pubsub.in-memory': {
conformance: true,
},
'pubsub.kafka': {
certification: true,
},
'pubsub.kafka-confluent': {
conformance: true,
},
'pubsub.kafka-wurstmeister': {
conformance: true,
},
'pubsub.kubemq': {
conformance: true,
},
'pubsub.mqtt3': {
certification: true,
},
'pubsub.mqtt3-emqx': {
conformance: true,
},
'pubsub.mqtt3-vernemq': {
conformance: true,
},
'pubsub.natsstreaming': {
conformance: true,
},
'pubsub.pulsar': {
conformance: true,
certification: true,
},
'pubsub.rabbitmq': {
conformance: true,
certification: true,
},
'pubsub.redis.v6': {
conformance: true,
},
'pubsub.solace': {
conformance: true,
},
'secretstores.azure.keyvault': {
certification: true,
requiredSecrets: [
'AzureKeyVaultName',
'AzureKeyVaultSecretStoreTenantId',
'AzureKeyVaultSecretStoreClientId',
'AzureKeyVaultSecretStoreServicePrincipalClientId',
'AzureKeyVaultSecretStoreServicePrincipalClientSecret',
'AzureContainerRegistryName',
'AzureResourceGroupName',
],
requiredCerts: ['AzureKeyVaultSecretStoreCert'],
},
'secretstores.azure.keyvault.certificate': {
conformance: true,
requiredSecrets: [
'AzureKeyVaultName',
'AzureKeyVaultSecretStoreTenantId',
'AzureKeyVaultSecretStoreClientId',
],
requiredCerts: ['AzureKeyVaultSecretStoreCert'],
},
'secretstores.azure.keyvault.serviceprincipal': {
conformance: true,
requiredSecrets: [
'AzureKeyVaultName',
'AzureKeyVaultSecretStoreTenantId',
'AzureKeyVaultSecretStoreServicePrincipalClientId',
'AzureKeyVaultSecretStoreServicePrincipalClientSecret',
],
},
'secretstores.hashicorp.vault': {
conformance: true,
certification: true,
},
'secretstores.kubernetes': {
conformance: true,
},
'secretstores.local.env': {
conformance: true,
certification: true,
},
'secretstores.local.file': {
conformance: true,
certification: true,
},
'state.aws.dynamodb': {
certification: true,
requireAWSCredentials: true,
requireTerraform: true,
certificationSetup: 'certification-state.aws.dynamodb-setup.sh',
certificationDestroy: 'certification-state.aws.dynamodb-destroy.sh',
},
'state.aws.dynamodb.terraform': {
conformance: true,
requireAWSCredentials: true,
requireTerraform: true,
conformanceSetup: 'conformance-state.aws.dynamodb-setup.sh',
conformanceDestroy: 'conformance-state.aws.dynamodb-destroy.sh',
},
'state.azure.blobstorage': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureBlobStorageAccount',
'AzureBlobStorageAccessKey',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
'AzureBlobStorageContainer',
],
},
'state.azure.cosmosdb': {
conformance: true,
certification: true,
requiredSecrets: [
'AzureCosmosDBMasterKey',
'AzureCosmosDBUrl',
'AzureCosmosDB',
'AzureCosmosDBCollection',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
],
},
'state.azure.sql': {
conformance: true,
requiredSecrets: [
'AzureResourceGroupName',
'AzureSqlServerName',
'AzureSqlServerConnectionString',
],
},
'state.azure.tablestorage': {
certification: true,
requiredSecrets: [
'AzureBlobStorageAccount',
'AzureBlobStorageAccessKey',
'AzureCertificationTenantId',
'AzureCertificationServicePrincipalClientId',
'AzureCertificationServicePrincipalClientSecret',
],
},
'state.azure.tablestorage.cosmosdb': {
conformance: true,
requiredSecrets: [
'AzureCosmosDBTableAPI',
'AzureCosmosDBTableAPIMasterKey',
],
},
'state.azure.tablestorage.storage': {
conformance: true,
requiredSecrets: [
'AzureBlobStorageAccessKey',
'AzureBlobStorageAccount',
],
},
'state.cassandra': {
conformance: true,
certification: true,
},
'state.cloudflare.workerskv': {
conformance: true,
requireCloudflareCredentials: true,
},
'state.cockroachdb': {
conformance: true,
certification: true,
},
'state.in-memory': {
conformance: true,
},
'state.memcached': {
conformance: true,
certification: true,
conformanceSetup: 'conformance-state.memcached-setup.sh',
},
'state.mongodb': {
conformance: true,
certification: true,
},
'state.mysql': {
certification: true,
},
'state.mysql.mariadb': {
conformance: true,
},
'state.mysql.mysql': {
conformance: true,
},
'state.postgresql': {
conformance: true,
certification: true,
},
'state.redis': {
certification: true,
},
'state.redis.v6': {
conformance: true,
},
'state.redis.v7': {
conformance: true,
},
'state.rethinkdb': {
conformance: true,
},
'state.sqlite': {
conformance: true,
certification: true,
},
'state.sqlserver': {
conformance: true,
certification: true,
requiredSecrets: ['AzureSqlServerConnectionString'],
},
'workflows.temporal': {
conformance: true,
conformanceSetup: 'conformance-workflows.temporal-setup.sh',
},
}
/**
* Type for the objects in the components dictionary
* @typedef {Object} ComponentTestProperties
* @property {boolean?} conformance If true, enables for conformance tests
* @property {boolean?} certification If true, enables for certification tests
* @property {string[]?} requiredSecrets Required secrets (if not empty, test becomes "cloud-only")
* @property {string[]?} requiredCerts Required certs (if not empty, test becomes "cloud-only")
* @property {boolean?} requireAWSCredentials If true, requires AWS credentials and makes the test "cloud-only"
* @property {boolean?} requireCloudflareCredentials If true, requires Cloudflare credentials and makes the test "cloud-only"
* @property {boolean?} requireTerraform If true, requires Terraform
* @property {string?} conformanceSetup Setup script for conformance tests
* @property {string?} conformanceDestroy Destroy script for conformance tests
* @property {string?} certificationSetup Setup script for certification tests
* @property {string?} certificationDestroy Destroy script for certification tests
*/
/**
* Test matrix object
* @typedef {Object} TestMatrixElement
* @property {string} component Component name
* @property {string?} required-secrets Required secrets
* @property {string?} required-certs Required certs
* @property {boolean?} require-aws-credentials Requires AWS credentials
* @property {boolean?} require-cloudflare-credentials Requires Cloudflare credentials
* @property {boolean?} require-terraform Requires Terraform
* @property {string?} setup-script Setup script
* @property {string?} destroy-script Destroy script
*/
/**
* Returns the list of components for the matrix.
* @param {'conformance'|'certification'} testKind Kind of test
* @param {boolean} enableCloudTests If true, returns components that require secrets or credentials too (which can't be used as part of the regular CI in a PR)
* @returns {TestMatrixElement[]} Test matrix object
*/
function GenerateMatrix(testKind, enableCloudTests) {
/** @type {TestMatrixElement[]} */
const res = []
for (const name in components) {
const comp = components[name]
if (!comp[testKind]) {
continue
}
// Skip cloud-only tests if enableCloudTests is false
if (!enableCloudTests) {
if (comp.requiredSecrets?.length || comp.requiredCerts?.length || comp.requireAWSCredentials || comp.requireCloudflareCredentials) {
continue
}
}
// Add the component to the array
res.push({
component: name,
"required-secrets": comp.requiredSecrets?.length ? comp.requiredSecrets : undefined,
"required-certs": comp.requiredCerts?.length ? comp.requiredCerts : undefined,
"require-aws-credentials": comp.requireAWSCredentials ? 'true' : undefined,
"require-cloudflare-credentials": comp.requireCloudflareCredentials ? 'true' : undefined,
"require-terraform": comp.requireTerraform ? 'true' : undefined,
"setup-script": comp[testKind+'Setup'] || undefined,
"destroy-script": comp[testKind+'Destroy'] || undefined,
})
}
return res
}
// Upon invocation, writes the matrix to the $GITHUB_OUTPUT file
if (!env.GITHUB_OUTPUT) {
console.error('Missing environment variable GITHUB_OUTPUT')
exit(1)
}
if (argv.length < 3 || !['conformance', 'certification'].includes(argv[2])) {
console.error("First parameter must be 'conformance' or 'certification'")
exit(1)
}
if (argv.length < 4 || !['true', 'false'].includes(argv[3])) {
console.error("First parameter must be 'true' or 'false'")
exit(1)
}
const matrixObj = GenerateMatrix(argv[2], argv[3] == 'true')
console.log('Generated matrix:\n\n'+JSON.stringify(matrixObj, null, ' '))
// GITHUB_OUTPUT expects "name=value" lines; append so any other outputs from the step are preserved
writeFileSync(env.GITHUB_OUTPUT, `test-matrix=${JSON.stringify(matrixObj)}\n`, { flag: 'a' })
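As a quick local sanity check of the generator (outside GitHub Actions), something like the following should work; the temporary output path is an assumption, since on a runner GITHUB_OUTPUT is provided automatically:

# Illustrative local run: build the non-cloud conformance matrix
export GITHUB_OUTPUT=/tmp/test-matrix.out
node .github/scripts/test-info.mjs conformance false
cat "$GITHUB_OUTPUT"    # expect a single test-matrix=<JSON array> line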

View File

@@ -31,7 +31,7 @@ jobs:
# subset of the certification tests. This allows all the tests not requiring
# secrets to be executed on pull requests.
generate-matrix:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
steps:
- name: Parse repository_dispatch payload
if: github.event_name == 'repository_dispatch'
@@ -42,98 +42,16 @@ jobs:
echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV
fi
- name: Install yq
- name: Generate the test matrix
id: generate-matrix
run: |
sudo snap install yq
- name: Specify components that can be run on every PR
id: pr-components
run: |
PR_COMPONENTS=$(yq -I0 --tojson eval - << EOF
- pubsub.kafka
- pubsub.rabbitmq
- pubsub.pulsar
- pubsub.mqtt3
- state.mongodb
- state.redis
- state.cockroachdb
- state.postgresql
- state.cassandra
- state.memcached
- state.mysql
- state.sqlite
- bindings.dubbo
- bindings.kafka
- bindings.redis
- bindings.cron
- secretstores.local.env
- secretstores.local.file
- secretstores.hashicorp.vault
- bindings.rabbitmq
- bindings.localstorage
- bindings.postgres
EOF
)
echo "pr-components=$PR_COMPONENTS" >> $GITHUB_OUTPUT
- name: Specify components requiring cloud resources to run
id: cloud-components
run: |
# Skip cloud-components on PRs, requires scheduled run trigger
# or approver to trigger via respository-dispatch on /ok-to-test
if [ "${{ github.event_name }}" = "pull_request" ]; then
echo "cloud-components=[]" >> $GITHUB_OUTPUT
exit
# Do not generate include tests on PRs
node .github/scripts/test-info.mjs certification false
else
# Include cloud tests
node .github/scripts/test-info.mjs certification true
fi
# Reuse the same cloud infrastructure as conformance.yml
#
# Unfortunately, Azure secrets can't have underscores in
# names, while environment variables with hyphens ('-') are
# troublesome.
#
# We work around here by leveraging the fact that
# environment variable names are case sensitive, so
# CamelCase would still work.
#
# That is slightly better than something like
# AZURECOSMOSDBMASTERKEY, which is extremely hard to read
# and errorprone.
#
# Only list the secrets you need for the component.
CRON_COMPONENTS=$(yq -I0 --tojson eval - << EOF
- component: secretstores.azure.keyvault
required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreClientId,AzureKeyVaultSecretStoreServicePrincipalClientId,AzureKeyVaultSecretStoreServicePrincipalClientSecret,AzureContainerRegistryName,AzureResourceGroupName
required-certs: AzureKeyVaultSecretStoreCert
- component: state.sqlserver
required-secrets: AzureSqlServerConnectionString
- component: bindings.azure.servicebusqueues
required-secrets: AzureServiceBusConnectionString
- component: bindings.azure.cosmosdb
required-secrets: AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection,AzureCosmosDBMasterKey,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
- component: bindings.azure.eventhubs
required-secrets: AzureEventHubsBindingsConnectionString,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureEventHubsBindingsHub,AzureEventHubsBindingsNamespace,AzureEventHubsBindingsConsumerGroup,AzureCertificationServicePrincipalClientId,AzureCertificationTenantId,AzureCertificationServicePrincipalClientSecret,AzureResourceGroupName,AzureCertificationSubscriptionId,AzureEventHubsBindingsContainer,AzureIotHubEventHubConnectionString,AzureIotHubName,AzureIotHubBindingsConsumerGroup
- component: pubsub.azure.eventhubs
required-secrets: AzureEventHubsPubsubTopicActiveConnectionString,AzureEventHubsPubsubNamespace,AzureEventHubsPubsubNamespaceConnectionString,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureEventHubsPubsubContainer,AzureIotHubName,AzureIotHubEventHubConnectionString,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret,AzureResourceGroupName,AzureCertificationSubscriptionId
- component: pubsub.azure.servicebus.topics
required-secrets: AzureServiceBusConnectionString,AzureServiceBusNamespace, AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
- component: bindings.azure.blobstorage
required-secrets: AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureBlobStorageContainer,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
- component: bindings.azure.storagequeues
required-secrets: AzureBlobStorageAccount, AzureBlobStorageAccessKey
- component: state.azure.tablestorage
required-secrets: AzureBlobStorageAccount, AzureBlobStorageAccessKey, AzureCertificationTenantId, AzureCertificationServicePrincipalClientId, AzureCertificationServicePrincipalClientSecret
- component: state.azure.blobstorage
required-secrets: AzureBlobStorageContainer,AzureBlobStorageAccount, AzureBlobStorageAccessKey, AzureCertificationTenantId, AzureCertificationServicePrincipalClientId, AzureCertificationServicePrincipalClientSecret
- component: state.azure.cosmosdb
required-secrets: AzureCosmosDBMasterKey, AzureCosmosDBUrl, AzureCosmosDB, AzureCosmosDBCollection, AzureCertificationTenantId, AzureCertificationServicePrincipalClientId, AzureCertificationServicePrincipalClientSecret
- component: pubsub.aws.snssqs
terraform-dir: pubsub/aws/snssqs
- component: state.aws.dynamodb
terraform-dir: state/aws/dynamodb
EOF
)
echo "cloud-components=$CRON_COMPONENTS" >> $GITHUB_OUTPUT
- name: Create PR comment
if: env.PR_NUMBER != ''
@@ -150,24 +68,21 @@ jobs:
Commit ref: ${{ env.CHECKOUT_REF }}
outputs:
pr-components: ${{ steps.pr-components.outputs.pr-components }}
cloud-components: ${{ steps.cloud-components.outputs.cloud-components }}
test-matrix: ${{ steps.generate-matrix.outputs.test-matrix }}
certification:
name: ${{ matrix.component }} certification
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
defaults:
run:
shell: bash
needs: generate-matrix
env:
UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}}
UNIQUE_ID: "${{ github.run_id }}-${{ github.run_attempt }}"
strategy:
fail-fast: false # Keep running even if one component fails
matrix:
component: ${{ fromJson(needs.generate-matrix.outputs.pr-components) }}
include: ${{ fromJson(needs.generate-matrix.outputs.cloud-components) }}
matrix: ${{ fromJson(needs.generate-matrix.outputs.test-matrix) }}
steps:
- name: Set default payload repo and ref
@@ -191,8 +106,7 @@ jobs:
- name: Setup test output
run: |
export TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report
echo "TEST_OUTPUT_FILE_PREFIX=$TEST_OUTPUT_FILE_PREFIX" >> $GITHUB_ENV
echo "TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report" >> $GITHUB_ENV
- name: Configure certification test and source path
run: |
@@ -214,7 +128,6 @@ jobs:
# az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
# Using az cli to query keyvault as Azure/get-keyvault-secrets@v1 is deprecated
- name: Setup secrets
id: get-azure-secrets
if: matrix.required-secrets != ''
env:
VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }}
@@ -245,105 +158,38 @@ jobs:
echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV
done
- name: Get current time
- name: Set current time
run: |
echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
if: matrix.terraform-dir != ''
if: matrix.require-terraform == 'true'
- name: Set AWS Region
if: contains(matrix.component, 'aws')
- name: Set Cloudflare env vars
if: matrix.require-cloudflare-credentials == 'true'
run: |
AWS_REGION="us-west-1"
echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV
echo "CLOUDFLARE_ACCOUNT_ID=${{ secrets.CLOUDFLARE_ACCOUNT_ID }}" >> $GITHUB_ENV
echo "CLOUDFLARE_API_TOKEN=${{ secrets.CLOUDFLARE_API_TOKEN }}" >> $GITHUB_ENV
- name: Set AWS env vars
if: matrix.require-aws-credentials == 'true'
run: |
echo "AWS_REGION=us-west-1" >> $GITHUB_ENV
echo "AWS_ACCESS_KEY=${{ secrets.AWS_ACCESS_KEY }}" >> $GITHUB_ENV
echo "AWS_SECRET_KEY=${{ secrets.AWS_SECRET_KEY }}" >> $GITHUB_ENV
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
if: matrix.require-aws-credentials == 'true'
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
aws-access-key-id: "${{ secrets.AWS_ACCESS_KEY }}"
aws-secret-access-key: "${{ secrets.AWS_SECRET_KEY }}"
aws-region: "${{ env.AWS_REGION }}"
if: matrix.terraform-dir != ''
- name: Terraform Init
id: init
run: terraform init
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
- name: Terraform Validate
id: validate
run: terraform validate -no-color
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
- name: Terraform Plan
id: plan
run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
- name: Terraform Apply
run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
continue-on-error: true
- name: Create aws.snssqs specific variables
if: contains(matrix.component, 'snssqs')
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
run: |
PUBSUB_AWS_SNSSQS_QUEUE_1="sqssnscerttest-q1-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_1=$PUBSUB_AWS_SNSSQS_QUEUE_1" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_2="sqssnscerttest-q2-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_2=$PUBSUB_AWS_SNSSQS_QUEUE_2" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_3="sqssnscerttest-q3-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_3=$PUBSUB_AWS_SNSSQS_QUEUE_3" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_3="sqssnscerttest-t3-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_TOPIC_3=$PUBSUB_AWS_SNSSQS_TOPIC_3" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_MVT="sqssnscerttest-q-mvt-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_MVT=$PUBSUB_AWS_SNSSQS_QUEUE_MVT" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_MVT="sqssnscerttest-tp-mvt-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_TOPIC_MVT=$PUBSUB_AWS_SNSSQS_TOPIC_MVT" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_DLIN="sqssnscerttest-dlq-in-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_DLIN=$PUBSUB_AWS_SNSSQS_QUEUE_DLIN" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_DLOUT="sqssnscerttest-dlq-out-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_DLOUT=$PUBSUB_AWS_SNSSQS_QUEUE_DLOUT" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_DLIN="sqssnscerttest-dlt-in-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_TOPIC_DLIN=$PUBSUB_AWS_SNSSQS_TOPIC_DLIN" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_FIFO="sqssnscerttest-q-fifo-${{env.UNIQUE_ID}}.fifo"
echo "PUBSUB_AWS_SNSSQS_QUEUE_FIFO=$PUBSUB_AWS_SNSSQS_QUEUE_FIFO" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_FIFO="sqssnscerttest-t-fifo-${{env.UNIQUE_ID}}.fifo"
echo "PUBSUB_AWS_SNSSQS_TOPIC_FIFO=$PUBSUB_AWS_SNSSQS_TOPIC_FIFO" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID="sqssnscerttest-q-fifo-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID=$PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_QUEUE_NODRT="sqssnscerttest-q-nodrt-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_QUEUE_NODRT=$PUBSUB_AWS_SNSSQS_QUEUE_NODRT" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_NODRT="sqssnscerttest-t-nodrt-${{env.UNIQUE_ID}}"
echo "PUBSUB_AWS_SNSSQS_TOPIC_NODRT=$PUBSUB_AWS_SNSSQS_TOPIC_NODRT" >> $GITHUB_ENV
AWS_REGION="us-east-1"
echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV
- name: Create state aws.dynamodb specific variables
if: contains(matrix.component, 'dynamodb')
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
run: |
STATE_AWS_DYNAMODB_TABLE_1="certification-test-terraform-basic-${{ env.UNIQUE_ID }}"
echo "STATE_AWS_DYNAMODB_TABLE_1=$STATE_AWS_DYNAMODB_TABLE_1" >> $GITHUB_ENV
STATE_AWS_DYNAMODB_TABLE_2="certification-test-terraform-partition-key-${{ env.UNIQUE_ID }}"
echo "STATE_AWS_DYNAMODB_TABLE_2=$STATE_AWS_DYNAMODB_TABLE_2" >> $GITHUB_ENV
AWS_REGION="us-east-1"
echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV
- name: Run setup script
if: matrix.setup-script != ''
run: ${{ matrix.setup-script }}
- name: Set up Go
uses: actions/setup-go@v3
@@ -363,7 +209,12 @@ jobs:
go mod tidy -compat=1.19
git diff --exit-code ./go.mod
git diff --exit-code ./go.sum
- name: Catch setup failures
if: failure()
run: |
echo "CERTIFICATION_FAILURE=true" >> $GITHUB_ENV
- name: Run tests
continue-on-error: false
working-directory: ${{ env.TEST_PATH }}
@@ -462,11 +313,7 @@ jobs:
name: ${{ matrix.component }}_certification_test
path: ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.*
- name: Terraform Destroy
continue-on-error: true
run: terraform destroy -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
# HERE: RUN TEARDOWN SCRIPTS
post_job:
name: Post-completion
@@ -500,9 +347,7 @@ jobs:
uses: actions/github-script@v6
with:
script: |
const prComponents = ('${{ needs.generate-matrix.outputs.pr-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.pr-components }}')) || []
const cloudComponents = ('${{ needs.generate-matrix.outputs.cloud-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.cloud-components }}')) || []
const allComponents = [...prComponents, ...cloudComponents]
const allComponents = JSON.parse('${{ needs.generate-matrix.outputs.test-matrix }}')
const basePath = '${{ steps.testresults.outputs.download-path }}'
const testType = 'certification'
@@ -530,7 +375,7 @@ jobs:
let found = false
let success = false
try {
let read =fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8')
let read = fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8')
read = read.split('\n')[0]
switch (read) {
case '1':

View File

@@ -33,7 +33,7 @@ jobs:
# subset of the conformance tests. This allows all the tests not requiring
# secrets to be executed on pull requests.
generate-matrix:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
steps:
- name: Parse repository_dispatch payload
if: github.event_name == 'repository_dispatch'
@@ -44,126 +44,16 @@ jobs:
echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV
fi
- name: Install yq
run: |
sudo snap install yq
- name: Specify components not requiring secrets nor certs
id: pr-components
run: |
PR_COMPONENTS=$(yq -I0 --tojson eval - << EOF
- bindings.cron
- bindings.http
- bindings.influx
- bindings.kafka-wurstmeister
- bindings.kafka-confluent
- bindings.mqtt3-emqx
- bindings.mqtt3-mosquitto
- bindings.mqtt3-vernemq
- bindings.postgres
- bindings.redis.v6
- bindings.redis.v7
- bindings.kubemq
- bindings.rabbitmq
- pubsub.aws.snssqs.docker
- configuration.redis.v6
- configuration.redis.v7
- pubsub.hazelcast
- pubsub.in-memory
- pubsub.mqtt3-emqx
- pubsub.mqtt3-vernemq
- pubsub.natsstreaming
- pubsub.pulsar
- pubsub.rabbitmq
- pubsub.redis.v6
- pubsub.kafka-wurstmeister
- pubsub.kafka-confluent
- pubsub.kubemq
- pubsub.solace
- secretstores.kubernetes
- secretstores.localenv
- secretstores.localfile
- secretstores.hashicorp.vault
- state.cassandra
- state.memcached
- state.mongodb
- state.mysql.mysql
- state.mysql.mariadb
- state.postgresql
- state.redis.v6
- state.redis.v7
- state.sqlite
- state.sqlserver
- state.in-memory
- state.cockroachdb
- workflows.temporal
- state.rethinkdb
EOF
)
echo "pr-components=$PR_COMPONENTS" >> $GITHUB_OUTPUT
- name: Specify components requiring secrets or certs
id: cron-components
- name: Generate the test matrix
id: generate-matrix
run: |
if [ "${{ github.event_name }}" = "pull_request" ]; then
echo "cron-components=[]" >> $GITHUB_OUTPUT
exit
# Do not generate include tests on PRs
node .github/scripts/test-info.mjs conformance false
else
# Include cloud tests
node .github/scripts/test-info.mjs conformance true
fi
# Unfortunately, Azure secrets can't have underscores in
# names, while environment variables with hyphens ('-') are
# troublesome.
#
# We work around here by leveraging the fact that
# environment variable names are case sensitive, so
# CamelCase would still work.
#
# That is slightly better than something like
# AZURECOSMOSDBMASTERKEY, which is extremely hard to read
# and errorprone.
#
# Only list the secrets you need for the component.
CRON_COMPONENTS=$(yq -I0 --tojson eval - << EOF
- component: state.azure.blobstorage
required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount
- component: state.azure.cosmosdb
required-secrets: AzureCosmosDBMasterKey,AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection
- component: state.azure.sql
required-secrets: AzureResourceGroupName, AzureSqlServerName, AzureSqlServerConnectionString
- component: state.azure.tablestorage.storage
required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount
- component: state.azure.tablestorage.cosmosdb
required-secrets: AzureCosmosDBTableAPI,AzureCosmosDBTableAPIMasterKey
- component: pubsub.azure.eventhubs
required-secrets: AzureEventHubsPubsubNamespaceConnectionString,AzureEventHubsPubsubConsumerGroup,AzureBlobStorageAccessKey,AzureBlobStorageAccount,AzureEventHubsPubsubContainer
- component: pubsub.azure.servicebus.topics
required-secrets: AzureServiceBusConnectionString
- component: pubsub.azure.servicebus.queues
required-secrets: AzureServiceBusConnectionString
- component: bindings.azure.blobstorage
required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount
- component: bindings.azure.eventgrid
required-secrets: AzureEventGridNgrokToken,AzureEventGridAccessKey,AzureEventGridTopicEndpoint,AzureEventGridScope,AzureEventGridClientSecret,AzureEventGridClientId,AzureEventGridTenantId,AzureEventGridSubscriptionId
- component: bindings.azure.eventhubs
required-secrets: AzureEventHubsBindingsConnectionString,AzureEventHubsBindingsConsumerGroup,AzureBlobStorageAccessKey,AzureBlobStorageAccount,AzureEventHubsBindingsContainer
- component: bindings.azure.servicebusqueues
required-secrets: AzureServiceBusConnectionString
- component: bindings.azure.storagequeues
required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount,AzureBlobStorageQueue
- component: secretstores.azure.keyvault.certificate
required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreClientId
required-certs: AzureKeyVaultSecretStoreCert
- component: secretstores.azure.keyvault.serviceprincipal
required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreServicePrincipalClientId,AzureKeyVaultSecretStoreServicePrincipalClientSecret
- component: bindings.azure.cosmosdb
required-secrets: AzureCosmosDBMasterKey,AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection
- component: pubsub.aws.snssqs.terraform
terraform-dir: pubsub/aws/snssqs
- component: state.aws.dynamodb.terraform
terraform-dir: state/aws/dynamodb
- component: state.cloudflare.workerskv
EOF
)
echo "cron-components=$CRON_COMPONENTS" >> $GITHUB_OUTPUT
- name: Create PR comment
if: env.PR_NUMBER != ''
@@ -180,12 +70,11 @@ jobs:
Commit ref: ${{ env.CHECKOUT_REF }}
outputs:
pr-components: ${{ steps.pr-components.outputs.pr-components }}
cron-components: ${{ steps.cron-components.outputs.cron-components }}
test-matrix: ${{ steps.generate-matrix.outputs.test-matrix }}
conformance:
name: ${{ matrix.component }} conformance
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
env:
# Version of Node.js to use
# Currently used by the Cloudflare components
@@ -198,9 +87,7 @@ jobs:
strategy:
fail-fast: false # Keep running even if one component fails
matrix:
component: ${{ fromJson(needs.generate-matrix.outputs.pr-components) }}
include: ${{ fromJson(needs.generate-matrix.outputs.cron-components) }}
matrix: ${{ fromJson(needs.generate-matrix.outputs.test-matrix) }}
steps:
- name: Set default payload repo and ref
@@ -239,7 +126,6 @@ jobs:
# az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
# Using az cli to query keyvault as Azure/get-keyvault-secrets@v1 is deprecated
- name: Setup secrets
id: get-azure-secrets
if: matrix.required-secrets != ''
env:
VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }}
@@ -289,53 +175,33 @@ jobs:
echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v2
if: matrix.terraform-dir != ''
uses: hashicorp/setup-terraform@v2
- name: Set Cloudflare env vars
if: matrix.require-cloudflare-credentials == 'true'
run: |
echo "CLOUDFLARE_ACCOUNT_ID=${{ secrets.CLOUDFLARE_ACCOUNT_ID }}" >> $GITHUB_ENV
echo "CLOUDFLARE_API_TOKEN=${{ secrets.CLOUDFLARE_API_TOKEN }}" >> $GITHUB_ENV
- name: Set AWS env vars
if: matrix.require-aws-credentials == 'true'
run: |
echo "AWS_ACCESS_KEY=${{ secrets.AWS_ACCESS_KEY }}" >> $GITHUB_ENV
echo "AWS_SECRET_KEY=${{ secrets.AWS_SECRET_KEY }}" >> $GITHUB_ENV
- name: Configure AWS Credentials
if: matrix.require-aws-credentials == 'true'
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
aws-region: us-west-1
if: matrix.terraform-dir != ''
- name: Terraform Init
id: init
run: terraform init
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
- name: Terraform Validate
id: validate
run: terraform validate -no-color
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
- name: Run setup script
if: matrix.setup-script != ''
run: ${{ matrix.setup-script }}
- name: Terraform Plan
id: plan
run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
- name: Terraform Apply
run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
if: matrix.terraform-dir != ''
continue-on-error: true
- name: Create aws.snssqs variables
run: |
PUBSUB_AWS_SNSSQS_QUEUE="testQueue-${{ env.UNIQUE_ID }}"
echo "PUBSUB_AWS_SNSSQS_QUEUE=$PUBSUB_AWS_SNSSQS_QUEUE" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC="testTopic-${{ env.UNIQUE_ID }}"
echo "PUBSUB_AWS_SNSSQS_TOPIC=$PUBSUB_AWS_SNSSQS_TOPIC" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1="multiTopic1-${{ env.UNIQUE_ID }}"
echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1=$PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1" >> $GITHUB_ENV
PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2="multiTopic2-${{ env.UNIQUE_ID }}"
echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2=$PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2" >> $GITHUB_ENV
if: contains(matrix.component, 'snssqs')
- name: Create aws.dynamodb variables
run: |
STATE_AWS_DYNAMODB_TABLE_1="conformance-test-terraform-basic-${{ env.UNIQUE_ID }}"
@@ -352,10 +218,6 @@ jobs:
run: docker-compose -f ./.github/infrastructure/docker-compose-redis7.yml -p redis up -d
if: contains(matrix.component, 'redis.v7')
- name: Start Temporal
run: docker-compose -f ./.github/infrastructure/docker-compose-temporal.yml -p temporal up -d
if: contains(matrix.component, 'temporal')
- name: Start MongoDB
uses: supercharge/mongodb-github-action@1.3.0
with:
@@ -375,10 +237,6 @@ jobs:
run: docker-compose -f ./.github/infrastructure/docker-compose-confluent.yml -p confluent up -d
if: contains(matrix.component, 'confluent')
- name: Start memcached
run: docker-compose -f ./.github/infrastructure/docker-compose-memcached.yml -p memcached up -d
if: contains(matrix.component, 'memcached')
- name: Start natsstreaming
run: docker-compose -f ./.github/infrastructure/docker-compose-natsstreaming.yml -p natsstreaming up -d
if: contains(matrix.component, 'natsstreaming')
@@ -530,6 +388,11 @@ jobs:
echo "AzureSqlServerDbName=$AzureSqlServerDbName" >> $GITHUB_ENV
if: contains(matrix.component, 'azure.sql')
- name: Catch setup failures
if: failure()
run: |
echo "CONFORMANCE_FAILURE=true" >> $GITHUB_ENV
- name: Run tests
continue-on-error: true
env:

View File

@@ -510,9 +510,9 @@ func loadSecretStore(tc TestComponent) secretstores.SecretStore {
store = ss_azure.NewAzureKeyvaultSecretStore(testLogger)
case "kubernetes":
store = ss_kubernetes.NewKubernetesSecretStore(testLogger)
case "localenv":
case "local.env":
store = ss_local_env.NewEnvSecretStore(testLogger)
case "localfile":
case "local.file":
store = ss_local_file.NewLocalSecretStore(testLogger)
case "hashicorp.vault":
store = ss_hashicorp_vault.NewHashiCorpVaultSecretStore(testLogger)