diff --git a/.build-tools/component-folders.json b/.build-tools/component-folders.json index 2b92fe4eb..897b031a2 100644 --- a/.build-tools/component-folders.json +++ b/.build-tools/component-folders.json @@ -40,4 +40,4 @@ "state/oci", "state/utils" ] -} \ No newline at end of file +} diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 5c0048ce4..cd17771f2 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,76 +1,76 @@ { - "name": "Dapr Components Contributor Environment", - "image": "ghcr.io/dapr/dapr-dev:latest", - "containerEnv": { - // Uncomment to overwrite devcontainer .kube/config and .minikube certs with the localhost versions - // each time the devcontainer starts, if the respective .kube-localhost/config and .minikube-localhost - // folders respectively are bind mounted to the devcontainer. - // "SYNC_LOCALHOST_KUBECONFIG": "true", + "name": "Dapr Components Contributor Environment", + "image": "ghcr.io/dapr/dapr-dev:latest", + "containerEnv": { + // Uncomment to overwrite devcontainer .kube/config and .minikube certs with the localhost versions + // each time the devcontainer starts, if the respective .kube-localhost/config and .minikube-localhost + // folders respectively are bind mounted to the devcontainer. + // "SYNC_LOCALHOST_KUBECONFIG": "true", - // Uncomment to disable docker-in-docker and automatically proxy default /var/run/docker.sock to - // the localhost bind-mount /var/run/docker-host.sock. - // "BIND_LOCALHOST_DOCKER": "true", + // Uncomment to disable docker-in-docker and automatically proxy default /var/run/docker.sock to + // the localhost bind-mount /var/run/docker-host.sock. + // "BIND_LOCALHOST_DOCKER": "true", - // Necessary for components-contrib's certification tests - "GOLANG_PROTOBUF_REGISTRATION_CONFLICT": "true" - }, - "extensions": [ - "davidanson.vscode-markdownlint", - "golang.go", - "ms-azuretools.vscode-dapr", - "ms-azuretools.vscode-docker", - "ms-kubernetes-tools.vscode-kubernetes-tools" - ], - "features": { - "ghcr.io/devcontainers/features/sshd:1": {}, - "ghcr.io/devcontainers/features/github-cli:1": {}, - "ghcr.io/devcontainers/features/azure-cli:1": {} - }, - "mounts": [ - // Mount docker-in-docker library volume - "type=volume,source=dind-var-lib-docker,target=/var/lib/docker", + // Necessary for components-contrib's certification tests + "GOLANG_PROTOBUF_REGISTRATION_CONFLICT": "true" + }, + "extensions": [ + "davidanson.vscode-markdownlint", + "golang.go", + "ms-azuretools.vscode-dapr", + "ms-azuretools.vscode-docker", + "ms-kubernetes-tools.vscode-kubernetes-tools" + ], + "features": { + "ghcr.io/devcontainers/features/sshd:1": {}, + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/azure-cli:1": {} + }, + "mounts": [ + // Mount docker-in-docker library volume + "type=volume,source=dind-var-lib-docker,target=/var/lib/docker", - // Bind mount docker socket under an alias to support docker-from-docker - "type=bind,source=/var/run/docker.sock,target=/var/run/docker-host.sock", + // Bind mount docker socket under an alias to support docker-from-docker + "type=bind,source=/var/run/docker.sock,target=/var/run/docker-host.sock" - // Uncomment to clone local .kube/config into devcontainer - // "type=bind,source=${env:HOME}${env:USERPROFILE}/.kube,target=/home/dapr/.kube-localhost", + // Uncomment to clone local .kube/config into devcontainer + // "type=bind,source=${env:HOME}${env:USERPROFILE}/.kube,target=/home/dapr/.kube-localhost", - // 
Uncomment to additionally clone minikube certs into devcontainer for use with .kube/config - // "type=bind,source=${env:HOME}${env:USERPROFILE}/.minikube,target=/home/dapr/.minikube-localhost" - ], - // Always run image-defined default command - "overrideCommand": false, - // On Linux, this will prevent new files getting created as root, but you - // may need to update the USER_UID and USER_GID in docker/Dockerfile-dev - // to match your user if not 1000. - "remoteUser": "dapr", - "runArgs": [ - // Enable ptrace-based debugging for go - "--cap-add=SYS_PTRACE", - "--security-opt", - "seccomp=unconfined", + // Uncomment to additionally clone minikube certs into devcontainer for use with .kube/config + // "type=bind,source=${env:HOME}${env:USERPROFILE}/.minikube,target=/home/dapr/.minikube-localhost" + ], + // Always run image-defined default command + "overrideCommand": false, + // On Linux, this will prevent new files getting created as root, but you + // may need to update the USER_UID and USER_GID in docker/Dockerfile-dev + // to match your user if not 1000. + "remoteUser": "dapr", + "runArgs": [ + // Enable ptrace-based debugging for go + "--cap-add=SYS_PTRACE", + "--security-opt", + "seccomp=unconfined", - // Uncomment to bind to host network for local devcontainer; this is necessary if using the - // bind-mounted /var/run/docker-host.sock directly. - // "--net=host", + // Uncomment to bind to host network for local devcontainer; this is necessary if using the + // bind-mounted /var/run/docker-host.sock directly. + // "--net=host", - // Enable docker-in-docker configuration. Comment out if not using for better security. - "--privileged", + // Enable docker-in-docker configuration. Comment out if not using for better security. + "--privileged", - // Run the entrypoint defined in container image. - "--init" - ], - "settings": { - "go.toolsManagement.checkForUpdates": "local", - "go.useLanguageServer": true, - "go.gopath": "/go", - "go.buildTags": "e2e,perf,conftests,unit,integration_test,certtests", - "git.alwaysSignOff": true, - "terminal.integrated.env.linux": { - "GOLANG_PROTOBUF_REGISTRATION_CONFLICT": "ignore" - } - }, - "workspaceFolder": "/workspaces/components-contrib", - "workspaceMount": "type=bind,source=${localWorkspaceFolder},target=/workspaces/components-contrib", + // Run the entrypoint defined in container image. 
+ "--init" + ], + "settings": { + "go.toolsManagement.checkForUpdates": "local", + "go.useLanguageServer": true, + "go.gopath": "/go", + "go.buildTags": "e2e,perf,conftests,unit,integration_test,certtests", + "git.alwaysSignOff": true, + "terminal.integrated.env.linux": { + "GOLANG_PROTOBUF_REGISTRATION_CONFLICT": "ignore" + } + }, + "workspaceFolder": "/workspaces/components-contrib", + "workspaceMount": "type=bind,source=${localWorkspaceFolder},target=/workspaces/components-contrib" } diff --git a/.github/scripts/components-scripts/certification-pubsub.aws.snssqs-destroy.sh b/.github/scripts/components-scripts/certification-pubsub.aws.snssqs-destroy.sh new file mode 100755 index 000000000..1f7d9e650 --- /dev/null +++ b/.github/scripts/components-scripts/certification-pubsub.aws.snssqs-destroy.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set +e + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/certification/pubsub/aws/snssqs" + +# Run Terraform +terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/certification-pubsub.aws.snssqs-setup.sh b/.github/scripts/components-scripts/certification-pubsub.aws.snssqs-setup.sh new file mode 100755 index 000000000..88fd85954 --- /dev/null +++ b/.github/scripts/components-scripts/certification-pubsub.aws.snssqs-setup.sh @@ -0,0 +1,29 @@ +#!/bin/sh + +set -e + +# Set variables for GitHub Actions +echo "AWS_REGION=us-east-1" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_1=sqssnscerttest-q1-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_2=sqssnscerttest-q2-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_3=sqssnscerttest-q3-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_3=sqssnscerttest-t3-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_MVT=sqssnscerttest-q-mvt-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_MVT=sqssnscerttest-tp-mvt-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_DLIN=sqssnscerttest-dlq-in-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_DLOUT=sqssnscerttest-dlq-out-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_DLIN=sqssnscerttest-dlt-in-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_FIFO=sqssnscerttest-q-fifo-$UNIQUE_ID.fifo" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_FIFO=sqssnscerttest-t-fifo-$UNIQUE_ID.fifo" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID=sqssnscerttest-q-fifo-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_QUEUE_NODRT=sqssnscerttest-q-nodrt-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_NODRT=sqssnscerttest-t-nodrt-$UNIQUE_ID" >> $GITHUB_ENV + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/certification/pubsub/aws/snssqs" + +# Run Terraform +terraform init +terraform validate -no-color +terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" +terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/certification-state.aws.dynamodb-destroy.sh b/.github/scripts/components-scripts/certification-state.aws.dynamodb-destroy.sh new file mode 100755 index 000000000..840312541 --- /dev/null +++ b/.github/scripts/components-scripts/certification-state.aws.dynamodb-destroy.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set +e + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/certification/state/aws/dynamodb" + +# Run Terraform +terraform destroy -auto-approve 
-var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/certification-state.aws.dynamodb-setup.sh b/.github/scripts/components-scripts/certification-state.aws.dynamodb-setup.sh new file mode 100755 index 000000000..21c587ce8 --- /dev/null +++ b/.github/scripts/components-scripts/certification-state.aws.dynamodb-setup.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +set -e + +# Set variables for GitHub Actions +echo "AWS_REGION=us-east-1" >> $GITHUB_ENV +echo "STATE_AWS_DYNAMODB_TABLE_1=certification-test-terraform-basic-$UNIQUE_ID" >> $GITHUB_ENV +echo "STATE_AWS_DYNAMODB_TABLE_2=certification-test-terraform-partition-key-$UNIQUE_ID" >> $GITHUB_ENV + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/certification/state/aws/dynamodb" + +# Run Terraform +terraform init +terraform validate -no-color +terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" +terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/conformance-bindings.azure.eventgrid-destroy.sh b/.github/scripts/components-scripts/conformance-bindings.azure.eventgrid-destroy.sh new file mode 100755 index 000000000..244687180 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-bindings.azure.eventgrid-destroy.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +set +e + +# Stop ngrok +echo "GET ngrok tunnels:" +curl http://localhost:4040/api/tunnels +echo "GET ngrok http requests:" +curl http://localhost:4040/api/requests/http +pkill ngrok +cat /tmp/ngrok.log diff --git a/.github/scripts/components-scripts/conformance-bindings.azure.eventgrid-setup.sh b/.github/scripts/components-scripts/conformance-bindings.azure.eventgrid-setup.sh new file mode 100755 index 000000000..9dcea682d --- /dev/null +++ b/.github/scripts/components-scripts/conformance-bindings.azure.eventgrid-setup.sh @@ -0,0 +1,18 @@ +#!/bin/sh + +set -e + +# Start ngrok +wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip +unzip -qq ngrok-stable-linux-amd64.zip +./ngrok authtoken ${AzureEventGridNgrokToken} +./ngrok http -log=stdout --log-level debug -host-header=localhost 9000 > /tmp/ngrok.log & +sleep 10 + +NGROK_ENDPOINT=`cat /tmp/ngrok.log | grep -Eom1 'https://.*' | sed 's/\s.*//'` +echo "Ngrok endpoint: ${NGROK_ENDPOINT}" +echo "AzureEventGridSubscriberEndpoint=${NGROK_ENDPOINT}/api/events" >> $GITHUB_ENV +cat /tmp/ngrok.log + +# Schedule trigger to kill ngrok +bash -c "sleep 600 && pkill ngrok" & diff --git a/.github/scripts/components-scripts/conformance-bindings.influx-setup.sh b/.github/scripts/components-scripts/conformance-bindings.influx-setup.sh new file mode 100755 index 000000000..ea97035a4 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-bindings.influx-setup.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +set -e + +export INFLUX_TOKEN=$(openssl rand -base64 32) +echo "INFLUX_TOKEN=$INFLUX_TOKEN" >> $GITHUB_ENV +docker-compose -f .github/infrastructure/docker-compose-influxdb.yml -p influxdb up -d diff --git a/.github/scripts/components-scripts/conformance-pubsub.aws.snssqs.terraform-destroy.sh b/.github/scripts/components-scripts/conformance-pubsub.aws.snssqs.terraform-destroy.sh new file mode 100755 index 000000000..e3a2f1b20 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-pubsub.aws.snssqs.terraform-destroy.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set +e + +# Navigate to the Terraform directory +cd 
".github/infrastructure/terraform/conformance/pubsub/aws/snssqs" + +# Run Terraform +terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/conformance-pubsub.aws.snssqs.terraform-setup.sh b/.github/scripts/components-scripts/conformance-pubsub.aws.snssqs.terraform-setup.sh new file mode 100755 index 000000000..415bcfb3c --- /dev/null +++ b/.github/scripts/components-scripts/conformance-pubsub.aws.snssqs.terraform-setup.sh @@ -0,0 +1,18 @@ +#!/bin/sh + +set -e + +# Set variables for GitHub Actions +echo "PUBSUB_AWS_SNSSQS_QUEUE=testQueue-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC=testTopic-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1=multiTopic1-$UNIQUE_ID" >> $GITHUB_ENV +echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2=multiTopic2-$UNIQUE_ID" >> $GITHUB_ENV + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/conformance/pubsub/aws/snssqs" + +# Run Terraform +terraform init +terraform validate -no-color +terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" +terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/conformance-secretstores.kubernetes-setup.sh b/.github/scripts/components-scripts/conformance-secretstores.kubernetes-setup.sh new file mode 100755 index 000000000..40475f8b6 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-secretstores.kubernetes-setup.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +set -e + +kubectl apply -f tests/config/kind-data.yaml +echo "NAMESPACE=default" >> $GITHUB_ENV diff --git a/.github/scripts/components-scripts/conformance-state.aws.dynamodb-destroy.sh b/.github/scripts/components-scripts/conformance-state.aws.dynamodb-destroy.sh new file mode 100755 index 000000000..add6101a0 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-state.aws.dynamodb-destroy.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +set +e + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/conformance/state/aws/dynamodb" + +# Run Terraform +terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/conformance-state.aws.dynamodb-setup.sh b/.github/scripts/components-scripts/conformance-state.aws.dynamodb-setup.sh new file mode 100755 index 000000000..3c2fdd740 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-state.aws.dynamodb-setup.sh @@ -0,0 +1,16 @@ +#!/bin/sh + +set -e + +# Set variables for GitHub Actions +echo "STATE_AWS_DYNAMODB_TABLE_1=conformance-test-terraform-basic-${UNIQUE_ID}" >> $GITHUB_ENV +echo "STATE_AWS_DYNAMODB_TABLE_2=conformance-test-terraform-partition-key-${UNIQUE_ID}" >> $GITHUB_ENV + +# Navigate to the Terraform directory +cd ".github/infrastructure/terraform/conformance/state/aws/dynamodb" + +# Run Terraform +terraform init +terraform validate -no-color +terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" +terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME" diff --git a/.github/scripts/components-scripts/conformance-state.azure.sql-destroy.sh b/.github/scripts/components-scripts/conformance-state.azure.sql-destroy.sh new file mode 100755 index 000000000..02f40228f --- /dev/null +++ b/.github/scripts/components-scripts/conformance-state.azure.sql-destroy.sh @@ -0,0 +1,10 @@ +#!/bin/sh + +set +e + +# Wait for the 
creation of the DB by the test to propagate to ARM, otherwise deletion succeeds as a no-op. +# The wait should be under 30s, but is capped at 1m as flakiness here results in an accumulation of expensive DB instances over time. +# Also note that the deletion call only blocks until the request is processed; do not rely on it as a mutex on the same DB, +# as deletion may still be ongoing in sequential runs. +sleep 1m +az sql db delete --resource-group "$AzureResourceGroupName" --server "$AzureSqlServerName" -n "$AzureSqlServerDbName" --yes diff --git a/.github/scripts/components-scripts/conformance-state.azure.sql-setup.sh b/.github/scripts/components-scripts/conformance-state.azure.sql-setup.sh new file mode 100755 index 000000000..85984c6d1 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-state.azure.sql-setup.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +set -e + +# Use UUID with `-` stripped out for DB names to prevent collisions between workflows +AzureSqlServerDbName=$(cat /proc/sys/kernel/random/uuid | sed -E 's/-//g') +echo "AzureSqlServerDbName=$AzureSqlServerDbName" >> $GITHUB_ENV diff --git a/.github/scripts/components-scripts/conformance-state.cloudflare.workerskv-destroy.sh b/.github/scripts/components-scripts/conformance-state.cloudflare.workerskv-destroy.sh new file mode 100755 index 000000000..064b52747 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-state.cloudflare.workerskv-destroy.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +set +e + +# Delete the Worker +curl -X DELETE "https://api.cloudflare.com/client/v4/accounts/${CLOUDFLARE_ACCOUNT_ID}/workers/scripts/${CloudflareWorkerName}" \ + -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" + +# Delete the KV namespace +curl -X DELETE "https://api.cloudflare.com/client/v4/accounts/${CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces/${CloudflareKVNamespaceID}" \ + -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" \ No newline at end of file diff --git a/.github/scripts/components-scripts/conformance-state.cloudflare.workerskv-setup.sh b/.github/scripts/components-scripts/conformance-state.cloudflare.workerskv-setup.sh new file mode 100755 index 000000000..eda3b59b9 --- /dev/null +++ b/.github/scripts/components-scripts/conformance-state.cloudflare.workerskv-setup.sh @@ -0,0 +1,30 @@ +#!/bin/sh + +set -e + +# Rebuild the Worker +( + cd internal/component/cloudflare/worker-src; + npm ci; + npm run build; +) + +# Check that the code of the worker is correct +git diff --exit-code ./internal/component/cloudflare/workers/code \ + || (echo "The source code of the Cloudflare Worker has changed, but the Worker has not been recompiled. 
Please re-compile the Worker by running 'npm ci && npm run build' in 'internal/component/cloudflare/worker-src'" && exit 1) + +# Remove dashes from UNIQUE_ID +Suffix=$(echo "$UNIQUE_ID" | sed -E 's/-//g') + +# Ensure the Workers KV namespace exists +CloudflareWorkerName="daprconfkv${Suffix}" +CloudflareKVNamespaceID=$( curl -s -X POST "https://api.cloudflare.com/client/v4/accounts/${CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces" \ + -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" \ + -H "Content-Type: application/json" \ + --data "{\"title\":\"${CloudflareWorkerName}\"}" \ + | jq -r ".result.id" ) + +echo "CloudflareWorkerName=${CloudflareWorkerName}" >> $GITHUB_ENV +echo "CloudflareAPIToken=${CLOUDFLARE_API_TOKEN}" >> $GITHUB_ENV +echo "CloudflareAccountID=${CLOUDFLARE_ACCOUNT_ID}" >> $GITHUB_ENV +echo "CloudflareKVNamespaceID=${CloudflareKVNamespaceID}" >> $GITHUB_ENV diff --git a/.github/scripts/components-scripts/docker-compose.sh b/.github/scripts/components-scripts/docker-compose.sh new file mode 100755 index 000000000..c71bac388 --- /dev/null +++ b/.github/scripts/components-scripts/docker-compose.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +set -e + +FILE="$1" +PROJECT="${2:-$FILE}" + +docker-compose -f .github/infrastructure/docker-compose-${FILE}.yml -p ${PROJECT} up -d diff --git a/.github/scripts/dapr_bot.js b/.github/scripts/dapr_bot.js index a1972a031..472af14b4 100644 --- a/.github/scripts/dapr_bot.js +++ b/.github/scripts/dapr_bot.js @@ -1,49 +1,59 @@ // list of owner who can control dapr-bot workflow // TODO: Read owners from OWNERS file. const owners = [ - "yaron2", - "berndverst", - "artursouza", - "mukundansundar", - "halspang", - "tanvigour", - "pkedy", - "amuluyavarote", - "daixiang0", - "ItalyPaleAle", - "jjcollinge", - "pravinpushkar", - "shivamkm07", - "shubham1172", - "skyao", - "msfussell", - "Taction", - "RyanLettieri", - "DeepanshuA", - "yash-nisar", - "addjuarez", - "tmacam", -]; + 'addjuarez', + 'amuluyavarote', + 'artursouza', + 'berndverst', + 'daixiang0', + 'DeepanshuA', + 'halspang', + 'ItalyPaleAle', + 'jjcollinge', + 'msfussell', + 'mukundansundar', + 'pkedy', + 'pravinpushkar', + 'RyanLettieri', + 'shivamkm07', + 'shubham1172', + 'skyao', + 'Taction', + 'tmacam', + 'yaron2', + 'yash-nisar', +] -const docsIssueBodyTpl = (issueNumber) => `This issue was automatically created by \ +const docsIssueBodyTpl = ( + issueNumber +) => `This issue was automatically created by \ [Dapr Bot](https://github.com/dapr/dapr/blob/master/.github/workflows/dapr-bot.yml) because a \"documentation required\" label \ was added to dapr/components-contrib#${issueNumber}. \n\n\ -TODO: Add more details as per [this template](.github/ISSUE_TEMPLATE/new-content-needed.md).`; +TODO: Add more details as per [this template](.github/ISSUE_TEMPLATE/new-content-needed.md).` -const newComponentBodyTpl = (issueNumber) => `This issue was automatically created by \ +const newComponentBodyTpl = ( + issueNumber +) => `This issue was automatically created by \ [Dapr Bot](https://github.com/dapr/dapr/blob/master/.github/workflows/dapr-bot.yml) because a \"new component\" label \ was added to dapr/components-contrib#${issueNumber}. 
\n\n\ Please register the component in [cmd/daprd/components](https://github.com/dapr/dapr/tree/master/cmd/daprd/components), \ -similar to the ones in the folder (one file per component).`; +similar to the ones in the folder (one file per component).` module.exports = async ({ github, context }) => { - if (context.eventName == "issue_comment" && context.payload.action == "created") { - await handleIssueCommentCreate({ github, context }); - } else if ((context.eventName == "issues" || context.eventName == "pull_request") && context.payload.action == "labeled") { - await handleIssueOrPrLabeled({ github, context }); + if ( + context.eventName == 'issue_comment' && + context.payload.action == 'created' + ) { + await handleIssueCommentCreate({ github, context }) + } else if ( + (context.eventName == 'issues' || + context.eventName == 'pull_request') && + context.payload.action == 'labeled' + ) { + await handleIssueOrPrLabeled({ github, context }) } else { - console.log(`[main] event ${context.eventName} not supported, exiting.`); + console.log(`[main] event ${context.eventName} not supported, exiting.`) } } @@ -51,78 +61,86 @@ module.exports = async ({ github, context }) => { * Handle issue comment create event. */ async function handleIssueCommentCreate({ github, context }) { - const payload = context.payload; - const issue = context.issue; - const username = (context.actor || "").toLowerCase(); - const isFromPulls = !!payload.issue.pull_request; - const commentBody = payload.comment.body; + const payload = context.payload + const issue = context.issue + const username = (context.actor || '').toLowerCase() + const isFromPulls = !!payload.issue.pull_request + const commentBody = payload.comment.body if (!commentBody) { - console.log("[handleIssueCommentCreate] comment body not found, exiting."); - return; + console.log( + '[handleIssueCommentCreate] comment body not found, exiting.' + ) + return } - const command = commentBody.split(" ")[0]; + const command = commentBody.split(' ')[0] // Commands that can be executed by anyone. - if (command === "/assign") { - await cmdAssign(github, issue, username, isFromPulls); - return; + if (command === '/assign') { + await cmdAssign(github, issue, username, isFromPulls) + return } // Commands that can only be executed by owners. if (owners.map((v) => v.toLowerCase()).indexOf(username) < 0) { - console.log(`[handleIssueCommentCreate] user ${username} is not an owner, exiting.`); - return; + console.log( + `[handleIssueCommentCreate] user ${username} is not an owner, exiting.` + ) + return } switch (command) { - case "/ok-to-test": - await cmdOkToTest(github, issue, isFromPulls); - break; + case '/ok-to-test': + await cmdOkToTest(github, issue, isFromPulls) + break default: - console.log(`[handleIssueCommentCreate] command ${command} not found, exiting.`); - break; + console.log( + `[handleIssueCommentCreate] command ${command} not found, exiting.` + ) + break } } - - /** * Handle issue or PR labeled event. */ async function handleIssueOrPrLabeled({ github, context }) { - const payload = context.payload; - const label = payload.label.name; - const issueNumber = payload.issue.number; + const payload = context.payload + const label = payload.label.name + const issueNumber = payload.issue.number // This should not run in forks. 
- if (context.repo.owner !== "dapr") { - console.log("[handleIssueOrPrLabeled] not running in dapr repo, exiting."); - return; + if (context.repo.owner !== 'dapr') { + console.log( + '[handleIssueOrPrLabeled] not running in dapr repo, exiting.' + ) + return } // Authorization is not required here because it's triggered by an issue label event. // Only authorized users can add labels to issues. - if (label == "documentation required") { + if (label == 'documentation required') { // Open a new docs issue await github.rest.issues.create({ - owner: "dapr", - repo: "docs", + owner: 'dapr', + repo: 'docs', title: `New content needed for dapr/components-contrib#${issueNumber}`, - labels: ["content/missing-information", "created-by/dapr-bot"], + labels: ['content/missing-information', 'created-by/dapr-bot'], body: docsIssueBodyTpl(issueNumber), - }); - } else if (label == "new component") { + }) + } else if (label == 'new component') { // Open a new dapr issue await github.rest.issues.create({ - owner: "dapr", - repo: "dapr", + owner: 'dapr', + repo: 'dapr', title: `Component registration for dapr/components-contrib#${issueNumber}`, - labels: ["area/components", "created-by/dapr-bot"], + labels: ['area/components', 'created-by/dapr-bot'], body: newComponentBodyTpl(issueNumber), - }); + }) } else { - console.log(`[handleIssueOrPrLabeled] label ${label} not supported, exiting.`); + console.log( + `[handleIssueOrPrLabeled] label ${label} not supported, exiting.` + ) } } @@ -135,11 +153,15 @@ async function handleIssueOrPrLabeled({ github, context }) { */ async function cmdAssign(github, issue, username, isFromPulls) { if (isFromPulls) { - console.log("[cmdAssign] pull requests unsupported, skipping command execution."); - return; + console.log( + '[cmdAssign] pull requests unsupported, skipping command execution.' + ) + return } else if (issue.assignees && issue.assignees.length !== 0) { - console.log("[cmdAssign] issue already has assignees, skipping command execution."); - return; + console.log( + '[cmdAssign] issue already has assignees, skipping command execution.' + ) + return } await github.rest.issues.addAssignees({ @@ -147,10 +169,9 @@ async function cmdAssign(github, issue, username, isFromPulls) { repo: issue.repo, issue_number: issue.number, assignees: [username], - }); + }) } - /** * Trigger e2e test for the pull request. * @param {*} github GitHub object reference @@ -159,50 +180,56 @@ async function cmdAssign(github, issue, username, isFromPulls) { */ async function cmdOkToTest(github, issue, isFromPulls) { if (!isFromPulls) { - console.log("[cmdOkToTest] only pull requests supported, skipping command execution."); - return; + console.log( + '[cmdOkToTest] only pull requests supported, skipping command execution.' 
+ ) + return } // Get pull request const pull = await github.rest.pulls.get({ owner: issue.owner, repo: issue.repo, - pull_number: issue.number - }); + pull_number: issue.number, + }) if (pull && pull.data) { // Get commit id and repo from pull head const testPayload = { pull_head_ref: pull.data.head.sha, pull_head_repo: pull.data.head.repo.full_name, - command: "ok-to-test", + command: 'ok-to-test', issue: issue, - }; + } // Fire repository_dispatch event to trigger certification test await github.rest.repos.createDispatchEvent({ owner: issue.owner, repo: issue.repo, - event_type: "certification-test", + event_type: 'certification-test', client_payload: testPayload, - }); + }) // Fire repository_dispatch event to trigger conformance test await github.rest.repos.createDispatchEvent({ owner: issue.owner, repo: issue.repo, - event_type: "conformance-test", + event_type: 'conformance-test', client_payload: testPayload, - }); + }) // Fire repository_dispatch event to trigger unit tests for other architectures and OS await github.rest.repos.createDispatchEvent({ owner: issue.owner, repo: issue.repo, - event_type: "build-all", + event_type: 'build-all', client_payload: testPayload, - }); + }) - console.log(`[cmdOkToTest] triggered certification and conformance tests for ${JSON.stringify(testPayload)}`); + console.log( + `[cmdOkToTest] triggered certification and conformance tests for ${JSON.stringify( + testPayload + )}` + ) } } diff --git a/.github/scripts/test-info.mjs b/.github/scripts/test-info.mjs new file mode 100644 index 000000000..c5607b242 --- /dev/null +++ b/.github/scripts/test-info.mjs @@ -0,0 +1,579 @@ +import { argv, env, exit } from 'node:process' +import { writeFileSync } from 'node:fs' + +/** + * List of all components + * @type {Record<string, ComponentTestProperties>} + */ +const components = { + 'bindings.azure.blobstorage': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureBlobStorageAccount', + 'AzureBlobStorageAccessKey', + 'AzureBlobStorageContainer', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + ], + }, + 'bindings.azure.cosmosdb': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureCosmosDB', + 'AzureCosmosDBMasterKey', + 'AzureCosmosDBUrl', + 'AzureCosmosDBCollection', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + ], + }, + 'bindings.azure.eventgrid': { + conformance: true, + requiredSecrets: [ + 'AzureEventGridNgrokToken', + 'AzureEventGridAccessKey', + 'AzureEventGridTopicEndpoint', + 'AzureEventGridScope', + 'AzureEventGridClientSecret', + 'AzureEventGridClientId', + 'AzureEventGridTenantId', + 'AzureEventGridSubscriptionId', + ], + conformanceSetup: 'conformance-bindings.azure.eventgrid-setup.sh', + conformanceDestroy: 'conformance-bindings.azure.eventgrid-destroy.sh', + }, + 'bindings.azure.eventhubs': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureEventHubsBindingsConnectionString', + 'AzureBlobStorageAccount', + 'AzureBlobStorageAccessKey', + 'AzureEventHubsBindingsHub', + 'AzureEventHubsBindingsNamespace', + 'AzureEventHubsBindingsConsumerGroup', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientSecret', + 'AzureResourceGroupName', + 'AzureCertificationSubscriptionId', + 'AzureEventHubsBindingsContainer', + 'AzureIotHubEventHubConnectionString', + 
'AzureIotHubName', + 'AzureIotHubBindingsConsumerGroup', + ], + }, + 'bindings.azure.servicebusqueues': { + conformance: true, + certification: true, + requiredSecrets: ['AzureServiceBusConnectionString'], + }, + 'bindings.azure.storagequeues': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureBlobStorageAccessKey', + 'AzureBlobStorageAccount', + 'AzureBlobStorageQueue', + ], + }, + 'bindings.cron': { + conformance: true, + certification: true, + }, + 'bindings.dubbo': { + certification: true, + }, + 'bindings.http': { + conformance: true, + }, + 'bindings.influx': { + conformance: true, + conformanceSetup: 'conformance-bindings.influx-setup.sh', + }, + 'bindings.kafka': { + certification: true, + }, + 'bindings.kafka-confluent': { + conformance: true, + conformanceSetup: 'docker-compose.sh confluent', + }, + 'bindings.kafka-wurstmeister': { + conformance: true, + conformanceSetup: 'docker-compose.sh kafka', + }, + 'bindings.kubemq': { + conformance: true, + conformanceSetup: 'docker-compose.sh kubemq', + }, + 'bindings.localstorage': { + certification: true, + }, + 'bindings.mqtt3-emqx': { + conformance: true, + conformanceSetup: 'docker-compose.sh emqx', + }, + 'bindings.mqtt3-mosquitto': { + conformance: true, + conformanceSetup: 'docker-compose.sh mosquitto', + }, + 'bindings.mqtt3-vernemq': { + conformance: true, + conformanceSetup: 'docker-compose.sh vernemq', + }, + 'bindings.postgres': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh postgresql', + }, + 'bindings.rabbitmq': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh rabbitmq', + }, + 'bindings.redis': { + certification: true, + }, + 'bindings.redis.v6': { + conformance: true, + conformanceSetup: 'docker-compose.sh redisjson redis', + }, + 'bindings.redis.v7': { + conformance: true, + conformanceSetup: 'docker-compose.sh redis7 redis', + }, + 'configuration.redis.v6': { + conformance: true, + conformanceSetup: 'docker-compose.sh redisjson redis', + }, + 'configuration.redis.v7': { + conformance: true, + conformanceSetup: 'docker-compose.sh redis7 redis', + }, + 'pubsub.aws.snssqs': { + certification: true, + requireAWSCredentials: true, + requireTerraform: true, + certificationSetup: 'certification-pubsub.aws.snssqs-setup.sh', + certificationDestroy: 'certification-pubsub.aws.snssqs-destroy.sh', + }, + 'pubsub.aws.snssqs.docker': { + conformance: true, + conformanceSetup: 'docker-compose.sh snssqs', + }, + 'pubsub.aws.snssqs.terraform': { + conformance: true, + requireAWSCredentials: true, + requireTerraform: true, + conformanceSetup: 'conformance-pubsub.aws.snssqs.terraform-setup.sh', + conformanceDestroy: 'conformance-pubsub.aws.snssqs.terraform-destroy.sh', + }, + 'pubsub.azure.eventhubs': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureEventHubsPubsubTopicActiveConnectionString', + 'AzureEventHubsPubsubNamespace', + 'AzureEventHubsPubsubConsumerGroup', + 'AzureEventHubsPubsubNamespaceConnectionString', + 'AzureBlobStorageAccount', + 'AzureBlobStorageAccessKey', + 'AzureEventHubsPubsubContainer', + 'AzureIotHubName', + 'AzureIotHubEventHubConnectionString', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + 'AzureResourceGroupName', + 'AzureCertificationSubscriptionId', + ], + }, + 'pubsub.azure.servicebus.queues': { + conformance: true, + requiredSecrets: ['AzureServiceBusConnectionString'], + }, + 
'pubsub.azure.servicebus.topics': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureServiceBusConnectionString', + 'AzureServiceBusNamespace', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + ], + }, + 'pubsub.hazelcast': { + conformance: true, + conformanceSetup: 'docker-compose.sh hazelcast', + }, + 'pubsub.in-memory': { + conformance: true, + }, + 'pubsub.jetstream': { + conformance: true, + conformanceSetup: 'docker-compose.sh jetstream', + }, + 'pubsub.kafka': { + certification: true, + }, + 'pubsub.kafka-confluent': { + conformance: true, + conformanceSetup: 'docker-compose.sh confluent', + }, + 'pubsub.kafka-wurstmeister': { + conformance: true, + conformanceSetup: 'docker-compose.sh kafka', + }, + 'pubsub.kubemq': { + conformance: true, + conformanceSetup: 'docker-compose.sh kubemq', + }, + 'pubsub.mqtt3': { + certification: true, + }, + 'pubsub.mqtt3-emqx': { + conformance: true, + conformanceSetup: 'docker-compose.sh emqx', + }, + 'pubsub.mqtt3-vernemq': { + conformance: true, + conformanceSetup: 'docker-compose.sh vernemq', + }, + 'pubsub.natsstreaming': { + conformance: true, + conformanceSetup: 'docker-compose.sh natsstreaming', + }, + 'pubsub.pulsar': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh pulsar', + }, + 'pubsub.rabbitmq': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh rabbitmq', + }, + 'pubsub.redis.v6': { + conformance: true, + conformanceSetup: 'docker-compose.sh redisjson redis', + }, + // This test is currently disabled due to issues with Redis v7 + /*'pubsub.redis.v7': { + conformance: true, + conformanceSetup: 'docker-compose.sh redis7 redis', + },*/ + 'pubsub.solace': { + conformance: true, + conformanceSetup: 'docker-compose.sh solace', + }, + 'secretstores.azure.keyvault': { + certification: true, + requiredSecrets: [ + 'AzureKeyVaultName', + 'AzureKeyVaultSecretStoreTenantId', + 'AzureKeyVaultSecretStoreClientId', + 'AzureKeyVaultSecretStoreServicePrincipalClientId', + 'AzureKeyVaultSecretStoreServicePrincipalClientSecret', + 'AzureContainerRegistryName', + 'AzureResourceGroupName', + ], + requiredCerts: ['AzureKeyVaultSecretStoreCert'], + }, + 'secretstores.azure.keyvault.certificate': { + conformance: true, + requiredSecrets: [ + 'AzureKeyVaultName', + 'AzureKeyVaultSecretStoreTenantId', + 'AzureKeyVaultSecretStoreClientId', + ], + requiredCerts: ['AzureKeyVaultSecretStoreCert'], + }, + 'secretstores.azure.keyvault.serviceprincipal': { + conformance: true, + requiredSecrets: [ + 'AzureKeyVaultName', + 'AzureKeyVaultSecretStoreTenantId', + 'AzureKeyVaultSecretStoreServicePrincipalClientId', + 'AzureKeyVaultSecretStoreServicePrincipalClientSecret', + ], + }, + 'secretstores.hashicorp.vault': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh hashicorp-vault vault', + }, + 'secretstores.kubernetes': { + conformance: true, + requireKind: true, + conformanceSetup: 'conformance-secretstores.kubernetes-setup.sh', + }, + 'secretstores.local.env': { + conformance: true, + certification: true, + }, + 'secretstores.local.file': { + conformance: true, + certification: true, + }, + 'state.aws.dynamodb': { + certification: true, + requireAWSCredentials: true, + requireTerraform: true, + certificationSetup: 'certification-state.aws.dynamodb-setup.sh', + certificationDestroy: 'certification-state.aws.dynamodb-destroy.sh', + }, + 
'state.aws.dynamodb.terraform': { + conformance: true, + requireAWSCredentials: true, + requireTerraform: true, + conformanceSetup: 'conformance-state.aws.dynamodb-setup.sh', + conformanceDestroy: 'conformance-state.aws.dynamodb-destroy.sh', + }, + 'state.azure.blobstorage': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureBlobStorageAccount', + 'AzureBlobStorageAccessKey', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + 'AzureBlobStorageContainer', + ], + }, + 'state.azure.cosmosdb': { + conformance: true, + certification: true, + requiredSecrets: [ + 'AzureCosmosDBMasterKey', + 'AzureCosmosDBUrl', + 'AzureCosmosDB', + 'AzureCosmosDBCollection', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + ], + }, + 'state.azure.sql': { + conformance: true, + requiredSecrets: [ + 'AzureResourceGroupName', + 'AzureSqlServerName', + 'AzureSqlServerConnectionString', + ], + conformanceSetup: 'conformance-state.azure.sql-setup.sh', + conformanceDestroy: 'conformance-state.azure.sql-destroy.sh', + }, + 'state.azure.tablestorage': { + certification: true, + requiredSecrets: [ + 'AzureBlobStorageAccount', + 'AzureBlobStorageAccessKey', + 'AzureCertificationTenantId', + 'AzureCertificationServicePrincipalClientId', + 'AzureCertificationServicePrincipalClientSecret', + ], + }, + 'state.azure.tablestorage.cosmosdb': { + conformance: true, + requiredSecrets: [ + 'AzureCosmosDBTableAPI', + 'AzureCosmosDBTableAPIMasterKey', + ], + }, + 'state.azure.tablestorage.storage': { + conformance: true, + requiredSecrets: [ + 'AzureBlobStorageAccessKey', + 'AzureBlobStorageAccount', + ], + }, + 'state.cassandra': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh cassandra', + }, + 'state.cloudflare.workerskv': { + conformance: true, + requireCloudflareCredentials: true, + nodeJsVersion: '18.x', + conformanceSetup: 'conformance-state.cloudflare.workerskv-setup.sh', + conformanceDestroy: 'conformance-state.cloudflare.workerskv-destroy.sh', + }, + 'state.cockroachdb': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh cockroachdb', + }, + 'state.in-memory': { + conformance: true, + }, + 'state.memcached': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh memcached', + }, + 'state.mongodb': { + conformance: true, + certification: true, + mongoDbVersion: '4.2', + }, + 'state.mysql': { + certification: true, + }, + 'state.mysql.mariadb': { + conformance: true, + conformanceSetup: 'docker-compose.sh mariadb', + }, + 'state.mysql.mysql': { + conformance: true, + conformanceSetup: 'docker-compose.sh mysql', + }, + 'state.postgresql': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh postgresql', + }, + 'state.redis': { + certification: true, + }, + 'state.redis.v6': { + conformance: true, + conformanceSetup: 'docker-compose.sh redisjson redis', + }, + 'state.redis.v7': { + conformance: true, + conformanceSetup: 'docker-compose.sh redis7 redis', + }, + 'state.rethinkdb': { + conformance: true, + conformanceSetup: 'docker-compose.sh rethinkdb', + }, + 'state.sqlite': { + conformance: true, + certification: true, + }, + 'state.sqlserver': { + conformance: true, + certification: true, + conformanceSetup: 'docker-compose.sh sqlserver', + requiredSecrets: ['AzureSqlServerConnectionString'], + }, + 
'workflows.temporal': { + conformance: true, + conformanceSetup: 'docker-compose.sh temporal', + }, +} + +/** + * Type for the objects in the components dictionary + * @typedef {Object} ComponentTestProperties + * @property {boolean?} conformance If true, enables the component for conformance tests + * @property {boolean?} certification If true, enables the component for certification tests + * @property {string[]?} requiredSecrets Required secrets (if not empty, test becomes "cloud-only") + * @property {string[]?} requiredCerts Required certs (if not empty, test becomes "cloud-only") + * @property {boolean?} requireAWSCredentials If true, requires AWS credentials and makes the test "cloud-only" + * @property {boolean?} requireCloudflareCredentials If true, requires Cloudflare credentials and makes the test "cloud-only" + * @property {boolean?} requireTerraform If true, requires Terraform + * @property {boolean?} requireKind If true, requires KinD + * @property {string?} conformanceSetup Setup script for conformance tests + * @property {string?} conformanceDestroy Destroy script for conformance tests + * @property {string?} certificationSetup Setup script for certification tests + * @property {string?} certificationDestroy Destroy script for certification tests + * @property {string?} nodeJsVersion If set, installs the specified Node.js version + * @property {string?} mongoDbVersion If set, installs the specified MongoDB version + */ + +/** + * Test matrix object + * @typedef {Object} TestMatrixElement + * @property {string} component Component name + * @property {string?} required-secrets Required secrets + * @property {string?} required-certs Required certs + * @property {boolean?} require-aws-credentials Requires AWS credentials + * @property {boolean?} require-cloudflare-credentials Requires Cloudflare credentials + * @property {boolean?} require-terraform Requires Terraform + * @property {boolean?} require-kind Requires KinD + * @property {string?} setup-script Setup script + * @property {string?} destroy-script Destroy script + * @property {string?} nodejs-version Install the specified Node.js version if set + * @property {string?} mongodb-version Install the specified MongoDB version if set + */ + +/** + * Returns the list of components for the matrix. + * @param {'conformance'|'certification'} testKind Kind of test + * @param {boolean} enableCloudTests If true, returns components that require secrets or credentials too (which can't be used as part of the regular CI in a PR) + * @returns {TestMatrixElement[]} Test matrix object + */ +function GenerateMatrix(testKind, enableCloudTests) { + /** @type {TestMatrixElement[]} */ + const res = [] + for (const name in components) { + const comp = components[name] + if (!comp[testKind]) { + continue + } + + // Skip cloud-only tests if enableCloudTests is false + if (!enableCloudTests) { + if ( + comp.requiredSecrets?.length || + comp.requiredCerts?.length || + comp.requireAWSCredentials || + comp.requireCloudflareCredentials + ) { + continue + } + } + + // Add the component to the array + res.push({ + component: name, + 'required-secrets': comp.requiredSecrets?.length + ? comp.requiredSecrets.join(',') + : undefined, + 'required-certs': comp.requiredCerts?.length + ? comp.requiredCerts.join(',') + : undefined, + 'require-aws-credentials': comp.requireAWSCredentials + ? 'true' + : undefined, + 'require-cloudflare-credentials': comp.requireCloudflareCredentials + ? 'true' + : undefined, + 'require-terraform': comp.requireTerraform ? 
'true' : undefined, + 'require-kind': comp.requireKind ? 'true' : undefined, + 'setup-script': comp[testKind + 'Setup'] || undefined, + 'destroy-script': comp[testKind + 'Destroy'] || undefined, + 'nodejs-version': comp.nodeJsVersion || undefined, + 'mongodb-version': comp.mongoDbVersion || undefined, + }) + } + + return res +} + +// Upon invocation, writes the matrix to the $GITHUB_OUTPUT file +if (!env.GITHUB_OUTPUT) { + console.error('Missing environment variable GITHUB_OUTPUT') + exit(1) +} +if (argv.length < 3 || !['conformance', 'certification'].includes(argv[2])) { + console.error("First parameter must be 'conformance' or 'certification'") + exit(1) +} +if (argv.length < 4 || !['true', 'false'].includes(argv[3])) { + console.error("Second parameter must be 'true' or 'false'") + exit(1) +} + +const matrixObj = GenerateMatrix(argv[2], argv[3] == 'true') +console.log('Generated matrix:\n\n' + JSON.stringify(matrixObj, null, ' ')) + +writeFileSync(env.GITHUB_OUTPUT, 'test-matrix=' + JSON.stringify(matrixObj)) diff --git a/.github/workflows/certification.yml b/.github/workflows/certification.yml index 92e9510d9..34a852c0b 100644 --- a/.github/workflows/certification.yml +++ b/.github/workflows/certification.yml @@ -21,17 +21,23 @@ on: - cron: '25 */8 * * *' push: branches: - - release-* + - 'release-*' pull_request: branches: - - release-* + # TODO: REMOVE "master" BEFORE MERGING + - 'master' + - 'release-*' + +env: + # Only specify a major version, such as 1.20 + GO_VERSION: '1.19' jobs: # Based on whether this is a PR or a scheduled run, we will run a different # subset of the certification tests. This allows all the tests not requiring # secrets to be executed on pull requests. generate-matrix: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Parse repository_dispatch payload if: github.event_name == 'repository_dispatch' @@ -42,98 +48,24 @@ jobs: echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV fi - - name: Install yq - run: | - sudo snap install yq + - name: Check out code + uses: actions/checkout@v3 + with: + repository: ${{ env.CHECKOUT_REPO }} + ref: ${{ env.CHECKOUT_REF }} - - name: Specify components that can be run on every PR - id: pr-components + - name: Generate test matrix + id: generate-matrix + env: + VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }} run: | - PR_COMPONENTS=$(yq -I0 --tojson eval - << EOF - - pubsub.kafka - - pubsub.rabbitmq - - pubsub.pulsar - - pubsub.mqtt3 - - state.mongodb - - state.redis - - state.cockroachdb - - state.postgresql - - state.cassandra - - state.memcached - - state.mysql - - state.sqlite - - bindings.dubbo - - bindings.kafka - - bindings.redis - - bindings.cron - - secretstores.local.env - - secretstores.local.file - - secretstores.hashicorp.vault - - bindings.rabbitmq - - bindings.localstorage - - bindings.postgres - EOF - ) - echo "pr-components=$PR_COMPONENTS" >> $GITHUB_OUTPUT - - - name: Specify components requiring cloud resources to run - id: cloud-components - run: | - # Skip cloud-components on PRs, requires scheduled run trigger - # or approver to trigger via respository-dispatch on /ok-to-test - if [ "${{ github.event_name }}" = "pull_request" ]; then - echo "cloud-components=[]" >> $GITHUB_OUTPUT - exit + if [ -z "$VAULT_NAME" ]; then + # Do not include cloud tests when credentials are not available + node .github/scripts/test-info.mjs certification false + else + # Include cloud tests + node .github/scripts/test-info.mjs certification true fi - # Reuse the same cloud infrastructure as 
conformance.yml - # - # Unfortunately, Azure secrets can't have underscores in - # names, while environment variables with hyphens ('-') are - # troublesome. - # - # We work around here by leveraging the fact that - # environment variable names are case sensitive, so - # CamelCase would still work. - # - # That is slightly better than something like - # AZURECOSMOSDBMASTERKEY, which is extremely hard to read - # and errorprone. - # - # Only list the secrets you need for the component. - CRON_COMPONENTS=$(yq -I0 --tojson eval - << EOF - - component: secretstores.azure.keyvault - required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreClientId,AzureKeyVaultSecretStoreServicePrincipalClientId,AzureKeyVaultSecretStoreServicePrincipalClientSecret,AzureContainerRegistryName,AzureResourceGroupName - required-certs: AzureKeyVaultSecretStoreCert - - component: state.sqlserver - required-secrets: AzureSqlServerConnectionString - - component: bindings.azure.servicebusqueues - required-secrets: AzureServiceBusConnectionString - - component: bindings.azure.cosmosdb - required-secrets: AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection,AzureCosmosDBMasterKey,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret - - component: bindings.azure.eventhubs - required-secrets: AzureEventHubsBindingsConnectionString,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureEventHubsBindingsHub,AzureEventHubsBindingsNamespace,AzureEventHubsBindingsConsumerGroup,AzureCertificationServicePrincipalClientId,AzureCertificationTenantId,AzureCertificationServicePrincipalClientSecret,AzureResourceGroupName,AzureCertificationSubscriptionId,AzureEventHubsBindingsContainer,AzureIotHubEventHubConnectionString,AzureIotHubName,AzureIotHubBindingsConsumerGroup - - component: pubsub.azure.eventhubs - required-secrets: AzureEventHubsPubsubTopicActiveConnectionString,AzureEventHubsPubsubNamespace,AzureEventHubsPubsubNamespaceConnectionString,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureEventHubsPubsubContainer,AzureIotHubName,AzureIotHubEventHubConnectionString,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret,AzureResourceGroupName,AzureCertificationSubscriptionId - - component: pubsub.azure.servicebus.topics - required-secrets: AzureServiceBusConnectionString,AzureServiceBusNamespace, AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret - - component: bindings.azure.blobstorage - required-secrets: AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureBlobStorageContainer,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret - - component: bindings.azure.storagequeues - required-secrets: AzureBlobStorageAccount, AzureBlobStorageAccessKey - - component: state.azure.tablestorage - required-secrets: AzureBlobStorageAccount, AzureBlobStorageAccessKey, AzureCertificationTenantId, AzureCertificationServicePrincipalClientId, AzureCertificationServicePrincipalClientSecret - - component: state.azure.blobstorage - required-secrets: AzureBlobStorageContainer,AzureBlobStorageAccount, AzureBlobStorageAccessKey, AzureCertificationTenantId, AzureCertificationServicePrincipalClientId, AzureCertificationServicePrincipalClientSecret - - component: state.azure.cosmosdb - required-secrets: AzureCosmosDBMasterKey, AzureCosmosDBUrl, AzureCosmosDB, 
AzureCosmosDBCollection, AzureCertificationTenantId, AzureCertificationServicePrincipalClientId, AzureCertificationServicePrincipalClientSecret - - component: pubsub.aws.snssqs - terraform-dir: pubsub/aws/snssqs - - component: state.aws.dynamodb - terraform-dir: state/aws/dynamodb - - EOF - ) - echo "cloud-components=$CRON_COMPONENTS" >> $GITHUB_OUTPUT - name: Create PR comment if: env.PR_NUMBER != '' @@ -150,24 +82,24 @@ jobs: Commit ref: ${{ env.CHECKOUT_REF }} outputs: - pr-components: ${{ steps.pr-components.outputs.pr-components }} - cloud-components: ${{ steps.cloud-components.outputs.cloud-components }} + test-matrix: ${{ steps.generate-matrix.outputs.test-matrix }} certification: name: ${{ matrix.component }} certification - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 + env: + UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}} defaults: run: shell: bash - needs: generate-matrix - env: - UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}} + + needs: + - generate-matrix strategy: fail-fast: false # Keep running even if one component fails - matrix: - component: ${{ fromJson(needs.generate-matrix.outputs.pr-components) }} - include: ${{ fromJson(needs.generate-matrix.outputs.cloud-components) }} + matrix: + include: ${{ fromJson(needs.generate-matrix.outputs.test-matrix) }} steps: - name: Set default payload repo and ref @@ -189,22 +121,23 @@ jobs: repository: ${{ env.CHECKOUT_REPO }} ref: ${{ env.CHECKOUT_REF }} - - name: Setup test output + - name: Configure environment run: | - export TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report - echo "TEST_OUTPUT_FILE_PREFIX=$TEST_OUTPUT_FILE_PREFIX" >> $GITHUB_ENV + # Output file + echo "TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report" >> $GITHUB_ENV - - name: Configure certification test and source path - run: | - TEST_COMPONENT=$(echo ${{ matrix.component }} | sed -E 's/\./\//g') - export TEST_PATH="tests/certification/${TEST_COMPONENT}" - echo "TEST_PATH=$TEST_PATH" >> $GITHUB_ENV - export SOURCE_PATH="github.com/dapr/components-contrib/${TEST_COMPONENT}" + # Certification test and source path + TEST_COMPONENT=$(echo "${{ matrix.component }}" | sed -E 's/\./\//g') + echo "TEST_PATH=tests/certification/${TEST_COMPONENT}" >> $GITHUB_ENV + SOURCE_PATH="github.com/dapr/components-contrib/${TEST_COMPONENT}" echo "SOURCE_PATH=$SOURCE_PATH" >> $GITHUB_ENV # converts slashes to dots in this string, so that it doesn't consider them sub-folders - export SOURCE_PATH_LINEAR=$(echo "$SOURCE_PATH" |sed 's#/#\.#g') + SOURCE_PATH_LINEAR=$(echo "$SOURCE_PATH" |sed 's#/#\.#g') echo "SOURCE_PATH_LINEAR=$SOURCE_PATH_LINEAR" >> $GITHUB_ENV + # Current time (used by Terraform) + echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV} + - uses: Azure/login@v1 with: creds: ${{ secrets.AZURE_CREDENTIALS }} @@ -214,7 +147,6 @@ jobs: # az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID # Using az cli to query keyvault as Azure/get-keyvault-secrets@v1 is deprecated - name: Setup secrets - id: get-azure-secrets if: matrix.required-secrets != '' env: VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }} @@ -245,110 +177,37 @@ jobs: echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV done - - name: Get current time - run: | - echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV} - - name: Setup Terraform - uses: hashicorp/setup-terraform@v2 - if: matrix.terraform-dir != '' + uses: hashicorp/setup-terraform@v2.0.3 + if: matrix.require-terraform == 'true' - - name: Set AWS Region - if: contains(matrix.component, 'aws') 
+ - name: Set Cloudflare env vars + if: matrix.require-cloudflare-credentials == 'true' run: | - AWS_REGION="us-west-1" - echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV - + echo "CLOUDFLARE_ACCOUNT_ID=${{ secrets.CLOUDFLARE_ACCOUNT_ID }}" >> $GITHUB_ENV + echo "CLOUDFLARE_API_TOKEN=${{ secrets.CLOUDFLARE_API_TOKEN }}" >> $GITHUB_ENV + + - name: Set AWS env vars + if: matrix.require-aws-credentials == 'true' + run: | + echo "AWS_REGION=us-west-1" >> $GITHUB_ENV + echo "AWS_ACCESS_KEY=${{ secrets.AWS_ACCESS_KEY }}" >> $GITHUB_ENV + echo "AWS_SECRET_KEY=${{ secrets.AWS_SECRET_KEY }}" >> $GITHUB_ENV + - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 + # TODO: Remove "v1-node16" when v2 is released + # See: https://github.com/aws-actions/configure-aws-credentials/issues/489 + uses: aws-actions/configure-aws-credentials@v1-node16 + if: matrix.require-aws-credentials == 'true' with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }} + aws-access-key-id: "${{ secrets.AWS_ACCESS_KEY }}" + aws-secret-access-key: "${{ secrets.AWS_SECRET_KEY }}" aws-region: "${{ env.AWS_REGION }}" - if: matrix.terraform-dir != '' - - - name: Terraform Init - id: init - run: terraform init - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - - name: Terraform Validate - id: validate - run: terraform validate -no-color - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - - name: Terraform Plan - id: plan - run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}" - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - - name: Terraform Apply - run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}" - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - continue-on-error: true - - - name: Create aws.snssqs specific variables - if: contains(matrix.component, 'snssqs') - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - run: | - PUBSUB_AWS_SNSSQS_QUEUE_1="sqssnscerttest-q1-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_QUEUE_1=$PUBSUB_AWS_SNSSQS_QUEUE_1" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_QUEUE_2="sqssnscerttest-q2-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_QUEUE_2=$PUBSUB_AWS_SNSSQS_QUEUE_2" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_QUEUE_3="sqssnscerttest-q3-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_QUEUE_3=$PUBSUB_AWS_SNSSQS_QUEUE_3" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC_3="sqssnscerttest-t3-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_TOPIC_3=$PUBSUB_AWS_SNSSQS_TOPIC_3" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_QUEUE_MVT="sqssnscerttest-q-mvt-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_QUEUE_MVT=$PUBSUB_AWS_SNSSQS_QUEUE_MVT" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC_MVT="sqssnscerttest-tp-mvt-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_TOPIC_MVT=$PUBSUB_AWS_SNSSQS_TOPIC_MVT" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_QUEUE_DLIN="sqssnscerttest-dlq-in-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_QUEUE_DLIN=$PUBSUB_AWS_SNSSQS_QUEUE_DLIN" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_QUEUE_DLOUT="sqssnscerttest-dlq-out-${{env.UNIQUE_ID}}" - echo 
"PUBSUB_AWS_SNSSQS_QUEUE_DLOUT=$PUBSUB_AWS_SNSSQS_QUEUE_DLOUT" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_TOPIC_DLIN="sqssnscerttest-dlt-in-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_TOPIC_DLIN=$PUBSUB_AWS_SNSSQS_TOPIC_DLIN" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_QUEUE_FIFO="sqssnscerttest-q-fifo-${{env.UNIQUE_ID}}.fifo" - echo "PUBSUB_AWS_SNSSQS_QUEUE_FIFO=$PUBSUB_AWS_SNSSQS_QUEUE_FIFO" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC_FIFO="sqssnscerttest-t-fifo-${{env.UNIQUE_ID}}.fifo" - echo "PUBSUB_AWS_SNSSQS_TOPIC_FIFO=$PUBSUB_AWS_SNSSQS_TOPIC_FIFO" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID="sqssnscerttest-q-fifo-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID=$PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID" >> $GITHUB_ENV - - PUBSUB_AWS_SNSSQS_QUEUE_NODRT="sqssnscerttest-q-nodrt-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_QUEUE_NODRT=$PUBSUB_AWS_SNSSQS_QUEUE_NODRT" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC_NODRT="sqssnscerttest-t-nodrt-${{env.UNIQUE_ID}}" - echo "PUBSUB_AWS_SNSSQS_TOPIC_NODRT=$PUBSUB_AWS_SNSSQS_TOPIC_NODRT" >> $GITHUB_ENV - - AWS_REGION="us-east-1" - echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV - - - name: Create state aws.dynamodb specific variables - if: contains(matrix.component, 'dynamodb') - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - run: | - STATE_AWS_DYNAMODB_TABLE_1="certification-test-terraform-basic-${{ env.UNIQUE_ID }}" - echo "STATE_AWS_DYNAMODB_TABLE_1=$STATE_AWS_DYNAMODB_TABLE_1" >> $GITHUB_ENV - STATE_AWS_DYNAMODB_TABLE_2="certification-test-terraform-partition-key-${{ env.UNIQUE_ID }}" - echo "STATE_AWS_DYNAMODB_TABLE_2=$STATE_AWS_DYNAMODB_TABLE_2" >> $GITHUB_ENV - AWS_REGION="us-east-1" - echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV - name: Set up Go uses: actions/setup-go@v3 with: - go-version: '^1.19' + go-version: '^${{ env.GO_VERSION }}' - name: Download Go dependencies working-directory: ${{ env.TEST_PATH }} @@ -360,10 +219,19 @@ jobs: - name: Check that go mod tidy is up-to-date working-directory: ${{ env.TEST_PATH }} run: | - go mod tidy -compat=1.19 + go mod tidy -compat=${{ env.GO_VERSION }} git diff --exit-code ./go.mod git diff --exit-code ./go.sum - + + - name: Run setup script + if: matrix.setup-script != '' + run: .github/scripts/components-scripts/${{ matrix.setup-script }} + + - name: Catch setup failures + if: failure() + run: | + echo "CERTIFICATION_FAILURE=true" >> $GITHUB_ENV + - name: Run tests continue-on-error: false working-directory: ${{ env.TEST_PATH }} @@ -402,7 +270,7 @@ jobs: CERT_FILE=$(printenv $CERT_NAME) echo "Cleaning up the certificate file $CERT_FILE..." 
- rm $CERT_FILE + rm $CERT_FILE || true done if [[ -v CERTIFICATION_FAILURE ]]; then @@ -462,15 +330,13 @@ jobs: name: ${{ matrix.component }}_certification_test path: ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.* - - name: Terraform Destroy - continue-on-error: true - run: terraform destroy -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}" - working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' + - name: Run destroy script + if: always() && matrix.destroy-script != '' + run: .github/scripts/components-scripts/${{ matrix.destroy-script }} post_job: name: Post-completion - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 if: always() needs: - certification @@ -500,9 +366,7 @@ jobs: uses: actions/github-script@v6 with: script: | - const prComponents = ('${{ needs.generate-matrix.outputs.pr-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.pr-components }}')) || [] - const cloudComponents = ('${{ needs.generate-matrix.outputs.cloud-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.cloud-components }}')) || [] - const allComponents = [...prComponents, ...cloudComponents] + const allComponents = JSON.parse('${{ needs.generate-matrix.outputs.test-matrix }}') const basePath = '${{ steps.testresults.outputs.download-path }}' const testType = 'certification' @@ -530,7 +394,7 @@ jobs: let found = false let success = false try { - let read =fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8') + let read = fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8') read = read.split('\n')[0] switch (read) { case '1': diff --git a/.github/workflows/conformance.yml b/.github/workflows/conformance.yml index dcba25abf..a8553abbe 100644 --- a/.github/workflows/conformance.yml +++ b/.github/workflows/conformance.yml @@ -22,18 +22,22 @@ on: - cron: '0 */8 * * *' push: branches: - - 'release-*' + - 'release-*' pull_request: branches: - - master + - 'master' - 'release-*' +env: + # Only specify a major version, such as 1.20 + GO_VERSION: '1.19' + jobs: # Based on whether this is a PR or a scheduled run, we will run a different # subset of the conformance tests. This allows all the tests not requiring # secrets to be executed on pull requests. 
generate-matrix: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Parse repository_dispatch payload if: github.event_name == 'repository_dispatch' @@ -44,126 +48,24 @@ jobs: echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV fi - - name: Install yq - run: | - sudo snap install yq + - name: Check out code + uses: actions/checkout@v3 + with: + repository: ${{ env.CHECKOUT_REPO }} + ref: ${{ env.CHECKOUT_REF }} - - name: Specify components not requiring secrets nor certs - id: pr-components + - name: Generate test matrix + id: generate-matrix + env: + VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }} run: | - PR_COMPONENTS=$(yq -I0 --tojson eval - << EOF - - bindings.cron - - bindings.http - - bindings.influx - - bindings.kafka-wurstmeister - - bindings.kafka-confluent - - bindings.mqtt3-emqx - - bindings.mqtt3-mosquitto - - bindings.mqtt3-vernemq - - bindings.postgres - - bindings.redis.v6 - - bindings.redis.v7 - - bindings.kubemq - - bindings.rabbitmq - - pubsub.aws.snssqs.docker - - configuration.redis.v6 - - configuration.redis.v7 - - pubsub.hazelcast - - pubsub.in-memory - - pubsub.mqtt3-emqx - - pubsub.mqtt3-vernemq - - pubsub.natsstreaming - - pubsub.pulsar - - pubsub.rabbitmq - - pubsub.redis.v6 - - pubsub.kafka-wurstmeister - - pubsub.kafka-confluent - - pubsub.kubemq - - pubsub.solace - - secretstores.kubernetes - - secretstores.localenv - - secretstores.localfile - - secretstores.hashicorp.vault - - state.cassandra - - state.memcached - - state.mongodb - - state.mysql.mysql - - state.mysql.mariadb - - state.postgresql - - state.redis.v6 - - state.redis.v7 - - state.sqlite - - state.sqlserver - - state.in-memory - - state.cockroachdb - - workflows.temporal - - state.rethinkdb - EOF - ) - echo "pr-components=$PR_COMPONENTS" >> $GITHUB_OUTPUT - - - name: Specify components requiring secrets or certs - id: cron-components - run: | - if [ "${{ github.event_name }}" = "pull_request" ]; then - echo "cron-components=[]" >> $GITHUB_OUTPUT - exit + if [ -z "$VAULT_NAME" ]; then + # Do not include cloud tests when credentials are not available + node .github/scripts/test-info.mjs conformance false + else + # Include cloud tests + node .github/scripts/test-info.mjs conformance true fi - # Unfortunately, Azure secrets can't have underscores in - # names, while environment variables with hyphens ('-') are - # troublesome. - # - # We work around here by leveraging the fact that - # environment variable names are case sensitive, so - # CamelCase would still work. - # - # That is slightly better than something like - # AZURECOSMOSDBMASTERKEY, which is extremely hard to read - # and errorprone. - # - # Only list the secrets you need for the component. 
- CRON_COMPONENTS=$(yq -I0 --tojson eval - << EOF - - component: state.azure.blobstorage - required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount - - component: state.azure.cosmosdb - required-secrets: AzureCosmosDBMasterKey,AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection - - component: state.azure.sql - required-secrets: AzureResourceGroupName, AzureSqlServerName, AzureSqlServerConnectionString - - component: state.azure.tablestorage.storage - required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount - - component: state.azure.tablestorage.cosmosdb - required-secrets: AzureCosmosDBTableAPI,AzureCosmosDBTableAPIMasterKey - - component: pubsub.azure.eventhubs - required-secrets: AzureEventHubsPubsubNamespaceConnectionString,AzureEventHubsPubsubConsumerGroup,AzureBlobStorageAccessKey,AzureBlobStorageAccount,AzureEventHubsPubsubContainer - - component: pubsub.azure.servicebus.topics - required-secrets: AzureServiceBusConnectionString - - component: pubsub.azure.servicebus.queues - required-secrets: AzureServiceBusConnectionString - - component: bindings.azure.blobstorage - required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount - - component: bindings.azure.eventgrid - required-secrets: AzureEventGridNgrokToken,AzureEventGridAccessKey,AzureEventGridTopicEndpoint,AzureEventGridScope,AzureEventGridClientSecret,AzureEventGridClientId,AzureEventGridTenantId,AzureEventGridSubscriptionId - - component: bindings.azure.eventhubs - required-secrets: AzureEventHubsBindingsConnectionString,AzureEventHubsBindingsConsumerGroup,AzureBlobStorageAccessKey,AzureBlobStorageAccount,AzureEventHubsBindingsContainer - - component: bindings.azure.servicebusqueues - required-secrets: AzureServiceBusConnectionString - - component: bindings.azure.storagequeues - required-secrets: AzureBlobStorageAccessKey,AzureBlobStorageAccount,AzureBlobStorageQueue - - component: secretstores.azure.keyvault.certificate - required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreClientId - required-certs: AzureKeyVaultSecretStoreCert - - component: secretstores.azure.keyvault.serviceprincipal - required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreServicePrincipalClientId,AzureKeyVaultSecretStoreServicePrincipalClientSecret - - component: bindings.azure.cosmosdb - required-secrets: AzureCosmosDBMasterKey,AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection - - component: pubsub.aws.snssqs.terraform - terraform-dir: pubsub/aws/snssqs - - component: state.aws.dynamodb.terraform - terraform-dir: state/aws/dynamodb - - component: state.cloudflare.workerskv - EOF - ) - echo "cron-components=$CRON_COMPONENTS" >> $GITHUB_OUTPUT - name: Create PR comment if: env.PR_NUMBER != '' @@ -180,27 +82,24 @@ jobs: Commit ref: ${{ env.CHECKOUT_REF }} outputs: - pr-components: ${{ steps.pr-components.outputs.pr-components }} - cron-components: ${{ steps.cron-components.outputs.cron-components }} + test-matrix: ${{ steps.generate-matrix.outputs.test-matrix }} conformance: name: ${{ matrix.component }} conformance - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 env: - # Version of Node.js to use - # Currently used by the Cloudflare components - NODE_VERSION: 18.x UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}} defaults: run: shell: bash - needs: generate-matrix + + needs: + - generate-matrix strategy: fail-fast: false # Keep running even if one component fails - matrix: - component: ${{ 
fromJson(needs.generate-matrix.outputs.pr-components) }} - include: ${{ fromJson(needs.generate-matrix.outputs.cron-components) }} + matrix: + include: ${{ fromJson(needs.generate-matrix.outputs.test-matrix) }} steps: - name: Set default payload repo and ref @@ -219,27 +118,29 @@ jobs: echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV fi - - name: Check out code onto GOPATH + - name: Check out code uses: actions/checkout@v3 with: repository: ${{ env.CHECKOUT_REPO }} ref: ${{ env.CHECKOUT_REF }} - - name: Setup test output + - name: Setup test environment run: | - export TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report - echo "TEST_OUTPUT_FILE_PREFIX=$TEST_OUTPUT_FILE_PREFIX" >> $GITHUB_ENV + # Output file + echo "TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report" >> $GITHUB_ENV + + # Current time (used by Terraform) + echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV} - uses: Azure/login@v1 + if: matrix.required-secrets != '' with: creds: ${{ secrets.AZURE_CREDENTIALS }} - if: matrix.required-secrets != '' # Set this GitHub secret to your KeyVault, and grant the KeyVault policy to your Service Principal: # az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID # Using az cli to query keyvault as Azure/get-keyvault-secrets@v1 is deprecated - name: Setup secrets - id: get-azure-secrets if: matrix.required-secrets != '' env: VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }} @@ -256,21 +157,6 @@ jobs: echo "$secretName=$value" >> $GITHUB_ENV done - - name: Start ngrok - if: contains(matrix.component, 'azure.eventgrid') - run: | - wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip - unzip -qq ngrok-stable-linux-amd64.zip - ./ngrok authtoken ${{ env.AzureEventGridNgrokToken }} - ./ngrok http -log=stdout --log-level debug -host-header=localhost 9000 > /tmp/ngrok.log & - sleep 10 - export NGROK_ENDPOINT=`cat /tmp/ngrok.log | grep -Eom1 'https://.*' | sed 's/\s.*//'` - echo "Ngrok's endpoint: ${NGROK_ENDPOINT}" - echo "AzureEventGridSubscriberEndpoint=${NGROK_ENDPOINT}/api/events" >> $GITHUB_ENV - cat /tmp/ngrok.log - # Schedule trigger to kill ngrok - bash -c "sleep 500 && pkill ngrok" & - # Download the required certificates into files, and set env var pointing to their names - name: Setup certs if: matrix.required-certs != '' @@ -284,257 +170,70 @@ jobs: echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV done - - name: Get current time - run: | - echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV} - - name: Setup Terraform - uses: hashicorp/setup-terraform@v2 - if: matrix.terraform-dir != '' + if: matrix.require-terraform == 'true' + uses: hashicorp/setup-terraform@v2.0.3 + + - name: Set Cloudflare env vars + if: matrix.require-cloudflare-credentials == 'true' + run: | + echo "CLOUDFLARE_ACCOUNT_ID=${{ secrets.CLOUDFLARE_ACCOUNT_ID }}" >> $GITHUB_ENV + echo "CLOUDFLARE_API_TOKEN=${{ secrets.CLOUDFLARE_API_TOKEN }}" >> $GITHUB_ENV + + - name: Set AWS env vars + if: matrix.require-aws-credentials == 'true' + run: | + echo "AWS_ACCESS_KEY=${{ secrets.AWS_ACCESS_KEY }}" >> $GITHUB_ENV + echo "AWS_SECRET_KEY=${{ secrets.AWS_SECRET_KEY }}" >> $GITHUB_ENV - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v1 + if: matrix.require-aws-credentials == 'true' + # TODO: Remove "v1-node16" when v2 is released + # See: https://github.com/aws-actions/configure-aws-credentials/issues/489 + uses: aws-actions/configure-aws-credentials@v1-node16 with: aws-access-key-id: ${{ 
secrets.AWS_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }} aws-region: us-west-1 - if: matrix.terraform-dir != '' - - - name: Terraform Init - id: init - run: terraform init - working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - - name: Terraform Validate - id: validate - run: terraform validate -no-color - working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - - name: Terraform Plan - id: plan - run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}" - working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - - name: Terraform Apply - run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}" - working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - continue-on-error: true - - - name: Create aws.snssqs variables - run: | - PUBSUB_AWS_SNSSQS_QUEUE="testQueue-${{ env.UNIQUE_ID }}" - echo "PUBSUB_AWS_SNSSQS_QUEUE=$PUBSUB_AWS_SNSSQS_QUEUE" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC="testTopic-${{ env.UNIQUE_ID }}" - echo "PUBSUB_AWS_SNSSQS_TOPIC=$PUBSUB_AWS_SNSSQS_TOPIC" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1="multiTopic1-${{ env.UNIQUE_ID }}" - echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1=$PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1" >> $GITHUB_ENV - PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2="multiTopic2-${{ env.UNIQUE_ID }}" - echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2=$PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2" >> $GITHUB_ENV - if: contains(matrix.component, 'snssqs') - - - name: Create aws.dynamodb variables - run: | - STATE_AWS_DYNAMODB_TABLE_1="conformance-test-terraform-basic-${{ env.UNIQUE_ID }}" - echo "STATE_AWS_DYNAMODB_TABLE_1=$STATE_AWS_DYNAMODB_TABLE_1" >> $GITHUB_ENV - STATE_AWS_DYNAMODB_TABLE_2="conformance-test-terraform-partition-key-${{ env.UNIQUE_ID }}" - echo "STATE_AWS_DYNAMODB_TABLE_2=$STATE_AWS_DYNAMODB_TABLE_2" >> $GITHUB_ENV - if: contains(matrix.component, 'dynamodb') - - - name: Start Redis 6 with Redis JSON - run: docker-compose -f ./.github/infrastructure/docker-compose-redisjson.yml -p redis up -d - if: contains(matrix.component, 'redis.v6') - - - name: Start Redis 7 - run: docker-compose -f ./.github/infrastructure/docker-compose-redis7.yml -p redis up -d - if: contains(matrix.component, 'redis.v7') - - - name: Start Temporal - run: docker-compose -f ./.github/infrastructure/docker-compose-temporal.yml -p temporal up -d - if: contains(matrix.component, 'temporal') - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.3.0 + if: matrix.mongodb-version != '' + uses: supercharge/mongodb-github-action@1.8.0 with: - mongodb-version: 4.2 + mongodb-version: ${{ matrix.mongodb-version }} mongodb-replica-set: test-rs - if: contains(matrix.component, 'mongodb') - - - name: Start sqlserver - run: docker-compose -f ./.github/infrastructure/docker-compose-sqlserver.yml -p sqlserver up -d - if: contains(matrix.component, 'sqlserver') - - - name: Start kafka - run: docker-compose -f ./.github/infrastructure/docker-compose-kafka.yml -p kafka up -d - if: contains(matrix.component, 'wurstmeister') - - - name: Start kafka Confluent - run: docker-compose -f ./.github/infrastructure/docker-compose-confluent.yml -p confluent up -d - if: contains(matrix.component, 'confluent') - - - name: 
Start memcached - run: docker-compose -f ./.github/infrastructure/docker-compose-memcached.yml -p memcached up -d - if: contains(matrix.component, 'memcached') - - - name: Start natsstreaming - run: docker-compose -f ./.github/infrastructure/docker-compose-natsstreaming.yml -p natsstreaming up -d - if: contains(matrix.component, 'natsstreaming') - - - name: Start pulsar - run: docker-compose -f ./.github/infrastructure/docker-compose-pulsar.yml -p pulsar up -d - if: contains(matrix.component, 'pulsar') - - - name: Start Eclipse Mosquitto (MQTT3) - run: docker-compose -f ./.github/infrastructure/docker-compose-mosquitto.yml -p mosquitto up -d - if: contains(matrix.component, 'mqtt3-mosquitto') - - - name: Start EMQ X (MQTT3) - run: docker-compose -f ./.github/infrastructure/docker-compose-emqx.yml -p emqx up -d - if: contains(matrix.component, 'mqtt3-emqx') - - - name: Start VerneMQ (MQTT3) - run: docker-compose -f ./.github/infrastructure/docker-compose-vernemq.yml -p vernemq up -d - if: contains(matrix.component, 'mqtt3-vernemq') - - - name: Start hazelcast - run: docker-compose -f ./.github/infrastructure/docker-compose-hazelcast.yml -p hazelcast up -d - if: contains(matrix.component, 'hazelcast') - - - name: Start rabbitmq - run: docker-compose -f ./.github/infrastructure/docker-compose-rabbitmq.yml -p rabbitmq up -d - if: contains(matrix.component, 'rabbitmq') - - - name: Install Node.js ${{ env.NODE_VERSION }} - if: contains(matrix.component, 'cloudflare') - uses: actions/setup-node@v3 - with: - node-version: ${{ env.NODE_VERSION }} - - - name: Check Cloudflare Workers code - if: contains(matrix.component, 'cloudflare') - run: | - # Build the Worker - ( - cd internal/component/cloudflare/worker-src; - npm ci; - npm run build; - ) - # Check no changes - git diff --exit-code ./internal/component/cloudflare/workers/code \ - || (echo "The source code of the Cloudflare Worker has changed, but the Worker has not been recompiled. 
Please re-compile the Worker by running 'npm ci && npm run build' in 'internal/component/cloudflare/worker-src'" && exit 1) - - - name: Setup Cloudflare KV - if: matrix.component == 'state.cloudflare.workerskv' - env: - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - run: | - CloudflareWorkerName="daprconfkv${{ github.run_id }}${{ github.run_attempt }}" - CloudflareKVNamespaceID=$( curl -s -X POST "https://api.cloudflare.com/client/v4/accounts/${CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces" \ - -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" \ - -H "Content-Type: application/json" \ - --data "{\"title\":\"${CloudflareWorkerName}\"}" \ - | jq -r ".result.id" ) - - echo "CloudflareWorkerName=${CloudflareWorkerName}" >> $GITHUB_ENV - echo "CloudflareAPIToken=${CLOUDFLARE_API_TOKEN}" >> $GITHUB_ENV - echo "CloudflareAccountID=${CLOUDFLARE_ACCOUNT_ID}" >> $GITHUB_ENV - echo "CloudflareKVNamespaceID=${CloudflareKVNamespaceID}" >> $GITHUB_ENV - - - name: Start aws snssqs - run: docker-compose -f ./.github/infrastructure/docker-compose-snssqs.yml -p snssqs up -d - if: contains(matrix.component, 'aws.snssqs.docker') - - - name: Start influxdb - run: | - export INFLUX_TOKEN=$(openssl rand -base64 32) - echo "INFLUX_TOKEN=$INFLUX_TOKEN" >> $GITHUB_ENV - docker-compose -f ./.github/infrastructure/docker-compose-influxdb.yml -p influxdb up -d - if: contains(matrix.component, 'influx') - - - name: Start mysql - run: | - docker-compose -f ./.github/infrastructure/docker-compose-mysql.yml -p mysql up -d - if: contains(matrix.component, 'mysql.mysql') - - - name: Start mariadb - run: | - docker-compose -f ./.github/infrastructure/docker-compose-mariadb.yml -p mariadb up -d - if: contains(matrix.component, 'mysql.mariadb') - - - name: Start KinD - uses: helm/kind-action@v1.4.0 - if: contains(matrix.component, 'kubernetes') - - - name: Start postgresql - run: | - docker-compose -f ./.github/infrastructure/docker-compose-postgresql.yml -p postgresql up -d - if: contains(matrix.component, 'postgres') - - - name: Start cassandra - run: | - docker-compose -f ./.github/infrastructure/docker-compose-cassandra.yml -p cassandra up -d - if: contains(matrix.component, 'cassandra') - - - name: Start cockroachdb - run: | - docker-compose -f ./.github/infrastructure/docker-compose-cockroachdb.yml -p cockroachdb up -d - if: contains(matrix.component, 'cockroachdb') - - - name: Start vault - run: | - docker-compose -f ./.github/infrastructure/docker-compose-hashicorp-vault.yml -p vault up -d - if: contains(matrix.component, 'hashicorp.vault') - - - name: Start rethinkdb - run: | - docker-compose -f ./.github/infrastructure/docker-compose-rethinkdb.yml -p rethinkdb up -d - if: contains(matrix.component, 'rethinkdb') - - - name: Start kubemq - run: docker-compose -f ./.github/infrastructure/docker-compose-kubemq.yml -p kubemq up -d - if: contains(matrix.component, 'kubemq') - - - name: Start solace - run: docker-compose -f ./.github/infrastructure/docker-compose-solace.yml -p solace up -d - if: contains(matrix.component, 'solace') - - - name: Start nats with JetStream - run: | - docker-compose -f ./.github/infrastructure/docker-compose-jetstream.yml up -p jetstream -d - if: contains(matrix.component, 'jetstream') - - - name: Setup KinD test data - if: contains(matrix.component, 'kubernetes') - run: | - kubectl apply -f tests/config/kind-data.yaml - echo "NAMESPACE=default" >> $GITHUB_ENV - name: Set up Go uses: actions/setup-go@v3 with: - 
go-version: '^1.19' + go-version: '^${{ env.GO_VERSION }}' + + - name: Install Node.js ${{ matrix.nodejs-version }} + if: matrix.nodejs-version != '' + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.nodejs-version }} + + - name: Start KinD + uses: helm/kind-action@v1.5.0 + if: matrix.require-kind == 'true' - name: Download Go dependencies run: | go mod download go install gotest.tools/gotestsum@latest - - name: Generate Azure SQL DB name + - name: Run setup script + if: matrix.setup-script != '' + run: .github/scripts/components-scripts/${{ matrix.setup-script }} + + - name: Catch setup failures + if: failure() run: | - # Use UUID with `-` stripped out for DB names to prevent collisions between workflows - export AzureSqlServerDbName=$(cat /proc/sys/kernel/random/uuid | sed -E 's/-//g') - echo "AzureSqlServerDbName=$AzureSqlServerDbName" >> $GITHUB_ENV - if: contains(matrix.component, 'azure.sql') + echo "CONFORMANCE_FAILURE=true" >> $GITHUB_ENV - name: Run tests continue-on-error: true - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }} run: | set -e KIND=$(echo ${{ matrix.component }} | cut -d. -f1) @@ -566,60 +265,16 @@ jobs: exit -1 fi - - name: Stop ngrok - if: contains(matrix.component, 'azure.eventgrid') - continue-on-error: true - run: | - set +e - echo "GET ngrok tunnels:" - curl http://localhost:4040/api/tunnels - echo "GET ngrok http requests:" - curl http://localhost:4040/api/requests/http - pkill ngrok - cat /tmp/ngrok.log - - - name: Cleanup Azure SQL test DB instance - if: contains(matrix.component, 'azure.sql') - continue-on-error: true - run: | - # Wait for the creation of the DB by the test to propagate to ARM, otherwise deletion succeeds as no-op. - # The wait should be under 30s, but is capped at 1m as flakiness here results in an accumulation of expensive DB instances over time. - # Also note that the deletion call only blocks until the request is process, do not rely on it for mutex on the same DB, - # deletion may be ongoing in sequential runs. - sleep 1m - az sql db delete --resource-group ${{ env.AzureResourceGroupName }} --server ${{ env.AzureSqlServerName }} -n ${{ env.AzureSqlServerDbName }} --yes - - - name: Cleanup Cloudflare KV - if: always() && matrix.component == 'state.cloudflare.workerskv' - continue-on-error: true - env: - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - run: | - # Delete the Worker - curl -X DELETE "https://api.cloudflare.com/client/v4/accounts/${CLOUDFLARE_ACCOUNT_ID}/workers/scripts/${{ env.CloudflareWorkerName }}" \ - -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" - # Delete the KV namespace - curl -X DELETE "https://api.cloudflare.com/client/v4/accounts/${CLOUDFLARE_ACCOUNT_ID}/storage/kv/namespaces/${{ env.CloudflareKVNamespaceID }}" \ - -H "Authorization: Bearer ${CLOUDFLARE_API_TOKEN}" - - # Download the required certificates into files, and set env var pointing to their names - - name: Clean up certs - if: matrix.required-certs != '' + - name: Delete downloaded up certs + if: always() && matrix.required-certs != '' run: | for CERT_NAME in $(echo "${{ matrix.required-certs }}" | sed 's/,/ /g'); do CERT_FILE=$(printenv $CERT_NAME) echo "Cleaning up the certificate file $CERT_FILE..." 
- rm $CERT_FILE + rm $CERT_FILE || true done - - name: Terraform Destroy - continue-on-error: true - run: terraform destroy -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}" - working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}" - if: matrix.terraform-dir != '' - - name: Check conformance test passed continue-on-error: false run: | @@ -655,9 +310,13 @@ jobs: name: ${{ matrix.component }}_conformance_test path: ${{ env.TEST_OUTPUT_FILE_PREFIX }}_conformance.* + - name: Run destroy script + if: always() && matrix.destroy-script != '' + run: .github/scripts/components-scripts/${{ matrix.destroy-script }} + post_job: name: Post-completion - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 if: always() needs: - conformance @@ -687,9 +346,7 @@ jobs: uses: actions/github-script@v6 with: script: | - const prComponents = ('${{ needs.generate-matrix.outputs.pr-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.pr-components }}')) || [] - const cronComponents = ('${{ needs.generate-matrix.outputs.cron-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.cron-components }}')) || [] - const allComponents = [...prComponents, ...cronComponents] + const allComponents = JSON.parse('${{ needs.generate-matrix.outputs.test-matrix }}') const basePath = '${{ steps.testresults.outputs.download-path }}' const testType = 'conformance' @@ -717,7 +374,7 @@ jobs: let found = false let success = false try { - let read =fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8') + let read = fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8') read = read.split('\n')[0] switch (read) { case '1': diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..50b720cfe --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +internal/component/cloudflare/workers/code/ \ No newline at end of file diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 000000000..e74ed9ff3 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,6 @@ +{ + "trailingComma": "es5", + "tabWidth": 4, + "semi": false, + "singleQuote": true +} diff --git a/Makefile b/Makefile index 4f3c4c913..0d12ef6e6 100644 --- a/Makefile +++ b/Makefile @@ -216,6 +216,19 @@ check-component-metadata-schema-diff: component-metadata-schema bundle-component-metadata: $(RUN_BUILD_TOOLS) bundle-component-metadata > ../component-metadata-bundle.json +################################################################################ +# Prettier # +################################################################################ +.PHONY: prettier-install prettier-check prettier-format +prettier-install: + npm install --global prettier + +prettier-check: + npx prettier --check "*/**/*.{ts,js,mjs,json}" + +prettier-format: + npx prettier --write "*/**/*.{ts,js,mjs,json}" + ################################################################################ # Target: conf-tests # ################################################################################ diff --git a/internal/component/cloudflare/worker-src/worker.ts b/internal/component/cloudflare/worker-src/worker.ts index b146d8566..5a1d1fe1c 100644 --- a/internal/component/cloudflare/worker-src/worker.ts +++ b/internal/component/cloudflare/worker-src/worker.ts @@ -106,13 +106,13 @@ const router = Router() return errorRes } - let expirationTtl: number|undefined = undefined + let expirationTtl: number | undefined = undefined const reqUrl = new URL(req.url) - const ttlParam = 
parseInt(reqUrl.searchParams.get('ttl') ||'', 10) + const ttlParam = parseInt(reqUrl.searchParams.get('ttl') || '', 10) if (ttlParam > 0) { expirationTtl = ttlParam } - await namespace!.put(key!, req.body!, {expirationTtl}) + await namespace!.put(key!, req.body!, { expirationTtl }) return new Response('', { status: 201 }) } @@ -172,7 +172,10 @@ async function setupKVRequest( return { errorRes: new Response('Bad request', { status: 400 }) } } const namespace = env[req.params.namespace] as KVNamespace - if (typeof namespace != 'object' || !['KVNamespace', 'KvNamespace'].includes(namespace?.constructor?.name)) { + if ( + typeof namespace != 'object' || + !['KVNamespace', 'KvNamespace'].includes(namespace?.constructor?.name) + ) { return { errorRes: new Response( `Worker is not bound to KV '${req.params.kv}'`, diff --git a/internal/component/cloudflare/workers/code/worker.js.map b/internal/component/cloudflare/workers/code/worker.js.map index db659c1b9..e69de29bb 100644 --- a/internal/component/cloudflare/workers/code/worker.js.map +++ b/internal/component/cloudflare/workers/code/worker.js.map @@ -1,7 +0,0 @@ -{ - "version": 3, - "sources": ["../../worker-src/node_modules/itty-router/dist/itty-router.min.mjs", "../../worker-src/node_modules/jose/dist/browser/runtime/webcrypto.js", "../../worker-src/node_modules/jose/dist/browser/lib/buffer_utils.js", "../../worker-src/node_modules/jose/dist/browser/runtime/base64url.js", "../../worker-src/node_modules/jose/dist/browser/util/errors.js", "../../worker-src/node_modules/jose/dist/browser/runtime/random.js", "../../worker-src/node_modules/jose/dist/browser/runtime/env.js", "../../worker-src/node_modules/jose/dist/browser/lib/crypto_key.js", "../../worker-src/node_modules/jose/dist/browser/lib/invalid_key_input.js", "../../worker-src/node_modules/jose/dist/browser/runtime/is_key_like.js", "../../worker-src/node_modules/jose/dist/browser/lib/is_disjoint.js", "../../worker-src/node_modules/jose/dist/browser/lib/is_object.js", "../../worker-src/node_modules/jose/dist/browser/runtime/check_key_length.js", "../../worker-src/node_modules/jose/dist/browser/runtime/asn1.js", "../../worker-src/node_modules/jose/dist/browser/key/import.js", "../../worker-src/node_modules/jose/dist/browser/lib/check_key_type.js", "../../worker-src/node_modules/jose/dist/browser/lib/validate_crit.js", "../../worker-src/node_modules/jose/dist/browser/lib/validate_algorithms.js", "../../worker-src/node_modules/jose/dist/browser/jwe/flattened/encrypt.js", "../../worker-src/node_modules/jose/dist/browser/runtime/subtle_dsa.js", "../../worker-src/node_modules/jose/dist/browser/runtime/get_sign_verify_key.js", "../../worker-src/node_modules/jose/dist/browser/runtime/verify.js", "../../worker-src/node_modules/jose/dist/browser/jws/flattened/verify.js", "../../worker-src/node_modules/jose/dist/browser/jws/compact/verify.js", "../../worker-src/node_modules/jose/dist/browser/lib/epoch.js", "../../worker-src/node_modules/jose/dist/browser/lib/secs.js", "../../worker-src/node_modules/jose/dist/browser/lib/jwt_claims_set.js", "../../worker-src/node_modules/jose/dist/browser/jwt/verify.js", "../../worker-src/lib/jwt-auth.ts", "../../worker-src/worker.ts"], - "sourcesContent": ["function e({base:t=\"\",routes:n=[]}={}){return{__proto__:new 
Proxy({},{get:(e,a,o)=>(e,...r)=>n.push([a.toUpperCase(),RegExp(`^${(t+e).replace(/(\\/?)\\*/g,\"($1.*)?\").replace(/(\\/$)|((?<=\\/)\\/)/,\"\").replace(/:(\\w+)(\\?)?(\\.)?/g,\"$2(?<$1>[^/]+)$2$3\").replace(/\\.(?=[\\w(])/,\"\\\\.\").replace(/\\)\\.\\?\\(([^\\[]+)\\[\\^/g,\"?)\\\\.?($1(?<=\\\\.)[^\\\\.\")}/*$`),r])&&o}),routes:n,async handle(e,...r){let a,o,t=new URL(e.url);e.query=Object.fromEntries(t.searchParams);for(var[p,s,u]of n)if((p===e.method||\"ALL\"===p)&&(o=t.pathname.match(s))){e.params=o.groups;for(var c of u)if(void 0!==(a=await c(e.proxy||e,...r)))return a}}}}export default{Router:e};export{e as Router};\n", "export default crypto;\nexport const isCryptoKey = (key) => key instanceof CryptoKey;\n", "import digest from '../runtime/digest.js';\nexport const encoder = new TextEncoder();\nexport const decoder = new TextDecoder();\nconst MAX_INT32 = 2 ** 32;\nexport function concat(...buffers) {\n const size = buffers.reduce((acc, { length }) => acc + length, 0);\n const buf = new Uint8Array(size);\n let i = 0;\n buffers.forEach((buffer) => {\n buf.set(buffer, i);\n i += buffer.length;\n });\n return buf;\n}\nexport function p2s(alg, p2sInput) {\n return concat(encoder.encode(alg), new Uint8Array([0]), p2sInput);\n}\nfunction writeUInt32BE(buf, value, offset) {\n if (value < 0 || value >= MAX_INT32) {\n throw new RangeError(`value must be >= 0 and <= ${MAX_INT32 - 1}. Received ${value}`);\n }\n buf.set([value >>> 24, value >>> 16, value >>> 8, value & 0xff], offset);\n}\nexport function uint64be(value) {\n const high = Math.floor(value / MAX_INT32);\n const low = value % MAX_INT32;\n const buf = new Uint8Array(8);\n writeUInt32BE(buf, high, 0);\n writeUInt32BE(buf, low, 4);\n return buf;\n}\nexport function uint32be(value) {\n const buf = new Uint8Array(4);\n writeUInt32BE(buf, value);\n return buf;\n}\nexport function lengthAndInput(input) {\n return concat(uint32be(input.length), input);\n}\nexport async function concatKdf(secret, bits, value) {\n const iterations = Math.ceil((bits >> 3) / 32);\n const res = new Uint8Array(iterations * 32);\n for (let iter = 0; iter < iterations; iter++) {\n const buf = new Uint8Array(4 + secret.length + value.length);\n buf.set(uint32be(iter + 1));\n buf.set(secret, 4);\n buf.set(value, 4 + secret.length);\n res.set(await digest('sha256', buf), iter * 32);\n }\n return res.slice(0, bits >> 3);\n}\n", "import { encoder, decoder } from '../lib/buffer_utils.js';\nexport const encodeBase64 = (input) => {\n let unencoded = input;\n if (typeof unencoded === 'string') {\n unencoded = encoder.encode(unencoded);\n }\n const CHUNK_SIZE = 0x8000;\n const arr = [];\n for (let i = 0; i < unencoded.length; i += CHUNK_SIZE) {\n arr.push(String.fromCharCode.apply(null, unencoded.subarray(i, i + CHUNK_SIZE)));\n }\n return btoa(arr.join(''));\n};\nexport const encode = (input) => {\n return encodeBase64(input).replace(/=/g, '').replace(/\\+/g, '-').replace(/\\//g, '_');\n};\nexport const decodeBase64 = (encoded) => {\n const binary = atob(encoded);\n const bytes = new Uint8Array(binary.length);\n for (let i = 0; i < binary.length; i++) {\n bytes[i] = binary.charCodeAt(i);\n }\n return bytes;\n};\nexport const decode = (input) => {\n let encoded = input;\n if (encoded instanceof Uint8Array) {\n encoded = decoder.decode(encoded);\n }\n encoded = encoded.replace(/-/g, '+').replace(/_/g, '/').replace(/\\s/g, '');\n try {\n return decodeBase64(encoded);\n }\n catch (_a) {\n throw new TypeError('The input to be decoded is not correctly encoded.');\n }\n};\n", 
"export class JOSEError extends Error {\n constructor(message) {\n var _a;\n super(message);\n this.code = 'ERR_JOSE_GENERIC';\n this.name = this.constructor.name;\n (_a = Error.captureStackTrace) === null || _a === void 0 ? void 0 : _a.call(Error, this, this.constructor);\n }\n static get code() {\n return 'ERR_JOSE_GENERIC';\n }\n}\nexport class JWTClaimValidationFailed extends JOSEError {\n constructor(message, claim = 'unspecified', reason = 'unspecified') {\n super(message);\n this.code = 'ERR_JWT_CLAIM_VALIDATION_FAILED';\n this.claim = claim;\n this.reason = reason;\n }\n static get code() {\n return 'ERR_JWT_CLAIM_VALIDATION_FAILED';\n }\n}\nexport class JWTExpired extends JOSEError {\n constructor(message, claim = 'unspecified', reason = 'unspecified') {\n super(message);\n this.code = 'ERR_JWT_EXPIRED';\n this.claim = claim;\n this.reason = reason;\n }\n static get code() {\n return 'ERR_JWT_EXPIRED';\n }\n}\nexport class JOSEAlgNotAllowed extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JOSE_ALG_NOT_ALLOWED';\n }\n static get code() {\n return 'ERR_JOSE_ALG_NOT_ALLOWED';\n }\n}\nexport class JOSENotSupported extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JOSE_NOT_SUPPORTED';\n }\n static get code() {\n return 'ERR_JOSE_NOT_SUPPORTED';\n }\n}\nexport class JWEDecryptionFailed extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWE_DECRYPTION_FAILED';\n this.message = 'decryption operation failed';\n }\n static get code() {\n return 'ERR_JWE_DECRYPTION_FAILED';\n }\n}\nexport class JWEInvalid extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWE_INVALID';\n }\n static get code() {\n return 'ERR_JWE_INVALID';\n }\n}\nexport class JWSInvalid extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWS_INVALID';\n }\n static get code() {\n return 'ERR_JWS_INVALID';\n }\n}\nexport class JWTInvalid extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWT_INVALID';\n }\n static get code() {\n return 'ERR_JWT_INVALID';\n }\n}\nexport class JWKInvalid extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWK_INVALID';\n }\n static get code() {\n return 'ERR_JWK_INVALID';\n }\n}\nexport class JWKSInvalid extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWKS_INVALID';\n }\n static get code() {\n return 'ERR_JWKS_INVALID';\n }\n}\nexport class JWKSNoMatchingKey extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWKS_NO_MATCHING_KEY';\n this.message = 'no applicable key found in the JSON Web Key Set';\n }\n static get code() {\n return 'ERR_JWKS_NO_MATCHING_KEY';\n }\n}\nexport class JWKSMultipleMatchingKeys extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWKS_MULTIPLE_MATCHING_KEYS';\n this.message = 'multiple matching keys found in the JSON Web Key Set';\n }\n static get code() {\n return 'ERR_JWKS_MULTIPLE_MATCHING_KEYS';\n }\n}\nexport class JWKSTimeout extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWKS_TIMEOUT';\n this.message = 'request timed out';\n }\n static get code() {\n return 'ERR_JWKS_TIMEOUT';\n }\n}\nexport class JWSSignatureVerificationFailed extends JOSEError {\n constructor() {\n super(...arguments);\n this.code = 'ERR_JWS_SIGNATURE_VERIFICATION_FAILED';\n this.message = 'signature verification failed';\n }\n static get code() {\n return 
'ERR_JWS_SIGNATURE_VERIFICATION_FAILED';\n }\n}\n", "import crypto from './webcrypto.js';\nexport default crypto.getRandomValues.bind(crypto);\n", "export function isCloudflareWorkers() {\n return (typeof WebSocketPair !== 'undefined' ||\n (typeof navigator !== 'undefined' && navigator.userAgent === 'Cloudflare-Workers') ||\n (typeof EdgeRuntime !== 'undefined' && EdgeRuntime === 'vercel'));\n}\n", "import { isCloudflareWorkers } from '../runtime/env.js';\nfunction unusable(name, prop = 'algorithm.name') {\n return new TypeError(`CryptoKey does not support this operation, its ${prop} must be ${name}`);\n}\nfunction isAlgorithm(algorithm, name) {\n return algorithm.name === name;\n}\nfunction getHashLength(hash) {\n return parseInt(hash.name.slice(4), 10);\n}\nfunction getNamedCurve(alg) {\n switch (alg) {\n case 'ES256':\n return 'P-256';\n case 'ES384':\n return 'P-384';\n case 'ES512':\n return 'P-521';\n default:\n throw new Error('unreachable');\n }\n}\nfunction checkUsage(key, usages) {\n if (usages.length && !usages.some((expected) => key.usages.includes(expected))) {\n let msg = 'CryptoKey does not support this operation, its usages must include ';\n if (usages.length > 2) {\n const last = usages.pop();\n msg += `one of ${usages.join(', ')}, or ${last}.`;\n }\n else if (usages.length === 2) {\n msg += `one of ${usages[0]} or ${usages[1]}.`;\n }\n else {\n msg += `${usages[0]}.`;\n }\n throw new TypeError(msg);\n }\n}\nexport function checkSigCryptoKey(key, alg, ...usages) {\n switch (alg) {\n case 'HS256':\n case 'HS384':\n case 'HS512': {\n if (!isAlgorithm(key.algorithm, 'HMAC'))\n throw unusable('HMAC');\n const expected = parseInt(alg.slice(2), 10);\n const actual = getHashLength(key.algorithm.hash);\n if (actual !== expected)\n throw unusable(`SHA-${expected}`, 'algorithm.hash');\n break;\n }\n case 'RS256':\n case 'RS384':\n case 'RS512': {\n if (!isAlgorithm(key.algorithm, 'RSASSA-PKCS1-v1_5'))\n throw unusable('RSASSA-PKCS1-v1_5');\n const expected = parseInt(alg.slice(2), 10);\n const actual = getHashLength(key.algorithm.hash);\n if (actual !== expected)\n throw unusable(`SHA-${expected}`, 'algorithm.hash');\n break;\n }\n case 'PS256':\n case 'PS384':\n case 'PS512': {\n if (!isAlgorithm(key.algorithm, 'RSA-PSS'))\n throw unusable('RSA-PSS');\n const expected = parseInt(alg.slice(2), 10);\n const actual = getHashLength(key.algorithm.hash);\n if (actual !== expected)\n throw unusable(`SHA-${expected}`, 'algorithm.hash');\n break;\n }\n case isCloudflareWorkers() && 'EdDSA': {\n if (!isAlgorithm(key.algorithm, 'NODE-ED25519'))\n throw unusable('NODE-ED25519');\n break;\n }\n case 'EdDSA': {\n if (key.algorithm.name !== 'Ed25519' && key.algorithm.name !== 'Ed448') {\n throw unusable('Ed25519 or Ed448');\n }\n break;\n }\n case 'ES256':\n case 'ES384':\n case 'ES512': {\n if (!isAlgorithm(key.algorithm, 'ECDSA'))\n throw unusable('ECDSA');\n const expected = getNamedCurve(alg);\n const actual = key.algorithm.namedCurve;\n if (actual !== expected)\n throw unusable(expected, 'algorithm.namedCurve');\n break;\n }\n default:\n throw new TypeError('CryptoKey does not support this operation');\n }\n checkUsage(key, usages);\n}\nexport function checkEncCryptoKey(key, alg, ...usages) {\n switch (alg) {\n case 'A128GCM':\n case 'A192GCM':\n case 'A256GCM': {\n if (!isAlgorithm(key.algorithm, 'AES-GCM'))\n throw unusable('AES-GCM');\n const expected = parseInt(alg.slice(1, 4), 10);\n const actual = key.algorithm.length;\n if (actual !== expected)\n throw unusable(expected, 
'algorithm.length');\n break;\n }\n case 'A128KW':\n case 'A192KW':\n case 'A256KW': {\n if (!isAlgorithm(key.algorithm, 'AES-KW'))\n throw unusable('AES-KW');\n const expected = parseInt(alg.slice(1, 4), 10);\n const actual = key.algorithm.length;\n if (actual !== expected)\n throw unusable(expected, 'algorithm.length');\n break;\n }\n case 'ECDH': {\n switch (key.algorithm.name) {\n case 'ECDH':\n case 'X25519':\n case 'X448':\n break;\n default:\n throw unusable('ECDH, X25519, or X448');\n }\n break;\n }\n case 'PBES2-HS256+A128KW':\n case 'PBES2-HS384+A192KW':\n case 'PBES2-HS512+A256KW':\n if (!isAlgorithm(key.algorithm, 'PBKDF2'))\n throw unusable('PBKDF2');\n break;\n case 'RSA-OAEP':\n case 'RSA-OAEP-256':\n case 'RSA-OAEP-384':\n case 'RSA-OAEP-512': {\n if (!isAlgorithm(key.algorithm, 'RSA-OAEP'))\n throw unusable('RSA-OAEP');\n const expected = parseInt(alg.slice(9), 10) || 1;\n const actual = getHashLength(key.algorithm.hash);\n if (actual !== expected)\n throw unusable(`SHA-${expected}`, 'algorithm.hash');\n break;\n }\n default:\n throw new TypeError('CryptoKey does not support this operation');\n }\n checkUsage(key, usages);\n}\n", "function message(msg, actual, ...types) {\n if (types.length > 2) {\n const last = types.pop();\n msg += `one of type ${types.join(', ')}, or ${last}.`;\n }\n else if (types.length === 2) {\n msg += `one of type ${types[0]} or ${types[1]}.`;\n }\n else {\n msg += `of type ${types[0]}.`;\n }\n if (actual == null) {\n msg += ` Received ${actual}`;\n }\n else if (typeof actual === 'function' && actual.name) {\n msg += ` Received function ${actual.name}`;\n }\n else if (typeof actual === 'object' && actual != null) {\n if (actual.constructor && actual.constructor.name) {\n msg += ` Received an instance of ${actual.constructor.name}`;\n }\n }\n return msg;\n}\nexport default (actual, ...types) => {\n return message('Key must be ', actual, ...types);\n};\nexport function withAlg(alg, actual, ...types) {\n return message(`Key for the ${alg} algorithm must be `, actual, ...types);\n}\n", "import { isCryptoKey } from './webcrypto.js';\nexport default (key) => {\n return isCryptoKey(key);\n};\nexport const types = ['CryptoKey'];\n", "const isDisjoint = (...headers) => {\n const sources = headers.filter(Boolean);\n if (sources.length === 0 || sources.length === 1) {\n return true;\n }\n let acc;\n for (const header of sources) {\n const parameters = Object.keys(header);\n if (!acc || acc.size === 0) {\n acc = new Set(parameters);\n continue;\n }\n for (const parameter of parameters) {\n if (acc.has(parameter)) {\n return false;\n }\n acc.add(parameter);\n }\n }\n return true;\n};\nexport default isDisjoint;\n", "function isObjectLike(value) {\n return typeof value === 'object' && value !== null;\n}\nexport default function isObject(input) {\n if (!isObjectLike(input) || Object.prototype.toString.call(input) !== '[object Object]') {\n return false;\n }\n if (Object.getPrototypeOf(input) === null) {\n return true;\n }\n let proto = input;\n while (Object.getPrototypeOf(proto) !== null) {\n proto = Object.getPrototypeOf(proto);\n }\n return Object.getPrototypeOf(input) === proto;\n}\n", "export default (alg, key) => {\n if (alg.startsWith('RS') || alg.startsWith('PS')) {\n const { modulusLength } = key.algorithm;\n if (typeof modulusLength !== 'number' || modulusLength < 2048) {\n throw new TypeError(`${alg} requires key modulusLength to be 2048 bits or larger`);\n }\n }\n};\n", "import { isCloudflareWorkers } from './env.js';\nimport crypto, { isCryptoKey } 
from './webcrypto.js';\nimport invalidKeyInput from '../lib/invalid_key_input.js';\nimport { encodeBase64 } from './base64url.js';\nimport formatPEM from '../lib/format_pem.js';\nimport { JOSENotSupported } from '../util/errors.js';\nimport { types } from './is_key_like.js';\nconst genericExport = async (keyType, keyFormat, key) => {\n if (!isCryptoKey(key)) {\n throw new TypeError(invalidKeyInput(key, ...types));\n }\n if (!key.extractable) {\n throw new TypeError('CryptoKey is not extractable');\n }\n if (key.type !== keyType) {\n throw new TypeError(`key is not a ${keyType} key`);\n }\n return formatPEM(encodeBase64(new Uint8Array(await crypto.subtle.exportKey(keyFormat, key))), `${keyType.toUpperCase()} KEY`);\n};\nexport const toSPKI = (key) => {\n return genericExport('public', 'spki', key);\n};\nexport const toPKCS8 = (key) => {\n return genericExport('private', 'pkcs8', key);\n};\nconst findOid = (keyData, oid, from = 0) => {\n if (from === 0) {\n oid.unshift(oid.length);\n oid.unshift(0x06);\n }\n let i = keyData.indexOf(oid[0], from);\n if (i === -1)\n return false;\n const sub = keyData.subarray(i, i + oid.length);\n if (sub.length !== oid.length)\n return false;\n return sub.every((value, index) => value === oid[index]) || findOid(keyData, oid, i + 1);\n};\nconst getNamedCurve = (keyData) => {\n switch (true) {\n case findOid(keyData, [0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03, 0x01, 0x07]):\n return 'P-256';\n case findOid(keyData, [0x2b, 0x81, 0x04, 0x00, 0x22]):\n return 'P-384';\n case findOid(keyData, [0x2b, 0x81, 0x04, 0x00, 0x23]):\n return 'P-521';\n case findOid(keyData, [0x2b, 0x65, 0x6e]):\n return 'X25519';\n case findOid(keyData, [0x2b, 0x65, 0x6f]):\n return 'X448';\n case findOid(keyData, [0x2b, 0x65, 0x70]):\n return 'Ed25519';\n case findOid(keyData, [0x2b, 0x65, 0x71]):\n return 'Ed448';\n default:\n throw new JOSENotSupported('Invalid or unsupported EC Key Curve or OKP Key Sub Type');\n }\n};\nconst genericImport = async (replace, keyFormat, pem, alg, options) => {\n var _a;\n let algorithm;\n let keyUsages;\n const keyData = new Uint8Array(atob(pem.replace(replace, ''))\n .split('')\n .map((c) => c.charCodeAt(0)));\n const isPublic = keyFormat === 'spki';\n switch (alg) {\n case 'PS256':\n case 'PS384':\n case 'PS512':\n algorithm = { name: 'RSA-PSS', hash: `SHA-${alg.slice(-3)}` };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n case 'RS256':\n case 'RS384':\n case 'RS512':\n algorithm = { name: 'RSASSA-PKCS1-v1_5', hash: `SHA-${alg.slice(-3)}` };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n case 'RSA-OAEP':\n case 'RSA-OAEP-256':\n case 'RSA-OAEP-384':\n case 'RSA-OAEP-512':\n algorithm = {\n name: 'RSA-OAEP',\n hash: `SHA-${parseInt(alg.slice(-3), 10) || 1}`,\n };\n keyUsages = isPublic ? ['encrypt', 'wrapKey'] : ['decrypt', 'unwrapKey'];\n break;\n case 'ES256':\n algorithm = { name: 'ECDSA', namedCurve: 'P-256' };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n case 'ES384':\n algorithm = { name: 'ECDSA', namedCurve: 'P-384' };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n case 'ES512':\n algorithm = { name: 'ECDSA', namedCurve: 'P-521' };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n case 'ECDH-ES':\n case 'ECDH-ES+A128KW':\n case 'ECDH-ES+A192KW':\n case 'ECDH-ES+A256KW': {\n const namedCurve = getNamedCurve(keyData);\n algorithm = namedCurve.startsWith('P-') ? { name: 'ECDH', namedCurve } : { name: namedCurve };\n keyUsages = isPublic ? 
[] : ['deriveBits'];\n break;\n }\n case isCloudflareWorkers() && 'EdDSA': {\n const namedCurve = getNamedCurve(keyData).toUpperCase();\n algorithm = { name: `NODE-${namedCurve}`, namedCurve: `NODE-${namedCurve}` };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n }\n case 'EdDSA':\n algorithm = { name: getNamedCurve(keyData) };\n keyUsages = isPublic ? ['verify'] : ['sign'];\n break;\n default:\n throw new JOSENotSupported('Invalid or unsupported \"alg\" (Algorithm) value');\n }\n return crypto.subtle.importKey(keyFormat, keyData, algorithm, (_a = options === null || options === void 0 ? void 0 : options.extractable) !== null && _a !== void 0 ? _a : false, keyUsages);\n};\nexport const fromPKCS8 = (pem, alg, options) => {\n return genericImport(/(?:-----(?:BEGIN|END) PRIVATE KEY-----|\\s)/g, 'pkcs8', pem, alg, options);\n};\nexport const fromSPKI = (pem, alg, options) => {\n return genericImport(/(?:-----(?:BEGIN|END) PUBLIC KEY-----|\\s)/g, 'spki', pem, alg, options);\n};\n", "import { decode as decodeBase64URL, encodeBase64, decodeBase64 } from '../runtime/base64url.js';\nimport { fromSPKI as importPublic } from '../runtime/asn1.js';\nimport { fromPKCS8 as importPrivate } from '../runtime/asn1.js';\nimport asKeyObject from '../runtime/jwk_to_key.js';\nimport { JOSENotSupported } from '../util/errors.js';\nimport formatPEM from '../lib/format_pem.js';\nimport isObject from '../lib/is_object.js';\nfunction getElement(seq) {\n let result = [];\n let next = 0;\n while (next < seq.length) {\n let nextPart = parseElement(seq.subarray(next));\n result.push(nextPart);\n next += nextPart.byteLength;\n }\n return result;\n}\nfunction parseElement(bytes) {\n let position = 0;\n let tag = bytes[0] & 0x1f;\n position++;\n if (tag === 0x1f) {\n tag = 0;\n while (bytes[position] >= 0x80) {\n tag = tag * 128 + bytes[position] - 0x80;\n position++;\n }\n tag = tag * 128 + bytes[position] - 0x80;\n position++;\n }\n let length = 0;\n if (bytes[position] < 0x80) {\n length = bytes[position];\n position++;\n }\n else if (length === 0x80) {\n length = 0;\n while (bytes[position + length] !== 0 || bytes[position + length + 1] !== 0) {\n if (length > bytes.byteLength) {\n throw new TypeError('invalid indefinite form length');\n }\n length++;\n }\n const byteLength = position + length + 2;\n return {\n byteLength,\n contents: bytes.subarray(position, position + length),\n raw: bytes.subarray(0, byteLength),\n };\n }\n else {\n let numberOfDigits = bytes[position] & 0x7f;\n position++;\n length = 0;\n for (let i = 0; i < numberOfDigits; i++) {\n length = length * 256 + bytes[position];\n position++;\n }\n }\n const byteLength = position + length;\n return {\n byteLength,\n contents: bytes.subarray(position, byteLength),\n raw: bytes.subarray(0, byteLength),\n };\n}\nfunction spkiFromX509(buf) {\n const tbsCertificate = getElement(getElement(parseElement(buf).contents)[0].contents);\n return encodeBase64(tbsCertificate[tbsCertificate[0].raw[0] === 0xa0 ? 
6 : 5].raw);\n}\nfunction getSPKI(x509) {\n const pem = x509.replace(/(?:-----(?:BEGIN|END) CERTIFICATE-----|\\s)/g, '');\n const raw = decodeBase64(pem);\n return formatPEM(spkiFromX509(raw), 'PUBLIC KEY');\n}\nexport async function importSPKI(spki, alg, options) {\n if (typeof spki !== 'string' || spki.indexOf('-----BEGIN PUBLIC KEY-----') !== 0) {\n throw new TypeError('\"spki\" must be SPKI formatted string');\n }\n return importPublic(spki, alg, options);\n}\nexport async function importX509(x509, alg, options) {\n if (typeof x509 !== 'string' || x509.indexOf('-----BEGIN CERTIFICATE-----') !== 0) {\n throw new TypeError('\"x509\" must be X.509 formatted string');\n }\n let spki;\n try {\n spki = getSPKI(x509);\n }\n catch (cause) {\n throw new TypeError('failed to parse the X.509 certificate', { cause });\n }\n return importPublic(spki, alg, options);\n}\nexport async function importPKCS8(pkcs8, alg, options) {\n if (typeof pkcs8 !== 'string' || pkcs8.indexOf('-----BEGIN PRIVATE KEY-----') !== 0) {\n throw new TypeError('\"pkcs8\" must be PKCS#8 formatted string');\n }\n return importPrivate(pkcs8, alg, options);\n}\nexport async function importJWK(jwk, alg, octAsKeyObject) {\n var _a;\n if (!isObject(jwk)) {\n throw new TypeError('JWK must be an object');\n }\n alg || (alg = jwk.alg);\n if (typeof alg !== 'string' || !alg) {\n throw new TypeError('\"alg\" argument is required when \"jwk.alg\" is not present');\n }\n switch (jwk.kty) {\n case 'oct':\n if (typeof jwk.k !== 'string' || !jwk.k) {\n throw new TypeError('missing \"k\" (Key Value) Parameter value');\n }\n octAsKeyObject !== null && octAsKeyObject !== void 0 ? octAsKeyObject : (octAsKeyObject = jwk.ext !== true);\n if (octAsKeyObject) {\n return asKeyObject({ ...jwk, alg, ext: (_a = jwk.ext) !== null && _a !== void 0 ? 
_a : false });\n }\n return decodeBase64URL(jwk.k);\n case 'RSA':\n if (jwk.oth !== undefined) {\n throw new JOSENotSupported('RSA JWK \"oth\" (Other Primes Info) Parameter value is not supported');\n }\n case 'EC':\n case 'OKP':\n return asKeyObject({ ...jwk, alg });\n default:\n throw new JOSENotSupported('Unsupported \"kty\" (Key Type) Parameter value');\n }\n}\n", "import { withAlg as invalidKeyInput } from './invalid_key_input.js';\nimport isKeyLike, { types } from '../runtime/is_key_like.js';\nconst symmetricTypeCheck = (alg, key) => {\n if (key instanceof Uint8Array)\n return;\n if (!isKeyLike(key)) {\n throw new TypeError(invalidKeyInput(alg, key, ...types, 'Uint8Array'));\n }\n if (key.type !== 'secret') {\n throw new TypeError(`${types.join(' or ')} instances for symmetric algorithms must be of type \"secret\"`);\n }\n};\nconst asymmetricTypeCheck = (alg, key, usage) => {\n if (!isKeyLike(key)) {\n throw new TypeError(invalidKeyInput(alg, key, ...types));\n }\n if (key.type === 'secret') {\n throw new TypeError(`${types.join(' or ')} instances for asymmetric algorithms must not be of type \"secret\"`);\n }\n if (usage === 'sign' && key.type === 'public') {\n throw new TypeError(`${types.join(' or ')} instances for asymmetric algorithm signing must be of type \"private\"`);\n }\n if (usage === 'decrypt' && key.type === 'public') {\n throw new TypeError(`${types.join(' or ')} instances for asymmetric algorithm decryption must be of type \"private\"`);\n }\n if (key.algorithm && usage === 'verify' && key.type === 'private') {\n throw new TypeError(`${types.join(' or ')} instances for asymmetric algorithm verifying must be of type \"public\"`);\n }\n if (key.algorithm && usage === 'encrypt' && key.type === 'private') {\n throw new TypeError(`${types.join(' or ')} instances for asymmetric algorithm encryption must be of type \"public\"`);\n }\n};\nconst checkKeyType = (alg, key, usage) => {\n const symmetric = alg.startsWith('HS') ||\n alg === 'dir' ||\n alg.startsWith('PBES2') ||\n /^A\\d{3}(?:GCM)?KW$/.test(alg);\n if (symmetric) {\n symmetricTypeCheck(alg, key);\n }\n else {\n asymmetricTypeCheck(alg, key, usage);\n }\n};\nexport default checkKeyType;\n", "import { JOSENotSupported } from '../util/errors.js';\nfunction validateCrit(Err, recognizedDefault, recognizedOption, protectedHeader, joseHeader) {\n if (joseHeader.crit !== undefined && protectedHeader.crit === undefined) {\n throw new Err('\"crit\" (Critical) Header Parameter MUST be integrity protected');\n }\n if (!protectedHeader || protectedHeader.crit === undefined) {\n return new Set();\n }\n if (!Array.isArray(protectedHeader.crit) ||\n protectedHeader.crit.length === 0 ||\n protectedHeader.crit.some((input) => typeof input !== 'string' || input.length === 0)) {\n throw new Err('\"crit\" (Critical) Header Parameter MUST be an array of non-empty strings when present');\n }\n let recognized;\n if (recognizedOption !== undefined) {\n recognized = new Map([...Object.entries(recognizedOption), ...recognizedDefault.entries()]);\n }\n else {\n recognized = recognizedDefault;\n }\n for (const parameter of protectedHeader.crit) {\n if (!recognized.has(parameter)) {\n throw new JOSENotSupported(`Extension Header Parameter \"${parameter}\" is not recognized`);\n }\n if (joseHeader[parameter] === undefined) {\n throw new Err(`Extension Header Parameter \"${parameter}\" is missing`);\n }\n else if (recognized.get(parameter) && protectedHeader[parameter] === undefined) {\n throw new Err(`Extension Header Parameter \"${parameter}\" 
MUST be integrity protected`);\n }\n }\n return new Set(protectedHeader.crit);\n}\nexport default validateCrit;\n", "const validateAlgorithms = (option, algorithms) => {\n if (algorithms !== undefined &&\n (!Array.isArray(algorithms) || algorithms.some((s) => typeof s !== 'string'))) {\n throw new TypeError(`\"${option}\" option must be an array of strings`);\n }\n if (!algorithms) {\n return undefined;\n }\n return new Set(algorithms);\n};\nexport default validateAlgorithms;\n", "import { encode as base64url } from '../../runtime/base64url.js';\nimport encrypt from '../../runtime/encrypt.js';\nimport { deflate } from '../../runtime/zlib.js';\nimport generateIv from '../../lib/iv.js';\nimport encryptKeyManagement from '../../lib/encrypt_key_management.js';\nimport { JOSENotSupported, JWEInvalid } from '../../util/errors.js';\nimport isDisjoint from '../../lib/is_disjoint.js';\nimport { encoder, decoder, concat } from '../../lib/buffer_utils.js';\nimport validateCrit from '../../lib/validate_crit.js';\nexport const unprotected = Symbol();\nexport class FlattenedEncrypt {\n constructor(plaintext) {\n if (!(plaintext instanceof Uint8Array)) {\n throw new TypeError('plaintext must be an instance of Uint8Array');\n }\n this._plaintext = plaintext;\n }\n setKeyManagementParameters(parameters) {\n if (this._keyManagementParameters) {\n throw new TypeError('setKeyManagementParameters can only be called once');\n }\n this._keyManagementParameters = parameters;\n return this;\n }\n setProtectedHeader(protectedHeader) {\n if (this._protectedHeader) {\n throw new TypeError('setProtectedHeader can only be called once');\n }\n this._protectedHeader = protectedHeader;\n return this;\n }\n setSharedUnprotectedHeader(sharedUnprotectedHeader) {\n if (this._sharedUnprotectedHeader) {\n throw new TypeError('setSharedUnprotectedHeader can only be called once');\n }\n this._sharedUnprotectedHeader = sharedUnprotectedHeader;\n return this;\n }\n setUnprotectedHeader(unprotectedHeader) {\n if (this._unprotectedHeader) {\n throw new TypeError('setUnprotectedHeader can only be called once');\n }\n this._unprotectedHeader = unprotectedHeader;\n return this;\n }\n setAdditionalAuthenticatedData(aad) {\n this._aad = aad;\n return this;\n }\n setContentEncryptionKey(cek) {\n if (this._cek) {\n throw new TypeError('setContentEncryptionKey can only be called once');\n }\n this._cek = cek;\n return this;\n }\n setInitializationVector(iv) {\n if (this._iv) {\n throw new TypeError('setInitializationVector can only be called once');\n }\n this._iv = iv;\n return this;\n }\n async encrypt(key, options) {\n if (!this._protectedHeader && !this._unprotectedHeader && !this._sharedUnprotectedHeader) {\n throw new JWEInvalid('either setProtectedHeader, setUnprotectedHeader, or sharedUnprotectedHeader must be called before #encrypt()');\n }\n if (!isDisjoint(this._protectedHeader, this._unprotectedHeader, this._sharedUnprotectedHeader)) {\n throw new JWEInvalid('JWE Protected, JWE Shared Unprotected and JWE Per-Recipient Header Parameter names must be disjoint');\n }\n const joseHeader = {\n ...this._protectedHeader,\n ...this._unprotectedHeader,\n ...this._sharedUnprotectedHeader,\n };\n validateCrit(JWEInvalid, new Map(), options === null || options === void 0 ? 
void 0 : options.crit, this._protectedHeader, joseHeader);\n if (joseHeader.zip !== undefined) {\n if (!this._protectedHeader || !this._protectedHeader.zip) {\n throw new JWEInvalid('JWE \"zip\" (Compression Algorithm) Header MUST be integrity protected');\n }\n if (joseHeader.zip !== 'DEF') {\n throw new JOSENotSupported('Unsupported JWE \"zip\" (Compression Algorithm) Header Parameter value');\n }\n }\n const { alg, enc } = joseHeader;\n if (typeof alg !== 'string' || !alg) {\n throw new JWEInvalid('JWE \"alg\" (Algorithm) Header Parameter missing or invalid');\n }\n if (typeof enc !== 'string' || !enc) {\n throw new JWEInvalid('JWE \"enc\" (Encryption Algorithm) Header Parameter missing or invalid');\n }\n let encryptedKey;\n if (alg === 'dir') {\n if (this._cek) {\n throw new TypeError('setContentEncryptionKey cannot be called when using Direct Encryption');\n }\n }\n else if (alg === 'ECDH-ES') {\n if (this._cek) {\n throw new TypeError('setContentEncryptionKey cannot be called when using Direct Key Agreement');\n }\n }\n let cek;\n {\n let parameters;\n ({ cek, encryptedKey, parameters } = await encryptKeyManagement(alg, enc, key, this._cek, this._keyManagementParameters));\n if (parameters) {\n if (options && unprotected in options) {\n if (!this._unprotectedHeader) {\n this.setUnprotectedHeader(parameters);\n }\n else {\n this._unprotectedHeader = { ...this._unprotectedHeader, ...parameters };\n }\n }\n else {\n if (!this._protectedHeader) {\n this.setProtectedHeader(parameters);\n }\n else {\n this._protectedHeader = { ...this._protectedHeader, ...parameters };\n }\n }\n }\n }\n this._iv || (this._iv = generateIv(enc));\n let additionalData;\n let protectedHeader;\n let aadMember;\n if (this._protectedHeader) {\n protectedHeader = encoder.encode(base64url(JSON.stringify(this._protectedHeader)));\n }\n else {\n protectedHeader = encoder.encode('');\n }\n if (this._aad) {\n aadMember = base64url(this._aad);\n additionalData = concat(protectedHeader, encoder.encode('.'), encoder.encode(aadMember));\n }\n else {\n additionalData = protectedHeader;\n }\n let ciphertext;\n let tag;\n if (joseHeader.zip === 'DEF') {\n const deflated = await ((options === null || options === void 0 ? 
void 0 : options.deflateRaw) || deflate)(this._plaintext);\n ({ ciphertext, tag } = await encrypt(enc, deflated, cek, this._iv, additionalData));\n }\n else {\n ;\n ({ ciphertext, tag } = await encrypt(enc, this._plaintext, cek, this._iv, additionalData));\n }\n const jwe = {\n ciphertext: base64url(ciphertext),\n iv: base64url(this._iv),\n tag: base64url(tag),\n };\n if (encryptedKey) {\n jwe.encrypted_key = base64url(encryptedKey);\n }\n if (aadMember) {\n jwe.aad = aadMember;\n }\n if (this._protectedHeader) {\n jwe.protected = decoder.decode(protectedHeader);\n }\n if (this._sharedUnprotectedHeader) {\n jwe.unprotected = this._sharedUnprotectedHeader;\n }\n if (this._unprotectedHeader) {\n jwe.header = this._unprotectedHeader;\n }\n return jwe;\n }\n}\n", "import { isCloudflareWorkers } from './env.js';\nimport { JOSENotSupported } from '../util/errors.js';\nexport default function subtleDsa(alg, algorithm) {\n const hash = `SHA-${alg.slice(-3)}`;\n switch (alg) {\n case 'HS256':\n case 'HS384':\n case 'HS512':\n return { hash, name: 'HMAC' };\n case 'PS256':\n case 'PS384':\n case 'PS512':\n return { hash, name: 'RSA-PSS', saltLength: alg.slice(-3) >> 3 };\n case 'RS256':\n case 'RS384':\n case 'RS512':\n return { hash, name: 'RSASSA-PKCS1-v1_5' };\n case 'ES256':\n case 'ES384':\n case 'ES512':\n return { hash, name: 'ECDSA', namedCurve: algorithm.namedCurve };\n case isCloudflareWorkers() && 'EdDSA':\n const { namedCurve } = algorithm;\n return { name: namedCurve, namedCurve };\n case 'EdDSA':\n return { name: algorithm.name };\n default:\n throw new JOSENotSupported(`alg ${alg} is not supported either by JOSE or your javascript runtime`);\n }\n}\n", "import crypto, { isCryptoKey } from './webcrypto.js';\nimport { checkSigCryptoKey } from '../lib/crypto_key.js';\nimport invalidKeyInput from '../lib/invalid_key_input.js';\nimport { types } from './is_key_like.js';\nexport default function getCryptoKey(alg, key, usage) {\n if (isCryptoKey(key)) {\n checkSigCryptoKey(key, alg, usage);\n return key;\n }\n if (key instanceof Uint8Array) {\n if (!alg.startsWith('HS')) {\n throw new TypeError(invalidKeyInput(key, ...types));\n }\n return crypto.subtle.importKey('raw', key, { hash: `SHA-${alg.slice(-3)}`, name: 'HMAC' }, false, [usage]);\n }\n throw new TypeError(invalidKeyInput(key, ...types, 'Uint8Array'));\n}\n", "import subtleAlgorithm from './subtle_dsa.js';\nimport crypto from './webcrypto.js';\nimport checkKeyLength from './check_key_length.js';\nimport getVerifyKey from './get_sign_verify_key.js';\nconst verify = async (alg, key, signature, data) => {\n const cryptoKey = await getVerifyKey(alg, key, 'verify');\n checkKeyLength(alg, cryptoKey);\n const algorithm = subtleAlgorithm(alg, cryptoKey.algorithm);\n try {\n return await crypto.subtle.verify(algorithm, cryptoKey, signature, data);\n }\n catch (_a) {\n return false;\n }\n};\nexport default verify;\n", "import { decode as base64url } from '../../runtime/base64url.js';\nimport verify from '../../runtime/verify.js';\nimport { JOSEAlgNotAllowed, JWSInvalid, JWSSignatureVerificationFailed } from '../../util/errors.js';\nimport { concat, encoder, decoder } from '../../lib/buffer_utils.js';\nimport isDisjoint from '../../lib/is_disjoint.js';\nimport isObject from '../../lib/is_object.js';\nimport checkKeyType from '../../lib/check_key_type.js';\nimport validateCrit from '../../lib/validate_crit.js';\nimport validateAlgorithms from '../../lib/validate_algorithms.js';\nexport async function flattenedVerify(jws, key, options) {\n var 
_a;\n if (!isObject(jws)) {\n throw new JWSInvalid('Flattened JWS must be an object');\n }\n if (jws.protected === undefined && jws.header === undefined) {\n throw new JWSInvalid('Flattened JWS must have either of the \"protected\" or \"header\" members');\n }\n if (jws.protected !== undefined && typeof jws.protected !== 'string') {\n throw new JWSInvalid('JWS Protected Header incorrect type');\n }\n if (jws.payload === undefined) {\n throw new JWSInvalid('JWS Payload missing');\n }\n if (typeof jws.signature !== 'string') {\n throw new JWSInvalid('JWS Signature missing or incorrect type');\n }\n if (jws.header !== undefined && !isObject(jws.header)) {\n throw new JWSInvalid('JWS Unprotected Header incorrect type');\n }\n let parsedProt = {};\n if (jws.protected) {\n try {\n const protectedHeader = base64url(jws.protected);\n parsedProt = JSON.parse(decoder.decode(protectedHeader));\n }\n catch (_b) {\n throw new JWSInvalid('JWS Protected Header is invalid');\n }\n }\n if (!isDisjoint(parsedProt, jws.header)) {\n throw new JWSInvalid('JWS Protected and JWS Unprotected Header Parameter names must be disjoint');\n }\n const joseHeader = {\n ...parsedProt,\n ...jws.header,\n };\n const extensions = validateCrit(JWSInvalid, new Map([['b64', true]]), options === null || options === void 0 ? void 0 : options.crit, parsedProt, joseHeader);\n let b64 = true;\n if (extensions.has('b64')) {\n b64 = parsedProt.b64;\n if (typeof b64 !== 'boolean') {\n throw new JWSInvalid('The \"b64\" (base64url-encode payload) Header Parameter must be a boolean');\n }\n }\n const { alg } = joseHeader;\n if (typeof alg !== 'string' || !alg) {\n throw new JWSInvalid('JWS \"alg\" (Algorithm) Header Parameter missing or invalid');\n }\n const algorithms = options && validateAlgorithms('algorithms', options.algorithms);\n if (algorithms && !algorithms.has(alg)) {\n throw new JOSEAlgNotAllowed('\"alg\" (Algorithm) Header Parameter not allowed');\n }\n if (b64) {\n if (typeof jws.payload !== 'string') {\n throw new JWSInvalid('JWS Payload must be a string');\n }\n }\n else if (typeof jws.payload !== 'string' && !(jws.payload instanceof Uint8Array)) {\n throw new JWSInvalid('JWS Payload must be a string or an Uint8Array instance');\n }\n let resolvedKey = false;\n if (typeof key === 'function') {\n key = await key(parsedProt, jws);\n resolvedKey = true;\n }\n checkKeyType(alg, key, 'verify');\n const data = concat(encoder.encode((_a = jws.protected) !== null && _a !== void 0 ? _a : ''), encoder.encode('.'), typeof jws.payload === 'string' ? 
encoder.encode(jws.payload) : jws.payload);\n const signature = base64url(jws.signature);\n const verified = await verify(alg, key, signature, data);\n if (!verified) {\n throw new JWSSignatureVerificationFailed();\n }\n let payload;\n if (b64) {\n payload = base64url(jws.payload);\n }\n else if (typeof jws.payload === 'string') {\n payload = encoder.encode(jws.payload);\n }\n else {\n payload = jws.payload;\n }\n const result = { payload };\n if (jws.protected !== undefined) {\n result.protectedHeader = parsedProt;\n }\n if (jws.header !== undefined) {\n result.unprotectedHeader = jws.header;\n }\n if (resolvedKey) {\n return { ...result, key };\n }\n return result;\n}\n", "import { flattenedVerify } from '../flattened/verify.js';\nimport { JWSInvalid } from '../../util/errors.js';\nimport { decoder } from '../../lib/buffer_utils.js';\nexport async function compactVerify(jws, key, options) {\n if (jws instanceof Uint8Array) {\n jws = decoder.decode(jws);\n }\n if (typeof jws !== 'string') {\n throw new JWSInvalid('Compact JWS must be a string or Uint8Array');\n }\n const { 0: protectedHeader, 1: payload, 2: signature, length } = jws.split('.');\n if (length !== 3) {\n throw new JWSInvalid('Invalid Compact JWS');\n }\n const verified = await flattenedVerify({ payload, protected: protectedHeader, signature }, key, options);\n const result = { payload: verified.payload, protectedHeader: verified.protectedHeader };\n if (typeof key === 'function') {\n return { ...result, key: verified.key };\n }\n return result;\n}\n", "export default (date) => Math.floor(date.getTime() / 1000);\n", "const minute = 60;\nconst hour = minute * 60;\nconst day = hour * 24;\nconst week = day * 7;\nconst year = day * 365.25;\nconst REGEX = /^(\\d+|\\d+\\.\\d+) ?(seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)$/i;\nexport default (str) => {\n const matched = REGEX.exec(str);\n if (!matched) {\n throw new TypeError('Invalid time period format');\n }\n const value = parseFloat(matched[1]);\n const unit = matched[2].toLowerCase();\n switch (unit) {\n case 'sec':\n case 'secs':\n case 'second':\n case 'seconds':\n case 's':\n return Math.round(value);\n case 'minute':\n case 'minutes':\n case 'min':\n case 'mins':\n case 'm':\n return Math.round(value * minute);\n case 'hour':\n case 'hours':\n case 'hr':\n case 'hrs':\n case 'h':\n return Math.round(value * hour);\n case 'day':\n case 'days':\n case 'd':\n return Math.round(value * day);\n case 'week':\n case 'weeks':\n case 'w':\n return Math.round(value * week);\n default:\n return Math.round(value * year);\n }\n};\n", "import { JWTClaimValidationFailed, JWTExpired, JWTInvalid } from '../util/errors.js';\nimport { decoder } from './buffer_utils.js';\nimport epoch from './epoch.js';\nimport secs from './secs.js';\nimport isObject from './is_object.js';\nconst normalizeTyp = (value) => value.toLowerCase().replace(/^application\\//, '');\nconst checkAudiencePresence = (audPayload, audOption) => {\n if (typeof audPayload === 'string') {\n return audOption.includes(audPayload);\n }\n if (Array.isArray(audPayload)) {\n return audOption.some(Set.prototype.has.bind(new Set(audPayload)));\n }\n return false;\n};\nexport default (protectedHeader, encodedPayload, options = {}) => {\n const { typ } = options;\n if (typ &&\n (typeof protectedHeader.typ !== 'string' ||\n normalizeTyp(protectedHeader.typ) !== normalizeTyp(typ))) {\n throw new JWTClaimValidationFailed('unexpected \"typ\" JWT header value', 'typ', 'check_failed');\n }\n let payload;\n 
try {\n payload = JSON.parse(decoder.decode(encodedPayload));\n }\n catch (_a) {\n }\n if (!isObject(payload)) {\n throw new JWTInvalid('JWT Claims Set must be a top-level JSON object');\n }\n const { issuer } = options;\n if (issuer && !(Array.isArray(issuer) ? issuer : [issuer]).includes(payload.iss)) {\n throw new JWTClaimValidationFailed('unexpected \"iss\" claim value', 'iss', 'check_failed');\n }\n const { subject } = options;\n if (subject && payload.sub !== subject) {\n throw new JWTClaimValidationFailed('unexpected \"sub\" claim value', 'sub', 'check_failed');\n }\n const { audience } = options;\n if (audience &&\n !checkAudiencePresence(payload.aud, typeof audience === 'string' ? [audience] : audience)) {\n throw new JWTClaimValidationFailed('unexpected \"aud\" claim value', 'aud', 'check_failed');\n }\n let tolerance;\n switch (typeof options.clockTolerance) {\n case 'string':\n tolerance = secs(options.clockTolerance);\n break;\n case 'number':\n tolerance = options.clockTolerance;\n break;\n case 'undefined':\n tolerance = 0;\n break;\n default:\n throw new TypeError('Invalid clockTolerance option type');\n }\n const { currentDate } = options;\n const now = epoch(currentDate || new Date());\n if ((payload.iat !== undefined || options.maxTokenAge) && typeof payload.iat !== 'number') {\n throw new JWTClaimValidationFailed('\"iat\" claim must be a number', 'iat', 'invalid');\n }\n if (payload.nbf !== undefined) {\n if (typeof payload.nbf !== 'number') {\n throw new JWTClaimValidationFailed('\"nbf\" claim must be a number', 'nbf', 'invalid');\n }\n if (payload.nbf > now + tolerance) {\n throw new JWTClaimValidationFailed('\"nbf\" claim timestamp check failed', 'nbf', 'check_failed');\n }\n }\n if (payload.exp !== undefined) {\n if (typeof payload.exp !== 'number') {\n throw new JWTClaimValidationFailed('\"exp\" claim must be a number', 'exp', 'invalid');\n }\n if (payload.exp <= now - tolerance) {\n throw new JWTExpired('\"exp\" claim timestamp check failed', 'exp', 'check_failed');\n }\n }\n if (options.maxTokenAge) {\n const age = now - payload.iat;\n const max = typeof options.maxTokenAge === 'number' ? options.maxTokenAge : secs(options.maxTokenAge);\n if (age - tolerance > max) {\n throw new JWTExpired('\"iat\" claim timestamp check failed (too far in the past)', 'iat', 'check_failed');\n }\n if (age < 0 - tolerance) {\n throw new JWTClaimValidationFailed('\"iat\" claim timestamp check failed (it should be in the past)', 'iat', 'check_failed');\n }\n }\n return payload;\n};\n", "import { compactVerify } from '../jws/compact/verify.js';\nimport jwtPayload from '../lib/jwt_claims_set.js';\nimport { JWTInvalid } from '../util/errors.js';\nexport async function jwtVerify(jwt, key, options) {\n var _a;\n const verified = await compactVerify(jwt, key, options);\n if (((_a = verified.protectedHeader.crit) === null || _a === void 0 ? 
void 0 : _a.includes('b64')) && verified.protectedHeader.b64 === false) {\n throw new JWTInvalid('JWTs MUST NOT use unencoded payload');\n }\n const payload = jwtPayload(verified.protectedHeader, verified.payload, options);\n const result = { payload, protectedHeader: verified.protectedHeader };\n if (typeof key === 'function') {\n return { ...result, key: verified.key };\n }\n return result;\n}\n", "/*\nCopyright 2022 The Dapr Authors\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n http://www.apache.org/licenses/LICENSE-2.0\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*/\n\nimport { importSPKI, jwtVerify } from 'jose'\n\nimport type { Environment } from '$lib/environment'\n\nconst tokenHeaderMatch =\n /^(?:Bearer )?([A-Za-z0-9_\\-]+\\.[A-Za-z0-9_\\-]+\\.[A-Za-z0-9_\\-]+)/i\n\nexport async function AuthorizeRequest(\n req: Request,\n env: Environment\n): Promise<boolean> {\n // If \"SKIP_AUTH\" is set, we can allow skipping authorization\n if (env.SKIP_AUTH === 'true') {\n return true\n }\n\n // Ensure we have an Authorization header with a bearer JWT token\n const match = tokenHeaderMatch.exec(req.headers.get('authorization') || '')\n if (!match || !match[1]) {\n return false\n }\n\n // Validate the JWT\n const pk = await importSPKI(env.PUBLIC_KEY, 'EdDSA')\n try {\n await jwtVerify(match[1], pk, {\n issuer: 'dapr.io/cloudflare',\n audience: env.TOKEN_AUDIENCE,\n algorithms: ['EdDSA'],\n // Allow 5 mins of clock skew\n clockTolerance: 300,\n })\n } catch (err) {\n console.error('Failed to validate JWT: ' + err)\n return false\n }\n\n return true\n}\n", "/*\nCopyright 2022 The Dapr Authors\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n http://www.apache.org/licenses/LICENSE-2.0\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n*/\n\nimport { Router, type Request as RequestI } from 'itty-router'\n\nimport type { Environment } from '$lib/environment'\nimport { AuthorizeRequest } from '$lib/jwt-auth'\nimport { version } from './package.json'\n\nconst router = Router()\n // Handle the info endpoint\n .get(\n '/.well-known/dapr/info',\n async (\n req: Request & RequestI,\n env: Environment\n ): Promise<Response> => {\n const auth = await AuthorizeRequest(req, env)\n if (!auth) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n // Filter all bindings by type\n const queues: string[] = []\n const kv: string[] = []\n const r2: string[] = []\n const all = Object.keys(env)\n for (let i = 0; i < all.length; i++) {\n if (!all[i]) {\n continue\n }\n const obj = env[all[i]]\n if (!obj || typeof obj != 'object' || !obj.constructor) {\n continue\n }\n switch (obj.constructor.name) {\n case 'KvNamespace':\n case 'KVNamespace':\n kv.push(all[i])\n break\n case 'WorkerQueue':\n case 'Queue':\n queues.push(all[i])\n break\n case 
'R2Bucket':\n // Note that we currently don't support R2 yet\n r2.push(all[i])\n break\n }\n }\n\n const res = JSON.stringify({\n version,\n queues: queues && queues.length ? queues : undefined,\n kv: kv && kv.length ? kv : undefined,\n r2: r2 && r2.length ? r2 : undefined,\n })\n return new Response(res, {\n headers: {\n 'content-type': 'application/json',\n },\n })\n }\n )\n\n // Retrieve a value from KV\n .get(\n '/kv/:namespace/:key',\n async (\n req: Request & RequestI,\n env: Environment\n ): Promise<Response> => {\n const { namespace, key, errorRes } = await setupKVRequest(req, env)\n if (errorRes) {\n return errorRes\n }\n\n const val = await namespace!.get(key!, 'stream')\n if (!val) {\n return new Response('', { status: 404 })\n }\n\n return new Response(val, { status: 200 })\n }\n )\n\n // Store a value in KV\n .post(\n '/kv/:namespace/:key',\n async (\n req: Request & RequestI,\n env: Environment\n ): Promise<Response> => {\n const { namespace, key, errorRes } = await setupKVRequest(req, env)\n if (errorRes) {\n return errorRes\n }\n\n let expirationTtl: number|undefined = undefined\n const reqUrl = new URL(req.url)\n const ttlParam = parseInt(reqUrl.searchParams.get('ttl') || '', 10)\n if (ttlParam > 0) {\n expirationTtl = ttlParam\n }\n await namespace!.put(key!, req.body!, {expirationTtl})\n\n return new Response('', { status: 201 })\n }\n )\n\n // Delete a value from KV\n .delete(\n '/kv/:namespace/:key',\n async (\n req: Request & RequestI,\n env: Environment\n ): Promise<Response> => {\n const { namespace, key, errorRes } = await setupKVRequest(req, env)\n if (errorRes) {\n return errorRes\n }\n\n await namespace!.delete(key!)\n\n return new Response('', { status: 204 })\n }\n )\n\n // Publish a message in a queue\n .post(\n '/queues/:queue',\n async (\n req: Request & RequestI,\n env: Environment\n ): Promise<Response> => {\n const { queue, errorRes } = await setupQueueRequest(req, env)\n if (errorRes) {\n return errorRes\n }\n\n let message = await req.text()\n await queue!.send(message)\n return new Response('', { status: 201 })\n }\n )\n\n // Catch-all route to handle 404s\n .all('*', (): Response => {\n return new Response('Not found', { status: 404 })\n })\n\n// Performs the init steps for a KV request. Returns a Response object in case of error.\nasync function setupKVRequest(\n req: Request & RequestI,\n env: Environment\n): Promise<{\n namespace?: KVNamespace\n key?: string\n errorRes?: Response\n}> {\n if (!req?.text || !req.params?.namespace || !req.params?.key) {\n return { errorRes: new Response('Bad request', { status: 400 }) }\n }\n const namespace = env[req.params.namespace] as KVNamespace\n if (typeof namespace != 'object' || !['KVNamespace', 'KvNamespace'].includes(namespace?.constructor?.name)) {\n return {\n errorRes: new Response(\n `Worker is not bound to KV '${req.params.namespace}'`,\n { status: 412 }\n ),\n }\n }\n\n const auth = await AuthorizeRequest(req, env)\n if (!auth) {\n return { errorRes: new Response('Unauthorized', { status: 401 }) }\n }\n\n return { namespace, key: req.params.key }\n}\n\n// Performs the init steps for a Queue request. 
Returns a Response object in case of error.\nasync function setupQueueRequest(\n req: Request & RequestI,\n env: Environment\n): Promise<{ queue?: Queue; errorRes?: Response }> {\n if (!req?.text || !req.params?.queue) {\n return { errorRes: new Response('Bad request', { status: 400 }) }\n }\n const queue = env[req.params.queue] as Queue\n if (typeof queue != 'object' || !['WorkerQueue', 'Queue'].includes(queue?.constructor?.name)) {\n return {\n errorRes: new Response(\n `Worker is not bound to queue '${req.params.queue}'`,\n { status: 412 }\n ),\n }\n }\n\n const auth = await AuthorizeRequest(req, env)\n if (!auth) {\n return { errorRes: new Response('Unauthorized', { status: 401 }) }\n }\n\n return { queue }\n}\n\nexport default {\n fetch: router.handle,\n}\n"], - "mappings": "AAAA,SAASA,GAAE,CAAC,KAAKC,EAAE,GAAG,OAAOC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,MAAM,CAAC,UAAU,IAAI,MAAM,CAAC,EAAE,CAAC,IAAI,CAACF,EAAEG,EAAE,IAAI,CAACH,KAAKI,IAAIF,EAAE,KAAK,CAACC,EAAE,YAAY,EAAE,OAAO,KAAKF,EAAED,GAAG,QAAQ,WAAW,SAAS,EAAE,QAAQ,oBAAoB,EAAE,EAAE,QAAQ,oBAAoB,oBAAoB,EAAE,QAAQ,cAAc,KAAK,EAAE,QAAQ,wBAAwB,wBAAwB,MAAM,EAAEI,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,OAAOF,EAAE,MAAM,OAAOF,KAAKI,EAAE,CAAC,IAAID,EAAEE,EAAEJ,EAAE,IAAI,IAAID,EAAE,GAAG,EAAEA,EAAE,MAAM,OAAO,YAAYC,EAAE,YAAY,EAAE,OAAO,CAACK,EAAEC,EAAE,CAAC,IAAIL,EAAE,IAAII,IAAIN,EAAE,QAAgBM,IAAR,SAAaD,EAAEJ,EAAE,SAAS,MAAMM,CAAC,GAAG,CAACP,EAAE,OAAOK,EAAE,OAAO,QAAQG,KAAK,EAAE,IAAaL,EAAE,MAAMK,EAAER,EAAE,OAAOA,EAAE,GAAGI,CAAC,KAAnC,OAAsC,OAAOD,CAAC,CAAC,CAAC,CAAC,CCA7lB,IAAOM,EAAQ,OACFC,EAAeC,GAAQA,aAAe,UCA5C,IAAMC,EAAU,IAAI,YACdC,EAAU,IAAI,YACrBC,GAAY,GAAK,GAChB,SAASC,KAAUC,EAAS,CAC/B,IAAMC,EAAOD,EAAQ,OAAO,CAACE,EAAK,CAAE,OAAAC,CAAO,IAAMD,EAAMC,EAAQ,CAAC,EAC1DC,EAAM,IAAI,WAAWH,CAAI,EAC3BI,EAAI,EACR,OAAAL,EAAQ,QAASM,GAAW,CACxBF,EAAI,IAAIE,EAAQD,CAAC,EACjBA,GAAKC,EAAO,MAChB,CAAC,EACMF,CACX,CCGO,IAAMG,GAAgBC,GAAY,CACrC,IAAMC,EAAS,KAAKD,CAAO,EACrBE,EAAQ,IAAI,WAAWD,EAAO,MAAM,EAC1C,QAASE,EAAI,EAAGA,EAAIF,EAAO,OAAQE,IAC/BD,EAAMC,GAAKF,EAAO,WAAWE,CAAC,EAElC,OAAOD,CACX,EACaE,EAAUC,GAAU,CAC7B,IAAIL,EAAUK,EACVL,aAAmB,aACnBA,EAAUM,EAAQ,OAAON,CAAO,GAEpCA,EAAUA,EAAQ,QAAQ,KAAM,GAAG,EAAE,QAAQ,KAAM,GAAG,EAAE,QAAQ,MAAO,EAAE,EACzE,GAAI,CACA,OAAOD,GAAaC,CAAO,CAC/B,MACA,CACI,MAAM,IAAI,UAAU,mDAAmD,CAC3E,CACJ,ECpCO,IAAMO,EAAN,cAAwB,KAAM,CACjC,YAAYC,EAAS,CACjB,IAAIC,EACJ,MAAMD,CAAO,EACb,KAAK,KAAO,mBACZ,KAAK,KAAO,KAAK,YAAY,MAC5BC,EAAK,MAAM,qBAAuB,MAAQA,IAAO,QAAkBA,EAAG,KAAK,MAAO,KAAM,KAAK,WAAW,CAC7G,CACA,WAAW,MAAO,CACd,MAAO,kBACX,CACJ,EACaC,EAAN,cAAuCH,CAAU,CACpD,YAAYC,EAASG,EAAQ,cAAeC,EAAS,cAAe,CAChE,MAAMJ,CAAO,EACb,KAAK,KAAO,kCACZ,KAAK,MAAQG,EACb,KAAK,OAASC,CAClB,CACA,WAAW,MAAO,CACd,MAAO,iCACX,CACJ,EACaC,EAAN,cAAyBN,CAAU,CACtC,YAAYC,EAASG,EAAQ,cAAeC,EAAS,cAAe,CAChE,MAAMJ,CAAO,EACb,KAAK,KAAO,kBACZ,KAAK,MAAQG,EACb,KAAK,OAASC,CAClB,CACA,WAAW,MAAO,CACd,MAAO,iBACX,CACJ,EACaE,EAAN,cAAgCP,CAAU,CAC7C,aAAc,CACV,MAAM,GAAG,SAAS,EAClB,KAAK,KAAO,0BAChB,CACA,WAAW,MAAO,CACd,MAAO,0BACX,CACJ,EACaQ,EAAN,cAA+BR,CAAU,CAC5C,aAAc,CACV,MAAM,GAAG,SAAS,EAClB,KAAK,KAAO,wBAChB,CACA,WAAW,MAAO,CACd,MAAO,wBACX,CACJ,EAoBO,IAAMS,EAAN,cAAyBC,CAAU,CACtC,aAAc,CACV,MAAM,GAAG,SAAS,EAClB,KAAK,KAAO,iBAChB,CACA,WAAW,MAAO,CACd,MAAO,iBACX,CACJ,EACaC,EAAN,cAAyBD,CAAU,CACtC,aAAc,CACV,MAAM,GAAG,SAAS,EAClB,KAAK,KAAO,iBAChB,CACA,WAAW,MAAO,CACd,MAAO,iBACX,CACJ,EAiDO,IAAME,EAAN,cAA6CC,CAAU,CAC1D,aAAc,CACV,MAAM,GAAG,SAAS,EAClB,KAAK,KAAO,wCACZ,KAAK,QAAU,+BACnB,CACA,WAAW,MAAO,CACd,MAAO,uCACX,CACJ,ECjJA,IAAOC,EAAQC,EAAO,gBAAgB,KAAKA,CAAM,ECD1C,SAASC,GAAsB,CAClC,OAAQ,OAAO,cAAkB,KAC5B,OAAO,UAAc,KAAe,UAAU,YAAc,sBAC5D,OAAO,YAAgB,KAAe,cAAgB,QAC
/D,CCHA,SAASC,EAASC,EAAMC,EAAO,iBAAkB,CAC7C,OAAO,IAAI,UAAU,kDAAkDA,aAAgBD,GAAM,CACjG,CACA,SAASE,EAAYC,EAAWH,EAAM,CAClC,OAAOG,EAAU,OAASH,CAC9B,CACA,SAASI,EAAcC,EAAM,CACzB,OAAO,SAASA,EAAK,KAAK,MAAM,CAAC,EAAG,EAAE,CAC1C,CACA,SAASC,GAAcC,EAAK,CACxB,OAAQA,EAAK,CACT,IAAK,QACD,MAAO,QACX,IAAK,QACD,MAAO,QACX,IAAK,QACD,MAAO,QACX,QACI,MAAM,IAAI,MAAM,aAAa,CACrC,CACJ,CACA,SAASC,GAAWC,EAAKC,EAAQ,CAC7B,GAAIA,EAAO,QAAU,CAACA,EAAO,KAAMC,GAAaF,EAAI,OAAO,SAASE,CAAQ,CAAC,EAAG,CAC5E,IAAIC,EAAM,sEACV,GAAIF,EAAO,OAAS,EAAG,CACnB,IAAMG,EAAOH,EAAO,IAAI,EACxBE,GAAO,UAAUF,EAAO,KAAK,IAAI,SAASG,IAC9C,MACSH,EAAO,SAAW,EACvBE,GAAO,UAAUF,EAAO,SAASA,EAAO,MAGxCE,GAAO,GAAGF,EAAO,MAErB,MAAM,IAAI,UAAUE,CAAG,CAC3B,CACJ,CACO,SAASE,GAAkBL,EAAKF,KAAQG,EAAQ,CACnD,OAAQH,EAAK,CACT,IAAK,QACL,IAAK,QACL,IAAK,QAAS,CACV,GAAI,CAACL,EAAYO,EAAI,UAAW,MAAM,EAClC,MAAMV,EAAS,MAAM,EACzB,IAAMY,EAAW,SAASJ,EAAI,MAAM,CAAC,EAAG,EAAE,EAE1C,GADeH,EAAcK,EAAI,UAAU,IAAI,IAChCE,EACX,MAAMZ,EAAS,OAAOY,IAAY,gBAAgB,EACtD,KACJ,CACA,IAAK,QACL,IAAK,QACL,IAAK,QAAS,CACV,GAAI,CAACT,EAAYO,EAAI,UAAW,mBAAmB,EAC/C,MAAMV,EAAS,mBAAmB,EACtC,IAAMY,EAAW,SAASJ,EAAI,MAAM,CAAC,EAAG,EAAE,EAE1C,GADeH,EAAcK,EAAI,UAAU,IAAI,IAChCE,EACX,MAAMZ,EAAS,OAAOY,IAAY,gBAAgB,EACtD,KACJ,CACA,IAAK,QACL,IAAK,QACL,IAAK,QAAS,CACV,GAAI,CAACT,EAAYO,EAAI,UAAW,SAAS,EACrC,MAAMV,EAAS,SAAS,EAC5B,IAAMY,EAAW,SAASJ,EAAI,MAAM,CAAC,EAAG,EAAE,EAE1C,GADeH,EAAcK,EAAI,UAAU,IAAI,IAChCE,EACX,MAAMZ,EAAS,OAAOY,IAAY,gBAAgB,EACtD,KACJ,CACA,KAAKI,EAAoB,GAAK,SAAS,CACnC,GAAI,CAACb,EAAYO,EAAI,UAAW,cAAc,EAC1C,MAAMV,EAAS,cAAc,EACjC,KACJ,CACA,IAAK,QAAS,CACV,GAAIU,EAAI,UAAU,OAAS,WAAaA,EAAI,UAAU,OAAS,QAC3D,MAAMV,EAAS,kBAAkB,EAErC,KACJ,CACA,IAAK,QACL,IAAK,QACL,IAAK,QAAS,CACV,GAAI,CAACG,EAAYO,EAAI,UAAW,OAAO,EACnC,MAAMV,EAAS,OAAO,EAC1B,IAAMY,EAAWL,GAAcC,CAAG,EAElC,GADeE,EAAI,UAAU,aACdE,EACX,MAAMZ,EAASY,EAAU,sBAAsB,EACnD,KACJ,CACA,QACI,MAAM,IAAI,UAAU,2CAA2C,CACvE,CACAH,GAAWC,EAAKC,CAAM,CAC1B,CCnGA,SAASM,GAAQC,EAAKC,KAAWC,EAAO,CACpC,GAAIA,EAAM,OAAS,EAAG,CAClB,IAAMC,EAAOD,EAAM,IAAI,EACvBF,GAAO,eAAeE,EAAM,KAAK,IAAI,SAASC,IAClD,MACSD,EAAM,SAAW,EACtBF,GAAO,eAAeE,EAAM,SAASA,EAAM,MAG3CF,GAAO,WAAWE,EAAM,MAE5B,OAAID,GAAU,KACVD,GAAO,aAAaC,IAEf,OAAOA,GAAW,YAAcA,EAAO,KAC5CD,GAAO,sBAAsBC,EAAO,OAE/B,OAAOA,GAAW,UAAYA,GAAU,MACzCA,EAAO,aAAeA,EAAO,YAAY,OACzCD,GAAO,4BAA4BC,EAAO,YAAY,QAGvDD,CACX,CACA,IAAOI,EAAQ,CAACH,KAAWC,IAChBH,GAAQ,eAAgBE,EAAQ,GAAGC,CAAK,EAE5C,SAASG,EAAQC,EAAKL,KAAWC,EAAO,CAC3C,OAAOH,GAAQ,eAAeO,uBAA0BL,EAAQ,GAAGC,CAAK,CAC5E,CC5BA,IAAOK,EAASC,GACLC,EAAYD,CAAG,EAEbE,EAAQ,CAAC,WAAW,ECJjC,IAAMC,GAAa,IAAIC,IAAY,CAC/B,IAAMC,EAAUD,EAAQ,OAAO,OAAO,EACtC,GAAIC,EAAQ,SAAW,GAAKA,EAAQ,SAAW,EAC3C,MAAO,GAEX,IAAIC,EACJ,QAAWC,KAAUF,EAAS,CAC1B,IAAMG,EAAa,OAAO,KAAKD,CAAM,EACrC,GAAI,CAACD,GAAOA,EAAI,OAAS,EAAG,CACxBA,EAAM,IAAI,IAAIE,CAAU,EACxB,QACJ,CACA,QAAWC,KAAaD,EAAY,CAChC,GAAIF,EAAI,IAAIG,CAAS,EACjB,MAAO,GAEXH,EAAI,IAAIG,CAAS,CACrB,CACJ,CACA,MAAO,EACX,EACOC,EAAQP,GCrBf,SAASQ,GAAaC,EAAO,CACzB,OAAO,OAAOA,GAAU,UAAYA,IAAU,IAClD,CACe,SAARC,EAA0BC,EAAO,CACpC,GAAI,CAACH,GAAaG,CAAK,GAAK,OAAO,UAAU,SAAS,KAAKA,CAAK,IAAM,kBAClE,MAAO,GAEX,GAAI,OAAO,eAAeA,CAAK,IAAM,KACjC,MAAO,GAEX,IAAIC,EAAQD,EACZ,KAAO,OAAO,eAAeC,CAAK,IAAM,MACpCA,EAAQ,OAAO,eAAeA,CAAK,EAEvC,OAAO,OAAO,eAAeD,CAAK,IAAMC,CAC5C,CCfA,IAAOC,EAAQ,CAACC,EAAKC,IAAQ,CACzB,GAAID,EAAI,WAAW,IAAI,GAAKA,EAAI,WAAW,IAAI,EAAG,CAC9C,GAAM,CAAE,cAAAE,CAAc,EAAID,EAAI,UAC9B,GAAI,OAAOC,GAAkB,UAAYA,EAAgB,KACrD,MAAM,IAAI,UAAU,GAAGF,wDAA0D,CAEzF,CACJ,ECkBA,IAAMG,EAAU,CAACC,EAASC,EAAKC,EAAO,IAAM,CACpCA,IAAS,IACTD,EAAI,QAAQA,EAAI,MAAM,EACtBA,EAAI,QAAQ,CAAI,GAEpB,IAAIE,EAAIH,EAAQ,QAAQC,EAAI,GAAIC,CAAI,EACpC,GAAIC,IAAM,GACN,MAAO,GAC
X,IAAMC,EAAMJ,EAAQ,SAASG,EAAGA,EAAIF,EAAI,MAAM,EAC9C,OAAIG,EAAI,SAAWH,EAAI,OACZ,GACJG,EAAI,MAAM,CAACC,EAAOC,IAAUD,IAAUJ,EAAIK,EAAM,GAAKP,EAAQC,EAASC,EAAKE,EAAI,CAAC,CAC3F,EACMI,EAAiBP,GAAY,CAC/B,OAAQ,GAAM,CACV,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,GAAM,IAAM,GAAM,EAAM,EAAM,CAAI,CAAC,EAClE,MAAO,QACX,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,EAAM,EAAM,EAAI,CAAC,EAChD,MAAO,QACX,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,EAAM,EAAM,EAAI,CAAC,EAChD,MAAO,QACX,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,GAAI,CAAC,EACpC,MAAO,SACX,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,GAAI,CAAC,EACpC,MAAO,OACX,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,GAAI,CAAC,EACpC,MAAO,UACX,KAAKD,EAAQC,EAAS,CAAC,GAAM,IAAM,GAAI,CAAC,EACpC,MAAO,QACX,QACI,MAAM,IAAIQ,EAAiB,yDAAyD,CAC5F,CACJ,EACMC,GAAgB,MAAOC,EAASC,EAAWC,EAAKC,EAAKC,IAAY,CACnE,IAAIC,EACJ,IAAIC,EACAC,EACEjB,EAAU,IAAI,WAAW,KAAKY,EAAI,QAAQF,EAAS,EAAE,CAAC,EACvD,MAAM,EAAE,EACR,IAAKQ,GAAMA,EAAE,WAAW,CAAC,CAAC,CAAC,EAC1BC,EAAWR,IAAc,OAC/B,OAAQE,EAAK,CACT,IAAK,QACL,IAAK,QACL,IAAK,QACDG,EAAY,CAAE,KAAM,UAAW,KAAM,OAAOH,EAAI,MAAM,EAAE,GAAI,EAC5DI,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,MACJ,IAAK,QACL,IAAK,QACL,IAAK,QACDH,EAAY,CAAE,KAAM,oBAAqB,KAAM,OAAOH,EAAI,MAAM,EAAE,GAAI,EACtEI,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,MACJ,IAAK,WACL,IAAK,eACL,IAAK,eACL,IAAK,eACDH,EAAY,CACR,KAAM,WACN,KAAM,OAAO,SAASH,EAAI,MAAM,EAAE,EAAG,EAAE,GAAK,GAChD,EACAI,EAAYE,EAAW,CAAC,UAAW,SAAS,EAAI,CAAC,UAAW,WAAW,EACvE,MACJ,IAAK,QACDH,EAAY,CAAE,KAAM,QAAS,WAAY,OAAQ,EACjDC,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,MACJ,IAAK,QACDH,EAAY,CAAE,KAAM,QAAS,WAAY,OAAQ,EACjDC,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,MACJ,IAAK,QACDH,EAAY,CAAE,KAAM,QAAS,WAAY,OAAQ,EACjDC,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,MACJ,IAAK,UACL,IAAK,iBACL,IAAK,iBACL,IAAK,iBAAkB,CACnB,IAAMC,EAAab,EAAcP,CAAO,EACxCgB,EAAYI,EAAW,WAAW,IAAI,EAAI,CAAE,KAAM,OAAQ,WAAAA,CAAW,EAAI,CAAE,KAAMA,CAAW,EAC5FH,EAAYE,EAAW,CAAC,EAAI,CAAC,YAAY,EACzC,KACJ,CACA,KAAKE,EAAoB,GAAK,SAAS,CACnC,IAAMD,EAAab,EAAcP,CAAO,EAAE,YAAY,EACtDgB,EAAY,CAAE,KAAM,QAAQI,IAAc,WAAY,QAAQA,GAAa,EAC3EH,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,KACJ,CACA,IAAK,QACDH,EAAY,CAAE,KAAMT,EAAcP,CAAO,CAAE,EAC3CiB,EAAYE,EAAW,CAAC,QAAQ,EAAI,CAAC,MAAM,EAC3C,MACJ,QACI,MAAM,IAAIX,EAAiB,gDAAgD,CACnF,CACA,OAAOc,EAAO,OAAO,UAAUX,EAAWX,EAASgB,GAAYD,EAAuDD,GAAQ,eAAiB,MAAQC,IAAO,OAASA,EAAK,GAAOE,CAAS,CAChM,EAIO,IAAMM,GAAW,CAACC,EAAKC,EAAKC,IACxBC,GAAc,6CAA8C,OAAQH,EAAKC,EAAKC,CAAO,ECtDhG,eAAsBE,GAAWC,EAAMC,EAAKC,EAAS,CACjD,GAAI,OAAOF,GAAS,UAAYA,EAAK,QAAQ,4BAA4B,IAAM,EAC3E,MAAM,IAAI,UAAU,sCAAsC,EAE9D,OAAOG,GAAaH,EAAMC,EAAKC,CAAO,CAC1C,CC9EA,IAAME,GAAqB,CAACC,EAAKC,IAAQ,CACrC,GAAI,EAAAA,aAAe,YAEnB,IAAI,CAACC,EAAUD,CAAG,EACd,MAAM,IAAI,UAAUE,EAAgBH,EAAKC,EAAK,GAAGG,EAAO,YAAY,CAAC,EAEzE,GAAIH,EAAI,OAAS,SACb,MAAM,IAAI,UAAU,GAAGG,EAAM,KAAK,MAAM,+DAA+D,EAE/G,EACMC,GAAsB,CAACL,EAAKC,EAAKK,IAAU,CAC7C,GAAI,CAACJ,EAAUD,CAAG,EACd,MAAM,IAAI,UAAUE,EAAgBH,EAAKC,EAAK,GAAGG,CAAK,CAAC,EAE3D,GAAIH,EAAI,OAAS,SACb,MAAM,IAAI,UAAU,GAAGG,EAAM,KAAK,MAAM,oEAAoE,EAEhH,GAAIE,IAAU,QAAUL,EAAI,OAAS,SACjC,MAAM,IAAI,UAAU,GAAGG,EAAM,KAAK,MAAM,wEAAwE,EAEpH,GAAIE,IAAU,WAAaL,EAAI,OAAS,SACpC,MAAM,IAAI,UAAU,GAAGG,EAAM,KAAK,MAAM,2EAA2E,EAEvH,GAAIH,EAAI,WAAaK,IAAU,UAAYL,EAAI,OAAS,UACpD,MAAM,IAAI,UAAU,GAAGG,EAAM,KAAK,MAAM,yEAAyE,EAErH,GAAIH,EAAI,WAAaK,IAAU,WAAaL,EAAI,OAAS,UACrD,MAAM,IAAI,UAAU,GAAGG,EAAM,KAAK,MAAM,0EAA0E,CAE1H,EACMG,GAAe,CAACP,EAAKC,EAAKK,IAAU,CACpBN,EAAI,WAAW,IAAI,GACjCA,IAAQ,OACRA,EAAI,WAAW,OAAO,GACtB,qBAAqB,KAAKA,CAAG,EAE7BD,GAAmBC,EAAKC,CAAG,EAG3BI,GAAoBL,EAAKC,EAAKK,CAAK,CAE3C,EACOE,EAAQD,GC3Cf,SAASE,GAAaC,EAAKC,EAAmBC,EAAkBC,EAAiBC,EAAY,CACzF,GAAIA,EAAW,OAAS,QAAaD,EAAgB,OAAS,OAC1
D,MAAM,IAAIH,EAAI,gEAAgE,EAElF,GAAI,CAACG,GAAmBA,EAAgB,OAAS,OAC7C,OAAO,IAAI,IAEf,GAAI,CAAC,MAAM,QAAQA,EAAgB,IAAI,GACnCA,EAAgB,KAAK,SAAW,GAChCA,EAAgB,KAAK,KAAME,GAAU,OAAOA,GAAU,UAAYA,EAAM,SAAW,CAAC,EACpF,MAAM,IAAIL,EAAI,uFAAuF,EAEzG,IAAIM,EACAJ,IAAqB,OACrBI,EAAa,IAAI,IAAI,CAAC,GAAG,OAAO,QAAQJ,CAAgB,EAAG,GAAGD,EAAkB,QAAQ,CAAC,CAAC,EAG1FK,EAAaL,EAEjB,QAAWM,KAAaJ,EAAgB,KAAM,CAC1C,GAAI,CAACG,EAAW,IAAIC,CAAS,EACzB,MAAM,IAAIC,EAAiB,+BAA+BD,sBAA8B,EAE5F,GAAIH,EAAWG,KAAe,OAC1B,MAAM,IAAIP,EAAI,+BAA+BO,eAAuB,EAEnE,GAAID,EAAW,IAAIC,CAAS,GAAKJ,EAAgBI,KAAe,OACjE,MAAM,IAAIP,EAAI,+BAA+BO,gCAAwC,CAE7F,CACA,OAAO,IAAI,IAAIJ,EAAgB,IAAI,CACvC,CACA,IAAOM,EAAQV,GCjCf,IAAMW,GAAqB,CAACC,EAAQC,IAAe,CAC/C,GAAIA,IAAe,SACd,CAAC,MAAM,QAAQA,CAAU,GAAKA,EAAW,KAAMC,GAAM,OAAOA,GAAM,QAAQ,GAC3E,MAAM,IAAI,UAAU,IAAIF,uCAA4C,EAExE,GAAI,EAACC,EAGL,OAAO,IAAI,IAAIA,CAAU,CAC7B,EACOE,GAAQJ,GCDR,IAAMK,GAAc,OAAO,ECPnB,SAARC,EAA2BC,EAAKC,EAAW,CAC9C,IAAMC,EAAO,OAAOF,EAAI,MAAM,EAAE,IAChC,OAAQA,EAAK,CACT,IAAK,QACL,IAAK,QACL,IAAK,QACD,MAAO,CAAE,KAAAE,EAAM,KAAM,MAAO,EAChC,IAAK,QACL,IAAK,QACL,IAAK,QACD,MAAO,CAAE,KAAAA,EAAM,KAAM,UAAW,WAAYF,EAAI,MAAM,EAAE,GAAK,CAAE,EACnE,IAAK,QACL,IAAK,QACL,IAAK,QACD,MAAO,CAAE,KAAAE,EAAM,KAAM,mBAAoB,EAC7C,IAAK,QACL,IAAK,QACL,IAAK,QACD,MAAO,CAAE,KAAAA,EAAM,KAAM,QAAS,WAAYD,EAAU,UAAW,EACnE,KAAKE,EAAoB,GAAK,SAC1B,GAAM,CAAE,WAAAC,CAAW,EAAIH,EACvB,MAAO,CAAE,KAAMG,EAAY,WAAAA,CAAW,EAC1C,IAAK,QACD,MAAO,CAAE,KAAMH,EAAU,IAAK,EAClC,QACI,MAAM,IAAII,EAAiB,OAAOL,8DAAgE,CAC1G,CACJ,CCzBe,SAARM,EAA8BC,EAAKC,EAAKC,EAAO,CAClD,GAAIC,EAAYF,CAAG,EACf,OAAAG,GAAkBH,EAAKD,EAAKE,CAAK,EAC1BD,EAEX,GAAIA,aAAe,WAAY,CAC3B,GAAI,CAACD,EAAI,WAAW,IAAI,EACpB,MAAM,IAAI,UAAUK,EAAgBJ,EAAK,GAAGK,CAAK,CAAC,EAEtD,OAAOC,EAAO,OAAO,UAAU,MAAON,EAAK,CAAE,KAAM,OAAOD,EAAI,MAAM,EAAE,IAAK,KAAM,MAAO,EAAG,GAAO,CAACE,CAAK,CAAC,CAC7G,CACA,MAAM,IAAI,UAAUG,EAAgBJ,EAAK,GAAGK,EAAO,YAAY,CAAC,CACpE,CCZA,IAAME,GAAS,MAAOC,EAAKC,EAAKC,EAAWC,IAAS,CAChD,IAAMC,EAAY,MAAMC,EAAaL,EAAKC,EAAK,QAAQ,EACvDK,EAAeN,EAAKI,CAAS,EAC7B,IAAMG,EAAYC,EAAgBR,EAAKI,EAAU,SAAS,EAC1D,GAAI,CACA,OAAO,MAAMK,EAAO,OAAO,OAAOF,EAAWH,EAAWF,EAAWC,CAAI,CAC3E,MACA,CACI,MAAO,EACX,CACJ,EACOO,GAAQX,GCNf,eAAsBY,EAAgBC,EAAKC,EAAKC,EAAS,CACrD,IAAIC,EACJ,GAAI,CAACC,EAASJ,CAAG,EACb,MAAM,IAAIK,EAAW,iCAAiC,EAE1D,GAAIL,EAAI,YAAc,QAAaA,EAAI,SAAW,OAC9C,MAAM,IAAIK,EAAW,uEAAuE,EAEhG,GAAIL,EAAI,YAAc,QAAa,OAAOA,EAAI,WAAc,SACxD,MAAM,IAAIK,EAAW,qCAAqC,EAE9D,GAAIL,EAAI,UAAY,OAChB,MAAM,IAAIK,EAAW,qBAAqB,EAE9C,GAAI,OAAOL,EAAI,WAAc,SACzB,MAAM,IAAIK,EAAW,yCAAyC,EAElE,GAAIL,EAAI,SAAW,QAAa,CAACI,EAASJ,EAAI,MAAM,EAChD,MAAM,IAAIK,EAAW,uCAAuC,EAEhE,IAAIC,EAAa,CAAC,EAClB,GAAIN,EAAI,UACJ,GAAI,CACA,IAAMO,GAAkBC,EAAUR,EAAI,SAAS,EAC/CM,EAAa,KAAK,MAAMG,EAAQ,OAAOF,EAAe,CAAC,CAC3D,MACA,CACI,MAAM,IAAIF,EAAW,iCAAiC,CAC1D,CAEJ,GAAI,CAACK,EAAWJ,EAAYN,EAAI,MAAM,EAClC,MAAM,IAAIK,EAAW,2EAA2E,EAEpG,IAAMM,EAAa,CACf,GAAGL,EACH,GAAGN,EAAI,MACX,EACMY,EAAaC,EAAaR,EAAY,IAAI,IAAI,CAAC,CAAC,MAAO,EAAI,CAAC,CAAC,EAAqDH,GAAQ,KAAMI,EAAYK,CAAU,EACxJG,EAAM,GACV,GAAIF,EAAW,IAAI,KAAK,IACpBE,EAAMR,EAAW,IACb,OAAOQ,GAAQ,WACf,MAAM,IAAIT,EAAW,yEAAyE,EAGtG,GAAM,CAAE,IAAAU,CAAI,EAAIJ,EAChB,GAAI,OAAOI,GAAQ,UAAY,CAACA,EAC5B,MAAM,IAAIV,EAAW,2DAA2D,EAEpF,IAAMW,EAAad,GAAWe,GAAmB,aAAcf,EAAQ,UAAU,EACjF,GAAIc,GAAc,CAACA,EAAW,IAAID,CAAG,EACjC,MAAM,IAAIG,EAAkB,gDAAgD,EAEhF,GAAIJ,GACA,GAAI,OAAOd,EAAI,SAAY,SACvB,MAAM,IAAIK,EAAW,8BAA8B,UAGlD,OAAOL,EAAI,SAAY,UAAY,EAAEA,EAAI,mBAAmB,YACjE,MAAM,IAAIK,EAAW,wDAAwD,EAEjF,IAAIc,EAAc,GACd,OAAOlB,GAAQ,aACfA,EAAM,MAAMA,EAAIK,EAAYN,CAAG,EAC/BmB,EAAc,IAElBC,EAAaL,EAAKd,EAAK,QAAQ,EAC/B,IAAMoB,EAAOC,EAAOC,EAAQ,QAAQpB,EAAKH,EAAI,aAAe,MAAQG,IAAO,OAASA,EAAK,
EAAE,EAAGoB,EAAQ,OAAO,GAAG,EAAG,OAAOvB,EAAI,SAAY,SAAWuB,EAAQ,OAAOvB,EAAI,OAAO,EAAIA,EAAI,OAAO,EACxLwB,EAAYhB,EAAUR,EAAI,SAAS,EAEzC,GAAI,CADa,MAAMyB,GAAOV,EAAKd,EAAKuB,EAAWH,CAAI,EAEnD,MAAM,IAAIK,EAEd,IAAIC,EACAb,EACAa,EAAUnB,EAAUR,EAAI,OAAO,EAE1B,OAAOA,EAAI,SAAY,SAC5B2B,EAAUJ,EAAQ,OAAOvB,EAAI,OAAO,EAGpC2B,EAAU3B,EAAI,QAElB,IAAM4B,EAAS,CAAE,QAAAD,CAAQ,EAOzB,OANI3B,EAAI,YAAc,SAClB4B,EAAO,gBAAkBtB,GAEzBN,EAAI,SAAW,SACf4B,EAAO,kBAAoB5B,EAAI,QAE/BmB,EACO,CAAE,GAAGS,EAAQ,IAAA3B,CAAI,EAErB2B,CACX,CCpGA,eAAsBC,GAAcC,EAAKC,EAAKC,EAAS,CAInD,GAHIF,aAAe,aACfA,EAAMG,EAAQ,OAAOH,CAAG,GAExB,OAAOA,GAAQ,SACf,MAAM,IAAII,EAAW,4CAA4C,EAErE,GAAM,CAAE,EAAGC,EAAiB,EAAGC,EAAS,EAAGC,EAAW,OAAAC,CAAO,EAAIR,EAAI,MAAM,GAAG,EAC9E,GAAIQ,IAAW,EACX,MAAM,IAAIJ,EAAW,qBAAqB,EAE9C,IAAMK,EAAW,MAAMC,EAAgB,CAAE,QAAAJ,EAAS,UAAWD,EAAiB,UAAAE,CAAU,EAAGN,EAAKC,CAAO,EACjGS,EAAS,CAAE,QAASF,EAAS,QAAS,gBAAiBA,EAAS,eAAgB,EACtF,OAAI,OAAOR,GAAQ,WACR,CAAE,GAAGU,EAAQ,IAAKF,EAAS,GAAI,EAEnCE,CACX,CCpBA,IAAOC,GAASC,GAAS,KAAK,MAAMA,EAAK,QAAQ,EAAI,GAAI,ECKzD,IAAMC,GAAQ,sGACPC,EAASC,GAAQ,CACpB,IAAMC,EAAUH,GAAM,KAAKE,CAAG,EAC9B,GAAI,CAACC,EACD,MAAM,IAAI,UAAU,4BAA4B,EAEpD,IAAMC,EAAQ,WAAWD,EAAQ,EAAE,EAEnC,OADaA,EAAQ,GAAG,YAAY,EACtB,CACV,IAAK,MACL,IAAK,OACL,IAAK,SACL,IAAK,UACL,IAAK,IACD,OAAO,KAAK,MAAMC,CAAK,EAC3B,IAAK,SACL,IAAK,UACL,IAAK,MACL,IAAK,OACL,IAAK,IACD,OAAO,KAAK,MAAMA,EAAQ,EAAM,EACpC,IAAK,OACL,IAAK,QACL,IAAK,KACL,IAAK,MACL,IAAK,IACD,OAAO,KAAK,MAAMA,EAAQ,IAAI,EAClC,IAAK,MACL,IAAK,OACL,IAAK,IACD,OAAO,KAAK,MAAMA,EAAQ,KAAG,EACjC,IAAK,OACL,IAAK,QACL,IAAK,IACD,OAAO,KAAK,MAAMA,EAAQ,MAAI,EAClC,QACI,OAAO,KAAK,MAAMA,EAAQ,QAAI,CACtC,CACJ,ECtCA,IAAMC,GAAgBC,GAAUA,EAAM,YAAY,EAAE,QAAQ,iBAAkB,EAAE,EAC1EC,GAAwB,CAACC,EAAYC,IACnC,OAAOD,GAAe,SACfC,EAAU,SAASD,CAAU,EAEpC,MAAM,QAAQA,CAAU,EACjBC,EAAU,KAAK,IAAI,UAAU,IAAI,KAAK,IAAI,IAAID,CAAU,CAAC,CAAC,EAE9D,GAEJE,EAAQ,CAACC,EAAiBC,EAAgBC,EAAU,CAAC,IAAM,CAC9D,GAAM,CAAE,IAAAC,CAAI,EAAID,EAChB,GAAIC,IACC,OAAOH,EAAgB,KAAQ,UAC5BN,GAAaM,EAAgB,GAAG,IAAMN,GAAaS,CAAG,GAC1D,MAAM,IAAIC,EAAyB,oCAAqC,MAAO,cAAc,EAEjG,IAAIC,EACJ,GAAI,CACAA,EAAU,KAAK,MAAMC,EAAQ,OAAOL,CAAc,CAAC,CACvD,MACA,CACA,CACA,GAAI,CAACM,EAASF,CAAO,EACjB,MAAM,IAAIG,EAAW,gDAAgD,EAEzE,GAAM,CAAE,OAAAC,CAAO,EAAIP,EACnB,GAAIO,GAAU,EAAE,MAAM,QAAQA,CAAM,EAAIA,EAAS,CAACA,CAAM,GAAG,SAASJ,EAAQ,GAAG,EAC3E,MAAM,IAAID,EAAyB,+BAAgC,MAAO,cAAc,EAE5F,GAAM,CAAE,QAAAM,CAAQ,EAAIR,EACpB,GAAIQ,GAAWL,EAAQ,MAAQK,EAC3B,MAAM,IAAIN,EAAyB,+BAAgC,MAAO,cAAc,EAE5F,GAAM,CAAE,SAAAO,CAAS,EAAIT,EACrB,GAAIS,GACA,CAACf,GAAsBS,EAAQ,IAAK,OAAOM,GAAa,SAAW,CAACA,CAAQ,EAAIA,CAAQ,EACxF,MAAM,IAAIP,EAAyB,+BAAgC,MAAO,cAAc,EAE5F,IAAIQ,EACJ,OAAQ,OAAOV,EAAQ,eAAgB,CACnC,IAAK,SACDU,EAAYC,EAAKX,EAAQ,cAAc,EACvC,MACJ,IAAK,SACDU,EAAYV,EAAQ,eACpB,MACJ,IAAK,YACDU,EAAY,EACZ,MACJ,QACI,MAAM,IAAI,UAAU,oCAAoC,CAChE,CACA,GAAM,CAAE,YAAAE,CAAY,EAAIZ,EAClBa,EAAMC,GAAMF,GAAe,IAAI,IAAM,EAC3C,IAAKT,EAAQ,MAAQ,QAAaH,EAAQ,cAAgB,OAAOG,EAAQ,KAAQ,SAC7E,MAAM,IAAID,EAAyB,+BAAgC,MAAO,SAAS,EAEvF,GAAIC,EAAQ,MAAQ,OAAW,CAC3B,GAAI,OAAOA,EAAQ,KAAQ,SACvB,MAAM,IAAID,EAAyB,+BAAgC,MAAO,SAAS,EAEvF,GAAIC,EAAQ,IAAMU,EAAMH,EACpB,MAAM,IAAIR,EAAyB,qCAAsC,MAAO,cAAc,CAEtG,CACA,GAAIC,EAAQ,MAAQ,OAAW,CAC3B,GAAI,OAAOA,EAAQ,KAAQ,SACvB,MAAM,IAAID,EAAyB,+BAAgC,MAAO,SAAS,EAEvF,GAAIC,EAAQ,KAAOU,EAAMH,EACrB,MAAM,IAAIK,EAAW,qCAAsC,MAAO,cAAc,CAExF,CACA,GAAIf,EAAQ,YAAa,CACrB,IAAMgB,EAAMH,EAAMV,EAAQ,IACpBc,EAAM,OAAOjB,EAAQ,aAAgB,SAAWA,EAAQ,YAAcW,EAAKX,EAAQ,WAAW,EACpG,GAAIgB,EAAMN,EAAYO,EAClB,MAAM,IAAIF,EAAW,2DAA4D,MAAO,cAAc,EAE1G,GAAIC,EAAM,EAAIN,EACV,MAAM,IAAIR,EAAyB,gEAAiE,MAAO,cAAc,CAEjI,CACA,OAAOC,CACX,ECvFA,eAAsBe,GAAUC,EAAKC,EAAKC,EAAS,CAC/C,IAAIC,EACJ,IAAMC,EAAW,
MAAMC,GAAcL,EAAKC,EAAKC,CAAO,EACtD,KAAMC,EAAKC,EAAS,gBAAgB,QAAU,MAAQD,IAAO,OAAS,OAASA,EAAG,SAAS,KAAK,IAAMC,EAAS,gBAAgB,MAAQ,GACnI,MAAM,IAAIE,EAAW,qCAAqC,EAG9D,IAAMC,EAAS,CAAE,QADDC,EAAWJ,EAAS,gBAAiBA,EAAS,QAASF,CAAO,EACpD,gBAAiBE,EAAS,eAAgB,EACpE,OAAI,OAAOH,GAAQ,WACR,CAAE,GAAGM,EAAQ,IAAKH,EAAS,GAAI,EAEnCG,CACX,CCEA,IAAME,GACF,oEAEJ,eAAsBC,EAClBC,EACAC,EACgB,CAEhB,GAAIA,EAAI,YAAc,OAClB,MAAO,GAIX,IAAMC,EAAQJ,GAAiB,KAAKE,EAAI,QAAQ,IAAI,eAAe,GAAK,EAAE,EAC1E,GAAI,CAACE,GAAS,CAACA,EAAM,GACjB,MAAO,GAIX,IAAMC,EAAK,MAAMC,GAAWH,EAAI,WAAY,OAAO,EACnD,GAAI,CACA,MAAMI,GAAUH,EAAM,GAAIC,EAAI,CAC1B,OAAQ,qBACR,SAAUF,EAAI,eACd,WAAY,CAAC,OAAO,EAEpB,eAAgB,GACpB,CAAC,CACL,OAASK,EAAP,CACE,eAAQ,MAAM,2BAA6BA,CAAG,EACvC,EACX,CAEA,MAAO,EACX,mBChCA,IAAMC,GAASC,GAAO,EAEjB,IACG,yBACA,MACIC,EACAC,IACoB,CAEpB,GAAI,CADS,MAAMC,EAAiBF,EAAKC,CAAG,EAExC,OAAO,IAAI,SAAS,eAAgB,CAAE,OAAQ,GAAI,CAAC,EAIvD,IAAME,EAAmB,CAAC,EACpBC,EAAe,CAAC,EAChBC,EAAe,CAAC,EAChBC,EAAM,OAAO,KAAKL,CAAG,EAC3B,QAASM,EAAI,EAAGA,EAAID,EAAI,OAAQC,IAAK,CACjC,GAAI,CAACD,EAAIC,GACL,SAEJ,IAAMC,EAAMP,EAAIK,EAAIC,IACpB,GAAI,GAACC,GAAO,OAAOA,GAAO,UAAY,CAACA,EAAI,aAG3C,OAAQA,EAAI,YAAY,KAAM,CAC1B,IAAK,cACL,IAAK,cACDJ,EAAG,KAAKE,EAAIC,EAAE,EACd,MACJ,IAAK,cACL,IAAK,QACDJ,EAAO,KAAKG,EAAIC,EAAE,EAClB,MACJ,IAAK,WAEDF,EAAG,KAAKC,EAAIC,EAAE,EACd,KACR,CACJ,CAEA,IAAME,EAAM,KAAK,UAAU,CACvB,QAAAC,GACA,OAAQP,GAAUA,EAAO,OAASA,EAAS,OAC3C,GAAIC,GAAMA,EAAG,OAASA,EAAK,OAC3B,GAAIC,GAAMA,EAAG,OAASA,EAAK,MAC/B,CAAC,EACD,OAAO,IAAI,SAASI,EAAK,CACrB,QAAS,CACL,eAAgB,kBACpB,CACJ,CAAC,CACL,CACJ,EAGC,IACG,sBACA,MACIT,EACAC,IACoB,CACpB,GAAM,CAAE,UAAAU,EAAW,IAAAC,EAAK,SAAAC,CAAS,EAAI,MAAMC,GAAed,EAAKC,CAAG,EAClE,GAAIY,EACA,OAAOA,EAGX,IAAME,EAAM,MAAMJ,EAAW,IAAIC,EAAM,QAAQ,EAC/C,OAAKG,EAIE,IAAI,SAASA,EAAK,CAAE,OAAQ,GAAI,CAAC,EAH7B,IAAI,SAAS,GAAI,CAAE,OAAQ,GAAI,CAAC,CAI/C,CACJ,EAGC,KACG,sBACA,MACIf,EACAC,IACoB,CACpB,GAAM,CAAE,UAAAU,EAAW,IAAAC,EAAK,SAAAC,CAAS,EAAI,MAAMC,GAAed,EAAKC,CAAG,EAClE,GAAIY,EACA,OAAOA,EAGX,IAAIG,EACEC,EAAS,IAAI,IAAIjB,EAAI,GAAG,EACxBkB,EAAW,SAASD,EAAO,aAAa,IAAI,KAAK,GAAI,GAAI,EAAE,EACjE,OAAIC,EAAW,IACXF,EAAgBE,GAEpB,MAAMP,EAAW,IAAIC,EAAMZ,EAAI,KAAO,CAAC,cAAAgB,CAAa,CAAC,EAE9C,IAAI,SAAS,GAAI,CAAE,OAAQ,GAAI,CAAC,CAC3C,CACJ,EAGC,OACG,sBACA,MACIhB,EACAC,IACoB,CACpB,GAAM,CAAE,UAAAU,EAAW,IAAAC,EAAK,SAAAC,CAAS,EAAI,MAAMC,GAAed,EAAKC,CAAG,EAClE,OAAIY,IAIJ,MAAMF,EAAW,OAAOC,CAAI,EAErB,IAAI,SAAS,GAAI,CAAE,OAAQ,GAAI,CAAC,EAC3C,CACJ,EAGC,KACG,iBACA,MACIZ,EACAC,IACoB,CACpB,GAAM,CAAE,MAAAkB,EAAO,SAAAN,CAAS,EAAI,MAAMO,GAAkBpB,EAAKC,CAAG,EAC5D,GAAIY,EACA,OAAOA,EAGX,IAAIQ,EAAU,MAAMrB,EAAI,KAAK,EAC7B,aAAMmB,EAAO,KAAKE,CAAO,EAClB,IAAI,SAAS,GAAI,CAAE,OAAQ,GAAI,CAAC,CAC3C,CACJ,EAGC,IAAI,IAAK,IACC,IAAI,SAAS,YAAa,CAAE,OAAQ,GAAI,CAAC,CACnD,EAGL,eAAeP,GACXd,EACAC,EAKD,CACC,GAAI,CAACD,GAAK,MAAQ,CAACA,EAAI,QAAQ,WAAa,CAACA,EAAI,QAAQ,IACrD,MAAO,CAAE,SAAU,IAAI,SAAS,cAAe,CAAE,OAAQ,GAAI,CAAC,CAAE,EAEpE,IAAMW,EAAYV,EAAID,EAAI,OAAO,WACjC,OAAI,OAAOW,GAAa,UAAY,CAAC,CAAC,cAAe,aAAa,EAAE,SAASA,GAAW,aAAa,IAAI,EAC9F,CACH,SAAU,IAAI,SACV,8BAA8BX,EAAI,OAAO,MACzC,CAAE,OAAQ,GAAI,CAClB,CACJ,EAGS,MAAME,EAAiBF,EAAKC,CAAG,EAKrC,CAAE,UAAAU,EAAW,IAAKX,EAAI,OAAO,GAAI,EAH7B,CAAE,SAAU,IAAI,SAAS,eAAgB,CAAE,OAAQ,GAAI,CAAC,CAAE,CAIzE,CAGA,eAAeoB,GACXpB,EACAC,EACuD,CACvD,GAAI,CAACD,GAAK,MAAQ,CAACA,EAAI,QAAQ,MAC3B,MAAO,CAAE,SAAU,IAAI,SAAS,cAAe,CAAE,OAAQ,GAAI,CAAC,CAAE,EAEpE,IAAMmB,EAAQlB,EAAID,EAAI,OAAO,OAC7B,OAAI,OAAOmB,GAAS,UAAY,CAAC,CAAC,cAAe,OAAO,EAAE,SAASA,GAAO,aAAa,IAAI,EAChF,CACH,SAAU,IAAI,SACV,iCAAiCnB,EAAI,OAAO,SAC5C,CAAE,OAAQ,GAAI,CAClB,CACJ,EAGS,MAAME,EAAiBF,EAAKC,CAAG,EAKrC,CAAE,MAAAkB,CAAM,EAHJ,CAAE,SAAU,IAAI,SAAS,eAAgB,
CAAE,OAAQ,GAAI,CAAC,CAAE,CAIzE,CAEA,IAAOG,GAAQ,CACX,MAAOxB,GAAO,MAClB", - "names": ["e", "t", "n", "a", "r", "o", "p", "s", "c", "webcrypto_default", "isCryptoKey", "key", "encoder", "decoder", "MAX_INT32", "concat", "buffers", "size", "acc", "length", "buf", "i", "buffer", "decodeBase64", "encoded", "binary", "bytes", "i", "decode", "input", "decoder", "JOSEError", "message", "_a", "JWTClaimValidationFailed", "claim", "reason", "JWTExpired", "JOSEAlgNotAllowed", "JOSENotSupported", "JWSInvalid", "JOSEError", "JWTInvalid", "JWSSignatureVerificationFailed", "JOSEError", "random_default", "webcrypto_default", "isCloudflareWorkers", "unusable", "name", "prop", "isAlgorithm", "algorithm", "getHashLength", "hash", "getNamedCurve", "alg", "checkUsage", "key", "usages", "expected", "msg", "last", "checkSigCryptoKey", "isCloudflareWorkers", "message", "msg", "actual", "types", "last", "invalid_key_input_default", "withAlg", "alg", "is_key_like_default", "key", "isCryptoKey", "types", "isDisjoint", "headers", "sources", "acc", "header", "parameters", "parameter", "is_disjoint_default", "isObjectLike", "value", "isObject", "input", "proto", "check_key_length_default", "alg", "key", "modulusLength", "findOid", "keyData", "oid", "from", "i", "sub", "value", "index", "getNamedCurve", "JOSENotSupported", "genericImport", "replace", "keyFormat", "pem", "alg", "options", "_a", "algorithm", "keyUsages", "c", "isPublic", "namedCurve", "isCloudflareWorkers", "webcrypto_default", "fromSPKI", "pem", "alg", "options", "genericImport", "importSPKI", "spki", "alg", "options", "fromSPKI", "symmetricTypeCheck", "alg", "key", "is_key_like_default", "withAlg", "types", "asymmetricTypeCheck", "usage", "checkKeyType", "check_key_type_default", "validateCrit", "Err", "recognizedDefault", "recognizedOption", "protectedHeader", "joseHeader", "input", "recognized", "parameter", "JOSENotSupported", "validate_crit_default", "validateAlgorithms", "option", "algorithms", "s", "validate_algorithms_default", "unprotected", "subtleDsa", "alg", "algorithm", "hash", "isCloudflareWorkers", "namedCurve", "JOSENotSupported", "getCryptoKey", "alg", "key", "usage", "isCryptoKey", "checkSigCryptoKey", "invalid_key_input_default", "types", "webcrypto_default", "verify", "alg", "key", "signature", "data", "cryptoKey", "getCryptoKey", "check_key_length_default", "algorithm", "subtleDsa", "webcrypto_default", "verify_default", "flattenedVerify", "jws", "key", "options", "_a", "isObject", "JWSInvalid", "parsedProt", "protectedHeader", "decode", "decoder", "is_disjoint_default", "joseHeader", "extensions", "validate_crit_default", "b64", "alg", "algorithms", "validate_algorithms_default", "JOSEAlgNotAllowed", "resolvedKey", "check_key_type_default", "data", "concat", "encoder", "signature", "verify_default", "JWSSignatureVerificationFailed", "payload", "result", "compactVerify", "jws", "key", "options", "decoder", "JWSInvalid", "protectedHeader", "payload", "signature", "length", "verified", "flattenedVerify", "result", "epoch_default", "date", "REGEX", "secs_default", "str", "matched", "value", "normalizeTyp", "value", "checkAudiencePresence", "audPayload", "audOption", "jwt_claims_set_default", "protectedHeader", "encodedPayload", "options", "typ", "JWTClaimValidationFailed", "payload", "decoder", "isObject", "JWTInvalid", "issuer", "subject", "audience", "tolerance", "secs_default", "currentDate", "now", "epoch_default", "JWTExpired", "age", "max", "jwtVerify", "jwt", "key", "options", "_a", "verified", "compactVerify", "JWTInvalid", 
"result", "jwt_claims_set_default", "tokenHeaderMatch", "AuthorizeRequest", "req", "env", "match", "pk", "importSPKI", "jwtVerify", "err", "router", "e", "req", "env", "AuthorizeRequest", "queues", "kv", "r2", "all", "i", "obj", "res", "version", "namespace", "key", "errorRes", "setupKVRequest", "val", "expirationTtl", "reqUrl", "ttlParam", "queue", "setupQueueRequest", "message", "worker_default"] -} diff --git a/tests/certification/secretstores/local/file/components/secrets.json b/tests/certification/secretstores/local/file/components/secrets.json index a267c2a9e..c928a018b 100644 --- a/tests/certification/secretstores/local/file/components/secrets.json +++ b/tests/certification/secretstores/local/file/components/secrets.json @@ -3,4 +3,4 @@ "nestedsecret": { "secret": "efgh" } -} \ No newline at end of file +} diff --git a/tests/config/secretstores/localenv/env.yml b/tests/config/secretstores/local/env/env.yml similarity index 100% rename from tests/config/secretstores/localenv/env.yml rename to tests/config/secretstores/local/env/env.yml diff --git a/tests/config/secretstores/localfile/local.yml b/tests/config/secretstores/local/file/file.yml similarity index 100% rename from tests/config/secretstores/localfile/local.yml rename to tests/config/secretstores/local/file/file.yml diff --git a/tests/config/secretstores/tests.yml b/tests/config/secretstores/tests.yml index 6bdbacaa9..7065e2964 100644 --- a/tests/config/secretstores/tests.yml +++ b/tests/config/secretstores/tests.yml @@ -1,9 +1,9 @@ # Supported operations: get, bulkget componentType: secretstores components: - - component: localenv - operations: ["get"] - - component: localfile + - component: local.env + allOperations: true + - component: local.file allOperations: true - component: azure.keyvault.certificate allOperations: true diff --git a/tests/conformance/README.md b/tests/conformance/README.md index ffed4f0bc..d48886c5d 100644 --- a/tests/conformance/README.md +++ b/tests/conformance/README.md @@ -63,17 +63,16 @@ 1. Test setup is independent of the test run. 2. Run the service that needs to conformance tested locally or in your own cloud account. - - For cloud-agnostic components such as Kafka, MQTT etc., there are `docker-compose` definitions under the [/.github/infrastructure](https://github.com/dapr/components-contrib/tree/master/.github/infrastructure) folder you can use to quickly create an instance of the service. For example, to setup Kafka for conformance tests: + - For cloud-agnostic components such as Kafka, MQTT etc., there are `docker-compose` definitions under the [/.github/infrastructure](../../.github/infrastructure/) folder you can use to quickly create an instance of the service. For example, to setup Kafka for conformance tests: ```bash docker-compose -f ./.github/infrastructure/docker-compose-kafka.yml -p kafka up -d ``` - For Azure components such as Blob Storage, Key Vault etc., there is an automation script that can help you create the resources under your subscription, and extract the environment variables needed to run the conformance tests. See [/.github/infrastructure/conformance/azure/README.md](../../.github/infrastructure/conformance/azure/README.md) for more details. 
+ - Some components require additional set up or teardown scripts, which are placed in [/.github/scripts/components-scripts/](../../.github/scripts/components-scripts/) - > Given the variability in components and how they need to be set up for the conformance tests, you may need to refer to the [GitHub workflow for conformance tests](../../.github/workflows/conformance.yml) for any extra setup required by some components. E.g. Azure Event Grid bindings require setting up an Ngrok instance or similar endpoint for the test. - -3. Some conformance tests require credentials in the form of environment variables. For examples Azure CosmosDB conformance tests will need to have Azure CosmosDB credentials. You will need to supply them to make these tests pass. +3. Some conformance tests require credentials in the form of environment variables. For examples Azure Cosmos DB conformance tests will need to have Azure Cosmos DB credentials. You will need to supply them to make these tests pass. 4. To run specific tests, run: ```bash @@ -121,22 +120,22 @@ If you want to combine VS Code & dlv for debugging so you can set breakpoints in } ``` -## Using terraform for conformance tests +## Using Terraform for conformance tests -If you are writing new conformance tests and they require cloud resources, you should use the -terraform framework we have in place. To enable your component test to use terraform there are a few changes in the normal steps you must do. +If you are writing new conformance tests and they require cloud resources, you should use the Terraform framework we have in place. To enable your component test to use terraform there are a few changes in the normal steps you must do. -1. In the `conformance.yml` you should create a new step in a workflow for your component that creates new env variables. You will need a variable for each specific resource your tests will use. If you require 3 different topics and 2 different tables for your tests you should have 5 different env variables set. The only convention you must follow for the variables is the value must use `env.UNIQUE_ID` to ensure there are no conflicts with the resource names. +1. Create a setup and teardown script in [/.github/scripts/components-scripts/](../../.github/scripts/components-scripts/) for your component. You should also define new env variables. You will need a variable for each specific resource your tests will use. If you require 3 different topics and 2 different tables for your tests you should have 5 different env variables set. The only convention you must follow for the variables is the value must use `$UNIQUE_ID` to ensure there are no conflicts with the resource names. ```bash - PUBSUB_AWS_SNSSQS_QUEUE="testQueue-${{ env.UNIQUE_ID }}" - echo "PUBSUB_AWS_SNSSQS_QUEUE=$PUBSUB_AWS_SNSSQS_QUEUE" >> $GITHUB_ENV + echo "PUBSUB_AWS_SNSSQS_QUEUE=testQueue-${UNIQUE_ID}" >> $GITHUB_ENV ``` + Take a look at the AWS DynamoDB [setup](../../.github/scripts/components-scripts/conformance-state.aws.dynamodb-setup.sh) and [teardown](../../.github/scripts/components-scripts/conformance-state.aws.dynamodb-destroy.sh) scripts as example. + 2. When updating the `tests.yml` defined inside `tests/config//` folder you should overwrite the default names of any resources the conformance tests use. These values should reference env variables which should be defined in the conformance.yml. 
```yaml - - component: aws.snssqs.terraform + - component: aws.snssqs.terraform operations: ["publish", "subscribe", "multiplehandlers"] config: pubsubName: aws-snssqs @@ -148,22 +147,22 @@ terraform framework we have in place. To enable your component test to use terra 3. When writing your `component.yml` you should reference your credentials using env variables and any resources specified in the yaml should use env variables as well just as you did in the `test.yml`. Also if your component has an option that controls resource creation such as `disableEntityManagement` you will need to set it so it prohibits new resource creation. We want to use only terraform to provision resources and not dapr itself for these tests. ```yaml - metadata: + metadata: - name: accessKey - value: ${{AWS_ACCESS_KEY_ID}} + value: ${{AWS_ACCESS_KEY_ID}} - name: secretKey - value: ${{AWS_SECRET_ACCESS_KEY}} + value: ${{AWS_SECRET_ACCESS_KEY}} - name: region - value: "us-east-1" + value: "us-east-1" - name: consumerID - value: ${{PUBSUB_AWS_SNSSQS_QUEUE}} + value: ${{PUBSUB_AWS_SNSSQS_QUEUE}} - name: disableEntityManagement - value: "true" + value: "true" ``` 4. You will need to create a new terrafrorm file `component.tf` to provision your resources. The file should be placed in its own folder in the `.github/infrastructure/terraform/conformance` directory such as -`.github/infrastructure/terraform/conformance/pubsub/aws/snsqsq`. The terraform file should use a UNIQUE_ID variables and use this variables when naming its resources so they matched the names defined earlier. Make sure any resources your tests will use are defined in terraform. +`.github/infrastructure/terraform/conformance/pubsub/aws/snsqsq`. The terraform file should use a `UNIQUE_ID` variable and use this variables when naming its resources so they matched the names defined earlier. Make sure any resources your tests will use are defined in terraform. ``` variable "UNIQUE_ID" { @@ -172,23 +171,18 @@ terraform framework we have in place. To enable your component test to use terra } ``` -5. The component should be added to the `cron-components` step in conformance test workflow `.github/conformance.yml`. The component should have a variable named `terraform-dir` and the value should be the relative path from `.github/infrastructure/terraform/conformance` to the folder which the tests personal terraform files are located such as `pubsub/aws/snsqsq`. +5. Register your test in the file [/.github/scripts/test-info.mjs](../../.github/scripts/test-info.mjs) file, making sure to set `requiresTerraform: true`. - ``` - - component: pubsub.aws.snssqs.terraform - terraform-dir: pubsub/aws/snssqs - ``` - -## Adding new AWS component in github actions +## Adding new AWS component in GitHub Actions 1. For tests involving aws components we use a service account to provision the resources needed. If you are contributing a brand new component you will need to make sure our account has sufficient permissions to provision resources and use handle component. A Dapr STC member will have to update the service account so contact them for assistance. 2. In your component yaml for your tests you should set the component metadata properties `accesskey` and `secretkey` to the values of `${{AWS_ACCESS_KEY_ID}}` and `${{AWS_SECRET_ACCESS_KEY}}`. These env values will contain the credentials for the testing service account. 
diff --git a/tests/conformance/common.go b/tests/conformance/common.go
index 4e4b8bb2b..5b4ca81e0 100644
--- a/tests/conformance/common.go
+++ b/tests/conformance/common.go
@@ -510,9 +510,9 @@ func loadSecretStore(tc TestComponent) secretstores.SecretStore {
 		store = ss_azure.NewAzureKeyvaultSecretStore(testLogger)
 	case "kubernetes":
 		store = ss_kubernetes.NewKubernetesSecretStore(testLogger)
-	case "localenv":
+	case "local.env":
 		store = ss_local_env.NewEnvSecretStore(testLogger)
-	case "localfile":
+	case "local.file":
 		store = ss_local_file.NewLocalSecretStore(testLogger)
 	case "hashicorp.vault":
 		store = ss_hashicorp_vault.NewHashiCorpVaultSecretStore(testLogger)
diff --git a/tests/conformance/secrets.json b/tests/conformance/secrets.json
index c6dc0c790..8a6623844 100644
--- a/tests/conformance/secrets.json
+++ b/tests/conformance/secrets.json
@@ -1,4 +1,4 @@
 {
   "conftestsecret": "abcd",
   "secondsecret": "efgh"
-}
\ No newline at end of file
+}
diff --git a/tests/conformance/secretstores/secretstores.go b/tests/conformance/secretstores/secretstores.go
index 307d6f4cc..30e5f7530 100644
--- a/tests/conformance/secretstores/secretstores.go
+++ b/tests/conformance/secretstores/secretstores.go
@@ -15,7 +15,6 @@ package secretstores
 import (
 	"context"
-	"os"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -46,8 +45,8 @@ func NewTestConfig(name string, allOperations bool, operations []string) TestCon
 func ConformanceTests(t *testing.T, props map[string]string, store secretstores.SecretStore, config TestConfig) {
 	// TODO add support for metadata
 	// For local env var based component test
-	os.Setenv("conftestsecret", "abcd")
-	defer os.Unsetenv("conftestsecret")
+	t.Setenv("conftestsecret", "abcd")
+	t.Setenv("secondsecret", "efgh")
 
 	// Init
 	t.Run("init", func(t *testing.T) {
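A note on the `secretstores.go` hunk above: `t.Setenv` (available since Go 1.17) registers a cleanup that restores the variable's previous value when the test finishes, which is why the `defer os.Unsetenv(...)` line can be dropped. A minimal self-contained sketch of that behavior, with hypothetical package and test names:

```go
package example

import (
	"os"
	"testing"
)

func TestSetenvScope(t *testing.T) {
	// t.Setenv sets the variable for the duration of this test and
	// automatically restores the prior value afterwards, replacing the
	// manual os.Setenv / defer os.Unsetenv pairing.
	t.Setenv("conftestsecret", "abcd")

	if got := os.Getenv("conftestsecret"); got != "abcd" {
		t.Fatalf(`os.Getenv("conftestsecret") = %q, want "abcd"`, got)
	}
}
```

One side effect worth knowing: a test that calls `t.Setenv` cannot also call `t.Parallel`; the testing package fails it to prevent env-var races.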
- "remoteUser": "dapr", + "extensions": [ + "golang.go", + "ms-azuretools.vscode-dapr", + "ms-azuretools.vscode-docker", + "ms-kubernetes-tools.vscode-kubernetes-tools" + ], + // As a workaround for Codespaces not supporting workspaceFolder/workspace mount, create + // a symlink from /workspaces/components-contrib to /go/src/github.com/dapr/components-contrib + "postCreateCommand": "bash /usr/local/share/setup-gopath.sh components-contrib true", + // On Linux, this will prevent new files getting created as root, but you + // may need to update the USER_UID and USER_GID in docker/Dockerfile-dev + // to match your user if not 1000. + "remoteUser": "dapr", "settings": { - "terminal.integrated.defaultProfile.linux": "bash", - }, - "workspaceFolder": "/go/src/github.com/dapr/components-contrib", + "terminal.integrated.defaultProfile.linux": "bash" + }, + "workspaceFolder": "/go/src/github.com/dapr/components-contrib" } diff --git a/tests/e2e/pubsub/jetstream/.devcontainer/devcontainer.json b/tests/e2e/pubsub/jetstream/.devcontainer/devcontainer.json index 28d9d3ae3..41829a085 100644 --- a/tests/e2e/pubsub/jetstream/.devcontainer/devcontainer.json +++ b/tests/e2e/pubsub/jetstream/.devcontainer/devcontainer.json @@ -2,43 +2,38 @@ // https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-existing-docker-compose // If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml. { - "name": "Existing Docker Compose (Extend)", + "name": "Existing Docker Compose (Extend)", - // Update the 'dockerComposeFile' list if you have more compose files or use different names. - // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make. - "dockerComposeFile": [ - "../docker-compose.yaml", - "docker-compose.yml" - ], + // Update the 'dockerComposeFile' list if you have more compose files or use different names. + // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make. + "dockerComposeFile": ["../docker-compose.yaml", "docker-compose.yml"], - // The 'service' property is the name of the service for the container that VS Code should - // use. Update this value and .devcontainer/docker-compose.yml to the real service name. - "service": "dev", + // The 'service' property is the name of the service for the container that VS Code should + // use. Update this value and .devcontainer/docker-compose.yml to the real service name. + "service": "dev", - // The optional 'workspaceFolder' property is the path VS Code should open by default when - // connected. This is typically a file mount in .devcontainer/docker-compose.yml - "workspaceFolder": "/workspace", + // The optional 'workspaceFolder' property is the path VS Code should open by default when + // connected. This is typically a file mount in .devcontainer/docker-compose.yml + "workspaceFolder": "/workspace", - // Set *default* container specific settings.json values on container create. - "settings": {}, + // Set *default* container specific settings.json values on container create. + "settings": {}, - // Add the IDs of extensions you want installed when the container is created. - "extensions": [ - "golang.go" - ] + // Add the IDs of extensions you want installed when the container is created. + "extensions": ["golang.go"] - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], + // Use 'forwardPorts' to make a list of ports inside the container available locally. 
+ // "forwardPorts": [], - // Uncomment the next line if you want start specific services in your Docker Compose config. - // "runServices": [], + // Uncomment the next line if you want start specific services in your Docker Compose config. + // "runServices": [], - // Uncomment the next line if you want to keep your containers running after VS Code shuts down. - // "shutdownAction": "none", + // Uncomment the next line if you want to keep your containers running after VS Code shuts down. + // "shutdownAction": "none", - // Uncomment the next line to run commands after the container is created - for example installing curl. - // "postCreateCommand": "apt-get update && apt-get install -y curl", + // Uncomment the next line to run commands after the container is created - for example installing curl. + // "postCreateCommand": "apt-get update && apt-get install -y curl", - // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root. - // "remoteUser": "vscode" + // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root. + // "remoteUser": "vscode" } diff --git a/tests/state/query/q1.json b/tests/state/query/q1.json index c84cdc182..b31c7a226 100644 --- a/tests/state/query/q1.json +++ b/tests/state/query/q1.json @@ -1,4 +1,3 @@ - { "page": { "limit": 2 diff --git a/tests/state/query/q2-token.json b/tests/state/query/q2-token.json index 111cc0de2..832109e49 100644 --- a/tests/state/query/q2-token.json +++ b/tests/state/query/q2-token.json @@ -1,4 +1,3 @@ - { "filter": { "EQ": { diff --git a/tests/state/query/q2.json b/tests/state/query/q2.json index ec76b0eb9..6b56f0903 100644 --- a/tests/state/query/q2.json +++ b/tests/state/query/q2.json @@ -1,4 +1,3 @@ - { "filter": { "EQ": { diff --git a/tests/state/query/q3.json b/tests/state/query/q3.json index 94eb240d6..999e0521d 100644 --- a/tests/state/query/q3.json +++ b/tests/state/query/q3.json @@ -8,7 +8,7 @@ }, { "IN": { - "state":["CA", "WA"] + "state": ["CA", "WA"] } } ]