# ------------------------------------------------------------
# Copyright 2021 The Dapr Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------

name: Stable Components Certification Tests
on:
  repository_dispatch:
    types: [certification-test]
  workflow_dispatch:
  schedule:
    - cron: '5 */12 * * *'
  pull_request:
    branches:
      - master
      - release-*
jobs:
  # Based on whether this is a PR or a scheduled run, we will run a different
  # subset of the certification tests. This allows all the tests not requiring
  # secrets to be executed on pull requests.
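  #
  # Outputs (illustrative summary of the steps below): "pr-components" is a JSON
  # array of component names, while "cloud-components" is a JSON array of objects
  # that also carry required-secrets, required-certs and terraform-dir, which the
  # certification job's conditional steps key off.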
  generate-matrix:
    runs-on: ubuntu-latest
    steps:
      - name: Parse repository_dispatch payload
        if: github.event_name == 'repository_dispatch'
        working-directory: ${{ github.workspace }}
        run: |
          if [ ${{ github.event.client_payload.command }} = "ok-to-test" ]; then
            echo "CHECKOUT_REF=${{ github.event.client_payload.pull_head_ref }}" >> $GITHUB_ENV
            echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV
          fi
      - name: Install yq
        run: |
          sudo snap install yq
      - name: Specify components that can be run on every PR
        id: pr-components
        run: |
          PR_COMPONENTS=$(yq -I0 --tojson eval - << EOF
          - pubsub.kafka
          - pubsub.rabbitmq
          - pubsub.pulsar
          - pubsub.mqtt3
          - state.mongodb
          - state.redis
          - state.cockroachdb
          - state.postgresql
          - state.cassandra
          - state.memcached
          - state.mysql
          - bindings.alicloud.dubbo
          - bindings.kafka
          - bindings.redis
          - bindings.cron
          - secretstores.local.env
          - secretstores.local.file
          - secretstores.hashicorp.vault
          - bindings.rabbitmq
          - bindings.localstorage
          - bindings.postgres
          EOF
          )
          echo "pr-components=$PR_COMPONENTS" >> $GITHUB_OUTPUT
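      # The heredoc above is emitted by yq as a single-line JSON array (for example
      # ["pubsub.kafka","pubsub.rabbitmq",...]) so the certification job can consume
      # it through fromJson() in its matrix definition.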
      - name: Specify components requiring cloud resources to run
        id: cloud-components
        run: |
          # Skip cloud components on PRs; they require a scheduled run
          # or an approver triggering repository-dispatch via /ok-to-test.
          if [ "${{ github.event_name }}" = "pull_request" ]; then
            echo "cloud-components=[]" >> $GITHUB_OUTPUT
            exit
          fi
          # Reuse the same cloud infrastructure as conformance.yml
          #
          # Unfortunately, Azure secrets can't have underscores in
          # names, while environment variables with hyphens ('-') are
          # troublesome.
          #
          # We work around this by leveraging the fact that
          # environment variable names are case sensitive, so
          # CamelCase still works.
          #
          # That is slightly better than something like
          # AZURECOSMOSDBMASTERKEY, which is extremely hard to read
          # and error-prone.
          #
          # Only list the secrets you need for the component.
          CRON_COMPONENTS=$(yq -I0 --tojson eval - << EOF
          - component: secretstores.azure.keyvault
            required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreClientId,AzureKeyVaultSecretStoreServicePrincipalClientId,AzureKeyVaultSecretStoreServicePrincipalClientSecret,AzureContainerRegistryName,AzureResourceGroupName
            required-certs: AzureKeyVaultSecretStoreCert
          - component: state.sqlserver
            required-secrets: AzureSqlServerConnectionString
          - component: bindings.azure.servicebusqueues
            required-secrets: AzureServiceBusConnectionString
          - component: bindings.azure.cosmosdb
            required-secrets: AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection,AzureCosmosDBMasterKey,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
          - component: bindings.azure.eventhubs
            required-secrets: AzureEventHubsBindingsConnectionString,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureEventHubsBindingsHub,AzureEventHubsBindingsNamespace,AzureEventHubsBindingsConsumerGroup,AzureCertificationServicePrincipalClientId,AzureCertificationTenantId,AzureCertificationServicePrincipalClientSecret,AzureResourceGroupName,AzureCertificationSubscriptionId,AzureEventHubsBindingsContainer,AzureIotHubEventHubConnectionString,AzureIotHubName,AzureIotHubBindingsConsumerGroup
          - component: pubsub.azure.eventhubs
            required-secrets: AzureEventHubsPubsubTopicActiveConnectionString,AzureEventHubsPubsubNamespace,AzureEventHubsPubsubNamespaceConnectionString,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureEventHubsPubsubContainer,AzureIotHubName,AzureIotHubEventHubConnectionString,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret,AzureResourceGroupName,AzureCertificationSubscriptionId
          - component: pubsub.azure.servicebus.topics
            required-secrets: AzureServiceBusConnectionString,AzureServiceBusNamespace,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
          - component: bindings.azure.blobstorage
            required-secrets: AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureBlobStorageContainer,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
          - component: bindings.azure.storagequeues
            required-secrets: AzureBlobStorageAccount,AzureBlobStorageAccessKey
          - component: state.azure.tablestorage
            required-secrets: AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
          - component: state.azure.blobstorage
            required-secrets: AzureBlobStorageContainer,AzureBlobStorageAccount,AzureBlobStorageAccessKey,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
          - component: state.azure.cosmosdb
            required-secrets: AzureCosmosDBMasterKey,AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection,AzureCertificationTenantId,AzureCertificationServicePrincipalClientId,AzureCertificationServicePrincipalClientSecret
          - component: pubsub.aws.snssqs
            terraform-dir: pubsub/aws/snssqs
          - component: state.aws.dynamodb
            terraform-dir: state/aws/dynamodb

          EOF
          )
          echo "cloud-components=$CRON_COMPONENTS" >> $GITHUB_OUTPUT
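      # Illustrative note: each name in "required-secrets" above is fetched verbatim
      # from Azure Key Vault by the certification job's "Setup secrets" step and
      # exported as an environment variable of the same CamelCase name, e.g.
      # AzureSqlServerConnectionString for state.sqlserver.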
      - name: Create PR comment
        if: env.PR_NUMBER != ''
        uses: artursouza/sticky-pull-request-comment@v2.2.0
        with:
          header: ${{ github.run_id }}
          number: ${{ env.PR_NUMBER }}
          GITHUB_TOKEN: ${{ secrets.DAPR_BOT_TOKEN }}
          message: |
            # Components certification test

            🔗 **[Link to Action run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})**

            Commit ref: ${{ env.CHECKOUT_REF }}

    outputs:
      pr-components: ${{ steps.pr-components.outputs.pr-components }}
      cloud-components: ${{ steps.cloud-components.outputs.cloud-components }}
  certification:
    name: ${{ matrix.component }} certification
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    needs: generate-matrix
    env:
      UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}}

    strategy:
      fail-fast: false # Keep running even if one component fails
      matrix:
        component: ${{ fromJson(needs.generate-matrix.outputs.pr-components) }}
        include: ${{ fromJson(needs.generate-matrix.outputs.cloud-components) }}
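    # UNIQUE_ID (run id + run attempt) keeps Terraform-provisioned resource names
    # unique across re-runs; the "include" list merges the cloud components, with
    # their required-secrets, required-certs and terraform-dir keys, into the same
    # matrix as the plain PR components.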
    steps:
      - name: Set default payload repo and ref
        run: |
          echo "CHECKOUT_REPO=${{ github.repository }}" >> $GITHUB_ENV
          echo "CHECKOUT_REF=${{ github.ref }}" >> $GITHUB_ENV

      - name: Parse repository_dispatch payload
        if: github.event_name == 'repository_dispatch'
        run: |
          if [ ${{ github.event.client_payload.command }} = "ok-to-test" ]; then
            echo "CHECKOUT_REPO=${{ github.event.client_payload.pull_head_repo }}" >> $GITHUB_ENV
            echo "CHECKOUT_REF=${{ github.event.client_payload.pull_head_ref }}" >> $GITHUB_ENV
          fi
      - name: Check out code
        uses: actions/checkout@v3
        with:
          repository: ${{ env.CHECKOUT_REPO }}
          ref: ${{ env.CHECKOUT_REF }}
      - name: Setup test output
        run: |
          export TEST_OUTPUT_FILE_PREFIX=$GITHUB_WORKSPACE/test_report
          echo "TEST_OUTPUT_FILE_PREFIX=$TEST_OUTPUT_FILE_PREFIX" >> $GITHUB_ENV
      - name: Configure certification test and source path
        run: |
          TEST_COMPONENT=$(echo ${{ matrix.component }} | sed -E 's/\./\//g')
          export TEST_PATH="tests/certification/${TEST_COMPONENT}"
          echo "TEST_PATH=$TEST_PATH" >> $GITHUB_ENV
          export SOURCE_PATH="github.com/dapr/components-contrib/${TEST_COMPONENT}"
          echo "SOURCE_PATH=$SOURCE_PATH" >> $GITHUB_ENV
          # Convert the slashes in this string to dots so that it can be used as a
          # flat file name instead of being treated as nested sub-folders.
          export SOURCE_PATH_LINEAR=$(echo "$SOURCE_PATH" | sed 's#/#\.#g')
          echo "SOURCE_PATH_LINEAR=$SOURCE_PATH_LINEAR" >> $GITHUB_ENV
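          # Illustrative example: for matrix.component "pubsub.azure.eventhubs" the lines
          # above yield TEST_PATH=tests/certification/pubsub/azure/eventhubs and
          # SOURCE_PATH_LINEAR=github.com.dapr.components-contrib.pubsub.azure.eventhubs,
          # the latter being used later as the coverage artifact's file name.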
      - uses: Azure/login@v1
        with:
          creds: ${{ secrets.AZURE_CREDENTIALS }}
        if: matrix.required-secrets != ''

      # Set this GitHub secret to your Key Vault, and grant the Key Vault policy to your Service Principal:
      #    az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
      # The az CLI is used to query Key Vault because Azure/get-keyvault-secrets@v1 is deprecated.
      - name: Setup secrets
        id: get-azure-secrets
        if: matrix.required-secrets != ''
        env:
          VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }}
        run: |
          secrets="${{ matrix.required-secrets }}"
          for secretName in $(echo -n $secrets | tr ',' ' '); do
            value=$(az keyvault secret show \
              --name $secretName \
              --vault-name $VAULT_NAME \
              --query value \
              --output tsv)
            echo "::add-mask::$value"
            echo "$secretName=$value" >> $GITHUB_OUTPUT
            echo "$secretName=$value" >> $GITHUB_ENV
          done
      # Download the required certificates into files, and set env vars pointing to their file names
      - name: Setup certs
        if: matrix.required-certs != ''
        working-directory: ${{ env.TEST_PATH }}
        run: |
          for CERT_NAME in $(echo "${{ matrix.required-certs }}" | sed 's/,/ /g'); do
            CERT_FILE=$(mktemp --suffix .pfx)
            echo "Downloading cert $CERT_NAME into file $CERT_FILE"
            rm $CERT_FILE && \
              az keyvault secret download --vault-name ${{ secrets.AZURE_KEYVAULT }} --name $CERT_NAME --encoding base64 --file $CERT_FILE
            echo "Setting $CERT_NAME to $CERT_FILE"
            echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV
          done
      - name: Get current time
        run: |
          echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV}
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v2
        if: matrix.terraform-dir != ''
      - name: Set AWS Region
        if: contains(matrix.component, 'aws')
        run: |
          AWS_REGION="us-west-1"
          echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
          aws-region: "${{ env.AWS_REGION }}"
        if: matrix.terraform-dir != ''
      - name: Terraform Init
        id: init
        run: terraform init
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        if: matrix.terraform-dir != ''

      - name: Terraform Validate
        id: validate
        run: terraform validate -no-color
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        if: matrix.terraform-dir != ''

      - name: Terraform Plan
        id: plan
        run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        if: matrix.terraform-dir != ''

      - name: Terraform Apply
        run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        if: matrix.terraform-dir != ''
        continue-on-error: true
      - name: Create aws.snssqs specific variables
        if: contains(matrix.component, 'snssqs')
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        run: |
          PUBSUB_AWS_SNSSQS_QUEUE_1="sqssnscerttest-q1-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_1=$PUBSUB_AWS_SNSSQS_QUEUE_1" >> $GITHUB_ENV

          PUBSUB_AWS_SNSSQS_QUEUE_2="sqssnscerttest-q2-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_2=$PUBSUB_AWS_SNSSQS_QUEUE_2" >> $GITHUB_ENV

          PUBSUB_AWS_SNSSQS_QUEUE_3="sqssnscerttest-q3-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_3=$PUBSUB_AWS_SNSSQS_QUEUE_3" >> $GITHUB_ENV
          PUBSUB_AWS_SNSSQS_TOPIC_3="sqssnscerttest-t3-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_TOPIC_3=$PUBSUB_AWS_SNSSQS_TOPIC_3" >> $GITHUB_ENV

          PUBSUB_AWS_SNSSQS_QUEUE_MVT="sqssnscerttest-q-mvt-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_MVT=$PUBSUB_AWS_SNSSQS_QUEUE_MVT" >> $GITHUB_ENV
          PUBSUB_AWS_SNSSQS_TOPIC_MVT="sqssnscerttest-tp-mvt-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_TOPIC_MVT=$PUBSUB_AWS_SNSSQS_TOPIC_MVT" >> $GITHUB_ENV

          PUBSUB_AWS_SNSSQS_QUEUE_DLIN="sqssnscerttest-dlq-in-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_DLIN=$PUBSUB_AWS_SNSSQS_QUEUE_DLIN" >> $GITHUB_ENV
          PUBSUB_AWS_SNSSQS_QUEUE_DLOUT="sqssnscerttest-dlq-out-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_DLOUT=$PUBSUB_AWS_SNSSQS_QUEUE_DLOUT" >> $GITHUB_ENV

          PUBSUB_AWS_SNSSQS_TOPIC_DLIN="sqssnscerttest-dlt-in-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_TOPIC_DLIN=$PUBSUB_AWS_SNSSQS_TOPIC_DLIN" >> $GITHUB_ENV
          PUBSUB_AWS_SNSSQS_TOPIC_DLOUT="sqssnscerttest-dlt-out-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_TOPIC_DLOUT=$PUBSUB_AWS_SNSSQS_TOPIC_DLOUT" >> $GITHUB_ENV

          PUBSUB_AWS_SNSSQS_QUEUE_FIFO="sqssnscerttest-q-fifo-${{env.UNIQUE_ID}}.fifo"
          echo "PUBSUB_AWS_SNSSQS_QUEUE_FIFO=$PUBSUB_AWS_SNSSQS_QUEUE_FIFO" >> $GITHUB_ENV
          PUBSUB_AWS_SNSSQS_TOPIC_FIFO="sqssnscerttest-t-fifo-${{env.UNIQUE_ID}}.fifo"
          echo "PUBSUB_AWS_SNSSQS_TOPIC_FIFO=$PUBSUB_AWS_SNSSQS_TOPIC_FIFO" >> $GITHUB_ENV
          PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID="sqssnscerttest-q-fifo-${{env.UNIQUE_ID}}"
          echo "PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID=$PUBSUB_AWS_SNSSQS_FIFO_GROUP_ID" >> $GITHUB_ENV

          AWS_REGION="us-east-1"
          echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV
      - name: Create state aws.dynamodb specific variables
        if: contains(matrix.component, 'dynamodb')
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        run: |
          STATE_AWS_DYNAMODB_TABLE_1="certification-test-terraform-basic-${{ env.UNIQUE_ID }}"
          echo "STATE_AWS_DYNAMODB_TABLE_1=$STATE_AWS_DYNAMODB_TABLE_1" >> $GITHUB_ENV
          STATE_AWS_DYNAMODB_TABLE_2="certification-test-terraform-partition-key-${{ env.UNIQUE_ID }}"
          echo "STATE_AWS_DYNAMODB_TABLE_2=$STATE_AWS_DYNAMODB_TABLE_2" >> $GITHUB_ENV
          AWS_REGION="us-east-1"
          echo "AWS_REGION=$AWS_REGION" >> $GITHUB_ENV
      - name: Set up Go
        uses: actions/setup-go@v3
        with:
          go-version: '^1.19'
      - name: Download Go dependencies
        working-directory: ${{ env.TEST_PATH }}
        run: |
          go mod download
          go install gotest.tools/gotestsum@latest
          go install github.com/axw/gocov/gocov@v1.1.0
      - name: Check that go mod tidy is up-to-date
        working-directory: ${{ env.TEST_PATH }}
        run: |
          go mod tidy -compat=1.19
          git diff --exit-code ./go.mod
          git diff --exit-code ./go.sum
      - name: Run tests
        continue-on-error: false
        working-directory: ${{ env.TEST_PATH }}
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_REGION: "${{ env.AWS_REGION }}"
        run: |
          echo "Running certification tests for ${{ matrix.component }} ... "
          export GOLANG_PROTOBUF_REGISTRATION_CONFLICT=ignore
          set +e
          gotestsum --jsonfile ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.json \
            --junitfile ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.xml --format standard-quiet -- \
            -coverprofile=cover.out -covermode=set -tags=certtests -coverpkg=${{ env.SOURCE_PATH }}
          status=$?
          echo "Completed certification tests for ${{ matrix.component }} ... "
          if test $status -ne 0; then
            echo "Setting CERTIFICATION_FAILURE"
            export CERTIFICATION_FAILURE=true
          fi
          set -e

          COVERAGE_REPORT=$(gocov convert cover.out | gocov report)
          COVERAGE_LINE=$(echo $COVERAGE_REPORT | grep -oP '(?<=Total Coverage:).*') # example: "80.00% (40/50)"
          COVERAGE_PERCENTAGE=$(echo $COVERAGE_LINE | grep -oP '([0-9\.]*)' | head -n 1) # example "80.00"
          echo "COVERAGE_LINE=$COVERAGE_LINE" >> $GITHUB_ENV
          echo "COMPONENT_PERCENTAGE=$COVERAGE_PERCENTAGE" >> $GITHUB_ENV

          # Fail the step if we found no test to run
          if grep -q "\[no test files\]" ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.json ; then
            echo "::error:: No certification test file was found for component ${{ matrix.component }}"
            exit -1
          fi

          for CERT_NAME in $(echo "${{ matrix.required-certs }}" | sed 's/,/ /g'); do
            CERT_FILE=$(printenv $CERT_NAME)

            echo "Cleaning up the certificate file $CERT_FILE..."
            rm $CERT_FILE
          done

          if [[ -v CERTIFICATION_FAILURE ]]; then
            echo "CERTIFICATION_FAILURE=true" >> $GITHUB_ENV
            exit 1
          else
            echo "CERTIFICATION_FAILURE=false" >> $GITHUB_ENV
          fi
      - name: Prepare test result info
        if: always()
        run: |
          mkdir -p tmp/result_files
          echo "Writing to tmp/result_files/${{ matrix.component }}.txt"
          if [[ "${{ env.CERTIFICATION_FAILURE }}" == "true" ]]; then
            echo "0" >> "tmp/result_files/${{ matrix.component }}.txt"
          else
            echo "1" >> "tmp/result_files/${{ matrix.component }}.txt"
          fi
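      # A one-line result file is written per component: "1" means the certification
      # tests passed, "0" means they failed. The post_job "Build message" step reads
      # these files to compose the PR comment.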
      - name: Upload result files
        uses: actions/upload-artifact@v3
        if: always()
        with:
          name: result_files
          path: tmp/result_files
          retention-days: 1
      - name: Prepare Cert Coverage Info
        if: github.event_name == 'schedule'
        run: |
          mkdir -p tmp/cov_files
          echo "${{ env.COVERAGE_LINE }}" >> tmp/cov_files/${{ env.SOURCE_PATH_LINEAR }}.txt
      - name: Upload Cert Coverage Artifact
        uses: actions/upload-artifact@v3
        if: github.event_name == 'schedule'
        with:
          name: certtest_cov
          path: tmp/cov_files
          retention-days: 1
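      # Note: the {{ SOURCE_PATH }} and {{ COVERAGE_LINE }} placeholders in the step
      # below are expanded from environment variables by the Discord action itself;
      # they are not GitHub Actions expressions.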
      - name: Component Coverage Discord Notification
        if: github.event_name == 'schedule'
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_MONITORING_WEBHOOK_URL }}
        uses: Ilshidur/action-discord@0c4b27844ba47cb1c7bee539c8eead5284ce9fa9
        continue-on-error: true
        with:
          args: 'Cert Test Coverage for {{ SOURCE_PATH }} is {{ COVERAGE_LINE }}'
      # Upload logs for test analytics to consume
      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@master
        with:
          name: ${{ matrix.component }}_certification_test
          path: ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.*
      - name: Terraform Destroy
        continue-on-error: true
        run: terraform destroy -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
        if: matrix.terraform-dir != ''
  post_job:
    name: Post-completion
    runs-on: ubuntu-latest
    if: always()
    needs:
      - certification
      - generate-matrix
    steps:
      - name: Parse repository_dispatch payload
        if: github.event_name == 'repository_dispatch'
        working-directory: ${{ github.workspace }}
        run: |
          if [ ${{ github.event.client_payload.command }} = "ok-to-test" ]; then
            echo "CHECKOUT_REF=${{ github.event.client_payload.pull_head_ref }}" >> $GITHUB_ENV
            echo "PR_NUMBER=${{ github.event.client_payload.issue.number }}" >> $GITHUB_ENV
          fi
      - name: Download test result artifact
        if: always() && env.PR_NUMBER != ''
        uses: actions/download-artifact@v3
        continue-on-error: true
        id: testresults
        with:
          name: result_files
          path: tmp/result_files
      - name: Build message
        if: always() && env.PR_NUMBER != ''
        # Abusing the github-script action to be able to write this in JS
        uses: actions/github-script@v6
        with:
          script: |
            const prComponents = ('${{ needs.generate-matrix.outputs.pr-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.pr-components }}')) || []
            const cloudComponents = ('${{ needs.generate-matrix.outputs.cloud-components }}' && JSON.parse('${{ needs.generate-matrix.outputs.cloud-components }}')) || []
            const allComponents = [...prComponents, ...cloudComponents]
            const basePath = '${{ steps.testresults.outputs.download-path }}'
            const testType = 'certification'

            const fs = require('fs')
            const path = require('path')

            let message = `# Components ${testType} test

            🔗 **[Link to Action run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})**

            Commit ref: ${{ env.CHECKOUT_REF }}`

            let allSuccess = true
            let allFound = true
            let notSuccess = []
            let notFound = []
            for (let i = 0; i < allComponents.length; i++) {
              let component = allComponents[i]
              if (!component) {
                continue
              }
              if (typeof component == 'object') {
                component = component.component
              }
              let found = false
              let success = false
              try {
                let read = fs.readFileSync(path.join(basePath, component + '.txt'), 'utf8')
                read = read.split('\n')[0]
                switch (read) {
                  case '1':
                    found = true
                    success = true
                    break
                  case '0':
                    found = true
                    success = false
                }
              } catch (e) {
                // ignore errors, leave found = false
              }

              if (!found) {
                allFound = false
                notFound.push(component)
              }
              if (!success) {
                allSuccess = false
                notSuccess.push(component)
              }
            }

            if (allSuccess) {
              if (allFound) {
                message += '\n\n' + `# ✅ All ${testType} tests passed

            All tests have reported a successful status` + '\n\n'
              } else {
                message += '\n\n' + `# ⚠️ Some ${testType} tests did not report status

            Although there were no failures reported, some tests did not report a status:` + '\n\n'
                for (let i = 0; i < notFound.length; i++) {
                  message += '- ' + notFound[i] + '\n'
                }
                message += '\n'
              }
            } else {
              message += '\n\n' + `# ❌ Some ${testType} tests failed

            These tests failed:` + '\n\n'
              for (let i = 0; i < notSuccess.length; i++) {
                message += '- ' + notSuccess[i] + '\n'
              }
              message += '\n'

              if (!allFound) {
                message += 'Additionally, some tests did not report a status:\n\n'
                for (let i = 0; i < notFound.length; i++) {
                  message += '- ' + notFound[i] + '\n'
                }
                message += '\n'
              }
            }

            fs.writeFileSync('message.txt', message)
      - name: Replace PR comment
        if: always() && env.PR_NUMBER != ''
        uses: artursouza/sticky-pull-request-comment@v2.2.0
        with:
          header: ${{ github.run_id }}
          number: ${{ env.PR_NUMBER }}
          GITHUB_TOKEN: ${{ secrets.DAPR_BOT_TOKEN }}
          path: message.txt
      - name: Download Cert Coverage Artifact
        uses: actions/download-artifact@v3
        continue-on-error: true
        if: success() && github.event_name == 'schedule'
        id: download
        with:
          name: certtest_cov
          path: tmp/cov_files
      - name: Calculate total coverage
        if: success() && github.event_name == 'schedule'
        run: |
          threshold=60.0
          echo "threshold=$threshold" >> $GITHUB_ENV
          aboveThreshold=0
          totalFiles=0
          ls "${{steps.download.outputs.download-path}}" | while read f; do
            while read LINE;
            do
              ratio=$(echo $LINE | cut -d "(" -f2 | cut -d ")" -f1)
              prcnt=$(echo $LINE | cut -d "(" -f1 | cut -d ")" -f1)
              tempPrcnt=$(echo $prcnt | cut -d'%' -f1)
              # Compare the percentages numerically; a plain string comparison with '>'
              # would misorder values such as 100.00 vs 60.0.
              if [ "$(awk "BEGIN { print ($tempPrcnt > $threshold) }")" = "1" ]; then aboveThreshold=$(($aboveThreshold+1)); fi
              totalFiles=$(($totalFiles+1))
              tempNumerator=$(echo $ratio | cut -d'/' -f1)
              tempDenominator=$(echo $ratio | cut -d'/' -f2)
              export numerator=$(($numerator+$tempNumerator))
              export denominator=$(($denominator+$tempDenominator))
              totalPer=$(awk "BEGIN { print (($numerator / $denominator) * 100) }")
              echo "totalPer=$totalPer" >> $GITHUB_ENV
              echo "aboveThreshold=$aboveThreshold" >> $GITHUB_ENV
              echo "totalFiles=$totalFiles" >> $GITHUB_ENV
            done < "${{steps.download.outputs.download-path}}/$f"
          done
        continue-on-error: true
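      # Illustrative arithmetic with hypothetical numbers: two coverage files containing
      # "80.00% (40/50)" and "90.00% (45/50)" give numerator=85 and denominator=100, so
      # totalPer = (85/100)*100 = 85, with aboveThreshold=2 out of totalFiles=2.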
      - name: Final Coverage Discord Notification
        if: success() && github.event_name == 'schedule'
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_MONITORING_WEBHOOK_URL }}
        uses: Ilshidur/action-discord@0c4b27844ba47cb1c7bee539c8eead5284ce9fa9
        continue-on-error: true
        with:
          args: 'Total Coverage for Certification Tests is {{ totalPer }}%. {{ aboveThreshold }} out of {{ totalFiles }} components have certification tests with code coverage > {{ threshold }}%'