Merge branch 'master' into postgres-pgx
commit 3efab74b51
@@ -0,0 +1,61 @@
version: '3.3'

services:
  primary:
    container_name: pubSubStandardSingleNode
    image: solace/solace-pubsub-standard:latest
    volumes:
      - "storage-group:/var/lib/solace"
    shm_size: 1g
    ulimits:
      core: -1
      nofile:
        soft: 2448
        hard: 6592
    deploy:
      restart_policy:
        condition: on-failure
        max_attempts: 1
    ports:
      # Port mappings: with the exception of SMF, ports are mapped straight
      # through from host to container. This may result in port collisions on
      # commonly used ports that will cause the container to fail to start.
      # Web transport
      - '8008:8008'
      # Web transport over TLS
      - '1443:1443'
      # SEMP over TLS
      - '1943:1943'
      # MQTT Default VPN
      - '1883:1883'
      # AMQP Default VPN over TLS
      - '5671:5671'
      # AMQP Default VPN
      - '5672:5672'
      # MQTT Default VPN over WebSockets
      - '8000:8000'
      # MQTT Default VPN over WebSockets / TLS
      - '8443:8443'
      # MQTT Default VPN over TLS
      - '8883:8883'
      # SEMP / PubSub+ Manager
      - '8080:8080'
      # REST Default VPN
      - '9000:9000'
      # REST Default VPN over TLS
      - '9443:9443'
      # SMF
      - '55554:55555'
      # SMF Compressed
      - '55003:55003'
      # SMF over TLS
      - '55443:55443'
      # SSH connection to CLI
      - '2222:2222'
    environment:
      - username_admin_globalaccesslevel=admin
      - username_admin_password=admin
      - system_scaling_maxconnectioncount=100

volumes:
  storage-group:
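As an aside, a quick way to sanity-check this compose file locally is to wait for the broker's key ports to accept TCP connections. The sketch below is not part of the commit; the two-minute budget is an assumption (the PubSub+ container can take a while to boot), and the ports are taken from the mappings above.

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Ports from the compose file above: 5672 (AMQP) and 8080 (SEMP / PubSub+ Manager).
	for _, addr := range []string{"localhost:5672", "localhost:8080"} {
		deadline := time.Now().Add(2 * time.Minute)
		for {
			conn, err := net.DialTimeout("tcp", addr, 5*time.Second)
			if err == nil {
				conn.Close()
				fmt.Println(addr, "is accepting connections")
				break
			}
			if time.Now().After(deadline) {
				fmt.Println(addr, "did not come up in time:", err)
				break
			}
			time.Sleep(5 * time.Second)
		}
	}
}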
@@ -0,0 +1,105 @@
terraform {
  required_version = ">=0.13"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 4.0"
    }
  }
}

variable "TIMESTAMP" {
  type        = string
  description = "Timestamp of the GitHub workflow run."
}

variable "UNIQUE_ID" {
  type        = string
  description = "Unique ID of the GitHub workflow run."
}

provider "aws" {
  region = "us-east-1"
  default_tags {
    tags = {
      Purpose   = "AutomatedTesting"
      Timestamp = "${var.TIMESTAMP}"
    }
  }
}

resource "aws_sns_topic" "testTopic" {
  name = "testTopic-${var.UNIQUE_ID}"
  tags = {
    dapr-topic-name = "testTopic-${var.UNIQUE_ID}"
  }
}

resource "aws_sns_topic" "multiTopic1" {
  name = "multiTopic1-${var.UNIQUE_ID}"
  tags = {
    dapr-topic-name = "multiTopic1-${var.UNIQUE_ID}"
  }
}

resource "aws_sns_topic" "multiTopic2" {
  name = "multiTopic2-${var.UNIQUE_ID}"
  tags = {
    dapr-topic-name = "multiTopic2-${var.UNIQUE_ID}"
  }
}

resource "aws_sqs_queue" "testQueue" {
  name = "testQueue-${var.UNIQUE_ID}"
  tags = {
    dapr-queue-name = "testQueue-${var.UNIQUE_ID}"
  }
}

resource "aws_sns_topic_subscription" "multiTopic1_testQueue" {
  topic_arn = aws_sns_topic.multiTopic1.arn
  protocol  = "sqs"
  endpoint  = aws_sqs_queue.testQueue.arn
}

resource "aws_sns_topic_subscription" "multiTopic2_testQueue" {
  topic_arn = aws_sns_topic.multiTopic2.arn
  protocol  = "sqs"
  endpoint  = aws_sqs_queue.testQueue.arn
}

resource "aws_sns_topic_subscription" "testTopic_testQueue" {
  topic_arn = aws_sns_topic.testTopic.arn
  protocol  = "sqs"
  endpoint  = aws_sqs_queue.testQueue.arn
}

resource "aws_sqs_queue_policy" "testQueue_policy" {
  queue_url = "${aws_sqs_queue.testQueue.id}"

  policy = <<POLICY
{
  "Version": "2012-10-17",
  "Id": "sqspolicy",
  "Statement": [{
    "Sid": "Allow-SNS-SendMessage",
    "Effect": "Allow",
    "Principal": {
      "Service": "sns.amazonaws.com"
    },
    "Action": "sqs:SendMessage",
    "Resource": "${aws_sqs_queue.testQueue.arn}",
    "Condition": {
      "ArnEquals": {
        "aws:SourceArn": [
          "${aws_sns_topic.testTopic.arn}",
          "${aws_sns_topic.multiTopic1.arn}",
          "${aws_sns_topic.multiTopic2.arn}"
        ]
      }
    }
  }]
}
POLICY
}
@@ -53,7 +53,7 @@ module.exports = async ({ github, context }) => {
 async function handleIssueCommentCreate({ github, context }) {
     const payload = context.payload;
     const issue = context.issue;
-    const username = context.actor;
+    const username = (context.actor || "").toLowerCase();
     const isFromPulls = !!payload.issue.pull_request;
     const commentBody = payload.comment.body;

@@ -70,15 +70,12 @@ async function handleIssueCommentCreate({ github, context }) {
     }

     // Commands that can only be executed by owners.
-    if (owners.indexOf(username) < 0) {
+    if (owners.map((v) => v.toLowerCase()).indexOf(username) < 0) {
         console.log(`[handleIssueCommentCreate] user ${username} is not an owner, exiting.`);
         return;
     }

     switch (command) {
-        case "/make-me-laugh":
-            await cmdMakeMeLaugh(github, issue);
-            break;
         case "/ok-to-test":
             await cmdOkToTest(github, issue, isFromPulls);
             break;
@@ -108,7 +105,7 @@ async function handleIssueOrPrLabeled({ github, context }) {
     // Only authorized users can add labels to issues.
     if (label == "documentation required") {
         // Open a new docs issue
-        await github.issues.create({
+        await github.rest.issues.create({
             owner: "dapr",
             repo: "docs",
             title: `New content needed for dapr/components-contrib#${issueNumber}`,
@@ -117,7 +114,7 @@ async function handleIssueOrPrLabeled({ github, context }) {
         });
     } else if (label == "new component") {
         // Open a new dapr issue
-        await github.issues.create({
+        await github.rest.issues.create({
             owner: "dapr",
             repo: "dapr",
             title: `Component registration for dapr/components-contrib#${issueNumber}`,
@@ -145,7 +142,7 @@ async function cmdAssign(github, issue, username, isFromPulls) {
         return;
     }

-    await github.issues.addAssignees({
+    await github.rest.issues.addAssignees({
         owner: issue.owner,
         repo: issue.repo,
         issue_number: issue.number,
@@ -153,27 +150,6 @@ async function cmdAssign(github, issue, username, isFromPulls) {
     });
 }

-/**
- * Comment a funny joke.
- * @param {*} github GitHub object reference
- * @param {*} issue GitHub issue object
- */
-async function cmdMakeMeLaugh(github, issue) {
-    const result = await github.request("https://official-joke-api.appspot.com/random_joke");
-    jokedata = result.data;
-    joke = "I have a bad feeling about this.";
-    if (jokedata && jokedata.setup && jokedata.punchline) {
-        joke = `${jokedata.setup} - ${jokedata.punchline}`;
-    }
-
-    await github.issues.createComment({
-        owner: issue.owner,
-        repo: issue.repo,
-        issue_number: issue.number,
-        body: joke,
-    });
-}
-
-
 /**
  * Trigger e2e test for the pull request.
@@ -188,7 +164,7 @@ async function cmdOkToTest(github, issue, isFromPulls) {
     }

     // Get pull request
-    const pull = await github.pulls.get({
+    const pull = await github.rest.pulls.get({
         owner: issue.owner,
         repo: issue.repo,
         pull_number: issue.number
@@ -204,7 +180,7 @@ async function cmdOkToTest(github, issue, isFromPulls) {
     };

     // Fire repository_dispatch event to trigger certification test
-    await github.repos.createDispatchEvent({
+    await github.rest.repos.createDispatchEvent({
         owner: issue.owner,
         repo: issue.repo,
         event_type: "certification-test",
@@ -212,7 +188,7 @@ async function cmdOkToTest(github, issue, isFromPulls) {
     });

     // Fire repository_dispatch event to trigger conformance test
-    await github.repos.createDispatchEvent({
+    await github.rest.repos.createDispatchEvent({
         owner: issue.owner,
         repo: issue.repo,
         event_type: "conformance-test",
@@ -151,6 +151,8 @@ jobs:
       run:
         shell: bash
     needs: generate-matrix
+    env:
+      UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}}

     strategy:
       fail-fast: false # Keep running even if one component fails
@@ -199,15 +201,26 @@ jobs:
           creds: ${{ secrets.AZURE_CREDENTIALS }}
         if: matrix.required-secrets != ''

+      # Set this GitHub secret to your KeyVault, and grant the KeyVault policy to your Service Principal:
+      # az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
+      # Using az cli to query keyvault as Azure/get-keyvault-secrets@v1 is deprecated
       - name: Setup secrets
-        uses: Azure/get-keyvault-secrets@v1
-        with:
-          # Set this GitHub secret to your KeyVault, and grant the KeyVault policy to your Service Principal:
-          # az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
-          keyvault: ${{ secrets.AZURE_KEYVAULT }}
-          secrets: ${{ matrix.required-secrets }}
         id: get-azure-secrets
         if: matrix.required-secrets != ''
+        env:
+          VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }}
+        run: |
+          secrets="${{ matrix.required-secrets }}"
+          for secretName in $(echo -n $secrets | tr ',' ' '); do
+            value=$(az keyvault secret show \
+              --name $secretName \
+              --vault-name $VAULT_NAME \
+              --query value \
+              --output tsv)
+            echo "::add-mask::$value"
+            echo "$secretName=$value" >> $GITHUB_OUTPUT
+            echo "$secretName=$value" >> $GITHUB_ENV
+          done

       # Download the required certificates into files, and set env var pointing to their names
       - name: Setup certs
@@ -223,6 +236,46 @@ jobs:
echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV
|
echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV
|
||||||
done
|
done
|
||||||
|
|
||||||
|
- name: Get current time
|
||||||
|
run: |
|
||||||
|
echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV}
|
||||||
|
|
||||||
|
- name: Setup Terraform
|
||||||
|
uses: hashicorp/setup-terraform@v2
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Configure AWS Credentials
|
||||||
|
uses: aws-actions/configure-aws-credentials@v1
|
||||||
|
with:
|
||||||
|
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
|
||||||
|
aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
|
||||||
|
aws-region: us-west-1
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Init
|
||||||
|
id: init
|
||||||
|
run: terraform init
|
||||||
|
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Validate
|
||||||
|
id: validate
|
||||||
|
run: terraform validate -no-color
|
||||||
|
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Plan
|
||||||
|
id: plan
|
||||||
|
run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
|
||||||
|
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Apply
|
||||||
|
run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
|
||||||
|
working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
- name: Set up Go
|
- name: Set up Go
|
||||||
uses: actions/setup-go@v3
|
uses: actions/setup-go@v3
|
||||||
with:
|
with:
|
||||||
|
@@ -245,6 +298,9 @@ jobs:
       - name: Run tests
         continue-on-error: false
         working-directory: ${{ env.TEST_PATH }}
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
         run: |
           echo "Running certification tests for ${{ matrix.component }} ... "
           export GOLANG_PROTOBUF_REGISTRATION_CONFLICT=ignore
@@ -336,6 +392,12 @@ jobs:
           name: ${{ matrix.component }}_certification_test
           path: ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.*

+      - name: Terraform Destroy
+        continue-on-error: true
+        run: terraform destroy -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
+        working-directory: "./.github/infrastructure/terraform/certification/${{ matrix.terraform-dir }}"
+        if: matrix.terraform-dir != ''
+
   post_job:
     name: Post-completion
     runs-on: ubuntu-latest
@@ -61,7 +61,7 @@ jobs:
       - bindings.redis.v7
       - bindings.kubemq
       - bindings.rabbitmq
-      - pubsub.aws.snssqs
+      - pubsub.aws.snssqs.docker
       - pubsub.hazelcast
       - pubsub.in-memory
       - pubsub.mqtt-emqx
@@ -74,6 +74,7 @@ jobs:
       - pubsub.kafka-wurstmeister
       - pubsub.kafka-confluent
       - pubsub.kubemq
+      - pubsub.solace
       - secretstores.kubernetes
       - secretstores.localenv
       - secretstores.localfile
@@ -149,6 +150,8 @@ jobs:
         required-secrets: AzureKeyVaultName,AzureKeyVaultSecretStoreTenantId,AzureKeyVaultSecretStoreServicePrincipalClientId,AzureKeyVaultSecretStoreServicePrincipalClientSecret
       - component: bindings.azure.cosmosdb
         required-secrets: AzureCosmosDBMasterKey,AzureCosmosDBUrl,AzureCosmosDB,AzureCosmosDBCollection
+      - component: pubsub.aws.snssqs.terraform
+        terraform-dir: pubsub/aws/snssqs
       - component: state.cloudflare.workerskv
       EOF
       )
@@ -179,6 +182,7 @@ jobs:
       # Version of Node.js to use
       # Currently used by the Cloudflare components
       NODE_VERSION: 18.x
+      UNIQUE_ID: ${{github.run_id}}-${{github.run_attempt}}
     defaults:
       run:
         shell: bash
@@ -223,15 +227,26 @@ jobs:
           creds: ${{ secrets.AZURE_CREDENTIALS }}
         if: matrix.required-secrets != ''

+      # Set this GitHub secret to your KeyVault, and grant the KeyVault policy to your Service Principal:
+      # az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
+      # Using az cli to query keyvault as Azure/get-keyvault-secrets@v1 is deprecated
       - name: Setup secrets
-        uses: Azure/get-keyvault-secrets@v1
-        with:
-          # Set this GitHub secret to your KeyVault, and grant the KeyVault policy to your Service Principal:
-          # az keyvault set-policy -n $AZURE_KEYVAULT --secret-permissions get list --spn $SPN_CLIENT_ID
-          keyvault: ${{ secrets.AZURE_KEYVAULT }}
-          secrets: ${{ matrix.required-secrets }}
         id: get-azure-secrets
         if: matrix.required-secrets != ''
+        env:
+          VAULT_NAME: ${{ secrets.AZURE_KEYVAULT }}
+        run: |
+          secrets="${{ matrix.required-secrets }}"
+          for secretName in $(echo -n $secrets | tr ',' ' '); do
+            value=$(az keyvault secret show \
+              --name $secretName \
+              --vault-name $VAULT_NAME \
+              --query value \
+              --output tsv)
+            echo "::add-mask::$value"
+            echo "$secretName=$value" >> $GITHUB_OUTPUT
+            echo "$secretName=$value" >> $GITHUB_ENV
+          done

       - name: Start ngrok
         if: contains(matrix.component, 'azure.eventgrid')
@@ -261,6 +276,58 @@ jobs:
echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV
|
echo "$CERT_NAME=$CERT_FILE" >> $GITHUB_ENV
|
||||||
done
|
done
|
||||||
|
|
||||||
|
- name: Get current time
|
||||||
|
run: |
|
||||||
|
echo "CURRENT_TIME=$(date --rfc-3339=date)" >> ${GITHUB_ENV}
|
||||||
|
|
||||||
|
- name: Setup Terraform
|
||||||
|
uses: hashicorp/setup-terraform@v2
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Configure AWS Credentials
|
||||||
|
uses: aws-actions/configure-aws-credentials@v1
|
||||||
|
with:
|
||||||
|
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
|
||||||
|
aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
|
||||||
|
aws-region: us-west-1
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Init
|
||||||
|
id: init
|
||||||
|
run: terraform init
|
||||||
|
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Validate
|
||||||
|
id: validate
|
||||||
|
run: terraform validate -no-color
|
||||||
|
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Plan
|
||||||
|
id: plan
|
||||||
|
run: terraform plan -no-color -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
|
||||||
|
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
|
||||||
|
- name: Terraform Apply
|
||||||
|
run: terraform apply -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
|
||||||
|
working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
|
||||||
|
if: matrix.terraform-dir != ''
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Create aws.snssqs variables
|
||||||
|
run: |
|
||||||
|
PUBSUB_AWS_SNSSQS_QUEUE="testQueue-${{ env.UNIQUE_ID }}"
|
||||||
|
echo "PUBSUB_AWS_SNSSQS_QUEUE=$PUBSUB_AWS_SNSSQS_QUEUE" >> $GITHUB_ENV
|
||||||
|
PUBSUB_AWS_SNSSQS_TOPIC="testTopic-${{ env.UNIQUE_ID }}"
|
||||||
|
echo "PUBSUB_AWS_SNSSQS_TOPIC=$PUBSUB_AWS_SNSSQS_TOPIC" >> $GITHUB_ENV
|
||||||
|
PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1="multiTopic1-${{ env.UNIQUE_ID }}"
|
||||||
|
echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1=$PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1" >> $GITHUB_ENV
|
||||||
|
PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2="multiTopic2-${{ env.UNIQUE_ID }}"
|
||||||
|
echo "PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2=$PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2" >> $GITHUB_ENV
|
||||||
|
if: contains(matrix.component, 'snssqs')
|
||||||
|
|
||||||
- name: Start Redis 6 with Redis JSON
|
- name: Start Redis 6 with Redis JSON
|
||||||
run: docker-compose -f ./.github/infrastructure/docker-compose-redisjson.yml -p redis up -d
|
run: docker-compose -f ./.github/infrastructure/docker-compose-redisjson.yml -p redis up -d
|
||||||
if: contains(matrix.component, 'redis.v6')
|
if: contains(matrix.component, 'redis.v6')
|
||||||
|
@@ -363,7 +430,7 @@ jobs:

       - name: Start aws snssqs
         run: docker-compose -f ./.github/infrastructure/docker-compose-snssqs.yml -p snssqs up -d
-        if: contains(matrix.component, 'aws.snssqs')
+        if: contains(matrix.component, 'aws.snssqs.docker')

       - name: Start influxdb
         run: |
@@ -414,6 +481,10 @@ jobs:
       - name: Start kubemq
         run: docker-compose -f ./.github/infrastructure/docker-compose-kubemq.yml -p kubemq up -d
         if: contains(matrix.component, 'kubemq')

+      - name: Start solace
+        run: docker-compose -f ./.github/infrastructure/docker-compose-solace.yml -p solace up -d
+        if: contains(matrix.component, 'solace')
+
       - name: Start nats with JetStream
         run: |
@@ -445,6 +516,9 @@ jobs:
       - name: Run tests
         continue-on-error: true
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
         run: |
           set -e
           KIND=$(echo ${{ matrix.component }} | cut -d. -f1)
@@ -524,6 +598,12 @@ jobs:
           rm $CERT_FILE
         done

+      - name: Terraform Destroy
+        continue-on-error: true
+        run: terraform destroy -auto-approve -var="UNIQUE_ID=${{env.UNIQUE_ID}}" -var="TIMESTAMP=${{env.CURRENT_TIME}}"
+        working-directory: "./.github/infrastructure/terraform/conformance/${{ matrix.terraform-dir }}"
+        if: matrix.terraform-dir != ''
+
       - name: Check conformance test passed
         continue-on-error: false
         run: |
@@ -35,13 +35,14 @@ jobs:
   prune_stale:
     name: Prune Stale
     runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
     steps:
       - name: Prune Stale
-        uses: actions/stale@v3.0.14
+        uses: actions/stale@v7.0.0
         with:
-          repo-token: ${{ secrets.DAPR_BOT_TOKEN }}
-          # Different amounts of days for issues/PRs are not currently supported but there is a PR
-          # open for it: https://github.com/actions/stale/issues/214
+          repo-token: ${{ github.token }}
           days-before-stale: 30
           days-before-close: 7
           stale-issue-message: >
@@ -27,9 +27,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v2 # required to make the script available for next step
+        uses: actions/checkout@v3 # required to make the script available for next step
       - name: Issue analyzer
-        uses: actions/github-script@v4
+        uses: actions/github-script@v6
         with:
           github-token: ${{secrets.DAPR_BOT_TOKEN}}
           script: |
@@ -1,12 +1,12 @@
 {
   "name": "dapr-cfworkers-client",
-  "version": "20221219",
+  "version": "20221228",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "dapr-cfworkers-client",
-      "version": "20221219",
+      "version": "20221228",
       "license": "Apache2",
       "dependencies": {
         "itty-router": "^2.6.6",
@@ -2,7 +2,7 @@
"private": true,
|
"private": true,
|
||||||
"name": "dapr-cfworkers-client",
|
"name": "dapr-cfworkers-client",
|
||||||
"description": "Client code for Dapr to interact with Cloudflare Workers",
|
"description": "Client code for Dapr to interact with Cloudflare Workers",
|
||||||
"version": "20221219",
|
"version": "20221228",
|
||||||
"main": "worker.ts",
|
"main": "worker.ts",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "esbuild --bundle --minify --outfile=../workers/code/worker.js --format=esm --platform=browser --sourcemap worker.ts",
|
"build": "esbuild --bundle --minify --outfile=../workers/code/worker.js --format=esm --platform=browser --sourcemap worker.ts",
|
||||||
|
|
|
@@ -40,24 +40,20 @@ const router = Router()
       continue
     }
     const obj = env[all[i]]
-    if (!obj || typeof obj != 'object') {
+    if (!obj || typeof obj != 'object' || !obj.constructor) {
       continue
     }
-    if (
-      (obj as Queue<string>) &&
-      typeof (obj as Queue<string>).send == 'function'
-    ) {
-      queues.push(all[i])
-    } else if (
-      (obj as KVNamespace) &&
-      typeof (obj as KVNamespace).getWithMetadata == 'function'
-    ) {
-      kv.push(all[i])
-    } else if (
-      (obj as R2Bucket) &&
-      typeof (obj as R2Bucket).createMultipartUpload == 'function'
-    ) {
-      r2.push(all[i])
-    }
+    switch (obj.constructor.name) {
+      case 'KVNamespace':
+        kv.push(all[i])
+        break
+      case 'Queue':
+        queues.push(all[i])
+        break
+      case 'R2Bucket':
+        // Note that we currently don't support R2 yet
+        r2.push(all[i])
+        break
+    }
   }

@@ -174,7 +170,7 @@ async function setupKVRequest(
     return { errorRes: new Response('Bad request', { status: 400 }) }
   }
   const namespace = env[req.params.namespace] as KVNamespace<string>
-  if (!namespace || typeof namespace.getWithMetadata != 'function') {
+  if (typeof namespace != 'object' || namespace?.constructor?.name != 'KVNamespace') {
     return {
       errorRes: new Response(
         `Worker is not bound to KV '${req.params.kv}'`,
@@ -200,7 +196,7 @@ async function setupQueueRequest(
     return { errorRes: new Response('Bad request', { status: 400 }) }
   }
   const queue = env[req.params.queue] as Queue<string>
-  if (!queue || typeof queue.send != 'function') {
+  if (typeof queue != 'object' || queue?.constructor?.name != 'Queue') {
     return {
       errorRes: new Response(
         `Worker is not bound to queue '${req.params.queue}'`,
File diff suppressed because one or more lines are too long
@@ -0,0 +1,292 @@
/*
Copyright 2021 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package amqp

import (
    "context"
    "crypto/tls"
    "crypto/x509"
    "errors"
    "fmt"
    "net/url"
    "strconv"
    "strings"
    "sync"
    "time"

    amqp "github.com/Azure/go-amqp"

    "github.com/dapr/components-contrib/pubsub"
    "github.com/dapr/kit/logger"
)

const (
    publishRetryWaitSeconds = 2
    publishMaxRetries       = 3
)

// amqpPubSub type allows sending and receiving data to/from an AMQP 1.0 broker
type amqpPubSub struct {
    session           *amqp.Session
    metadata          *metadata
    logger            logger.Logger
    publishLock       sync.RWMutex
    publishRetryCount int
    ctx               context.Context
    cancel            context.CancelFunc
}

// NewAMQPPubsub returns a new AMQPPubSub instance
func NewAMQPPubsub(logger logger.Logger) pubsub.PubSub {
    return &amqpPubSub{
        logger:      logger,
        publishLock: sync.RWMutex{},
    }
}

// Init parses the metadata and creates a new Pub Sub Client.
func (a *amqpPubSub) Init(metadata pubsub.Metadata) error {
    amqpMeta, err := parseAMQPMetaData(metadata, a.logger)
    if err != nil {
        return err
    }

    a.metadata = amqpMeta

    a.ctx, a.cancel = context.WithCancel(context.Background())

    s, err := a.connect()
    if err != nil {
        return err
    }

    a.session = s

    return err
}

func AddPrefixToAddress(t string) string {
    dest := t

    // Unless the request comes in to publish on a queue, publish directly on a topic
    if !strings.HasPrefix(dest, "queue:") && !strings.HasPrefix(dest, "topic:") {
        dest = "topic://" + dest
    } else if strings.HasPrefix(dest, "queue:") {
        dest = strings.Replace(dest, "queue:", "queue://", 1)
    } else if strings.HasPrefix(dest, "topic:") {
        dest = strings.Replace(dest, "topic:", "topic://", 1)
    }

    return dest
}
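// For illustration (hypothetical inputs, not part of the committed file),
// AddPrefixToAddress maps bare names to topics and expands the shorthands:
//   "orders"       -> "topic://orders"
//   "queue:orders" -> "queue://orders"
//   "topic:orders" -> "topic://orders"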
// Publish the topic to amqp pubsub
func (a *amqpPubSub) Publish(ctx context.Context, req *pubsub.PublishRequest) error {
    a.publishLock.Lock()
    defer a.publishLock.Unlock()

    a.publishRetryCount = 0

    if req.Topic == "" {
        return errors.New("topic name is empty")
    }

    m := amqp.NewMessage(req.Data)

    // If the request has ttl specified, put it on the message header
    ttlProp := req.Metadata["ttlInSeconds"]
    if ttlProp != "" {
        ttlInSeconds, err := strconv.Atoi(ttlProp)
        if err != nil {
            a.logger.Warnf("Invalid ttl received from message %s", ttlProp)
        } else {
            m.Header.TTL = time.Second * time.Duration(ttlInSeconds)
        }
    }

    sender, err := a.session.NewSender(ctx,
        AddPrefixToAddress(req.Topic),
        nil,
    )

    if err != nil {
        a.logger.Errorf("Unable to create link to %s", req.Topic, err)
    } else {
        err = sender.Send(ctx, m)

        // If the publish operation has failed, attempt to republish a maximum number of times
        // before giving up
        if err != nil {
            for a.publishRetryCount <= publishMaxRetries {
                a.publishRetryCount++

                // Send message
                err = sender.Send(ctx, m)
                if err != nil {
                    a.logger.Warnf("Failed to publish a message to the broker", err)
                }
                time.Sleep(publishRetryWaitSeconds * time.Second)
            }
        }
    }

    return err
}

func (a *amqpPubSub) Subscribe(ctx context.Context, req pubsub.SubscribeRequest, handler pubsub.Handler) error {
    prefixedTopic := AddPrefixToAddress(req.Topic)

    receiver, err := a.session.NewReceiver(a.ctx,
        prefixedTopic,
        nil,
    )

    if err == nil {
        a.logger.Infof("Attempting to subscribe to %s", prefixedTopic)
        go a.subscribeForever(ctx, receiver, handler, prefixedTopic)
    } else {
        a.logger.Error("Unable to create a receiver:", err)
    }

    return err
}

// function that subscribes to a queue in a tight loop
func (a *amqpPubSub) subscribeForever(ctx context.Context, receiver *amqp.Receiver, handler pubsub.Handler, t string) {
    for {
        // Receive next message
        msg, err := receiver.Receive(ctx)

        if msg != nil {
            data := msg.GetData()

            // if data is empty, then check the value field for data
            if len(data) == 0 {
                data = []byte(fmt.Sprint(msg.Value))
            }

            pubsubMsg := &pubsub.NewMessage{
                Data:  data,
                Topic: msg.LinkName(),
            }

            if err != nil {
                a.logger.Errorf("failed to establish receiver")
            }

            err = handler(ctx, pubsubMsg)

            if err == nil {
                err := receiver.AcceptMessage(ctx, msg)
                a.logger.Debugf("ACKed a message")
                if err != nil {
                    a.logger.Errorf("failed to acknowledge a message")
                }
            } else {
                a.logger.Errorf("Error processing message from %s", msg.LinkName())
                a.logger.Debugf("NAKd a message")
                err := receiver.RejectMessage(ctx, msg, nil)
                if err != nil {
                    a.logger.Errorf("failed to NAK a message")
                }
            }
        }
    }
}

// Connect to the AMQP broker
func (a *amqpPubSub) connect() (*amqp.Session, error) {
    uri, err := url.Parse(a.metadata.url)
    if err != nil {
        return nil, err
    }

    clientOpts := a.createClientOptions(uri)

    a.logger.Infof("Attempting to connect to %s", a.metadata.url)
    client, err := amqp.Dial(a.metadata.url, &clientOpts)
    if err != nil {
        a.logger.Fatal("Dialing AMQP server:", err)
    }

    // Open a session
    session, err := client.NewSession(a.ctx, nil)
    if err != nil {
        a.logger.Fatal("Creating AMQP session:", err)
    }

    return session, nil
}

func (a *amqpPubSub) newTLSConfig() *tls.Config {
    tlsConfig := new(tls.Config)

    if a.metadata.clientCert != "" && a.metadata.clientKey != "" {
        cert, err := tls.X509KeyPair([]byte(a.metadata.clientCert), []byte(a.metadata.clientKey))
        if err != nil {
            a.logger.Warnf("unable to load client certificate and key pair. Err: %v", err)

            return tlsConfig
        }
        tlsConfig.Certificates = []tls.Certificate{cert}
    }

    if a.metadata.caCert != "" {
        tlsConfig.RootCAs = x509.NewCertPool()
        if ok := tlsConfig.RootCAs.AppendCertsFromPEM([]byte(a.metadata.caCert)); !ok {
            a.logger.Warnf("unable to load ca certificate.")
        }
    }

    return tlsConfig
}

func (a *amqpPubSub) createClientOptions(uri *url.URL) amqp.ConnOptions {
    var opts amqp.ConnOptions

    scheme := uri.Scheme

    switch scheme {
    case "amqp":
        if a.metadata.anonymous {
            opts.SASLType = amqp.SASLTypeAnonymous()
        } else {
            opts.SASLType = amqp.SASLTypePlain(a.metadata.username, a.metadata.password)
        }
    case "amqps":
        opts.SASLType = amqp.SASLTypePlain(a.metadata.username, a.metadata.password)
        opts.TLSConfig = a.newTLSConfig()
    }

    return opts
}

// Close the session
func (a *amqpPubSub) Close() error {
    a.publishLock.Lock()

    defer a.publishLock.Unlock()

    err := a.session.Close(a.ctx)
    if err != nil {
        a.logger.Warnf("failed to close the connection.", err)
    }
    return err
}

// Feature list for AMQP PubSub
func (a *amqpPubSub) Features() []pubsub.Feature {
    return []pubsub.Feature{pubsub.FeatureSubscribeWildcards, pubsub.FeatureMessageTTL}
}
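To see the new component end to end, here is a minimal sketch. It assumes the broker from the docker-compose file above (amqp://localhost:5672 with anonymous auth, mirroring the conformance component config later in this diff) and an assumed import path for the new package; it is illustrative, not part of the commit.

package main

import (
    "context"
    "fmt"
    "time"

    mdata "github.com/dapr/components-contrib/metadata"
    "github.com/dapr/components-contrib/pubsub"
    solaceamqp "github.com/dapr/components-contrib/pubsub/solace/amqp" // assumed import path
    "github.com/dapr/kit/logger"
)

func main() {
    ps := solaceamqp.NewAMQPPubsub(logger.NewLogger("amqp-demo"))
    // url/anonymous mirror the conformance component config shown later in this diff.
    if err := ps.Init(pubsub.Metadata{Base: mdata.Base{Properties: map[string]string{
        "url":       "amqp://localhost:5672",
        "anonymous": "true",
    }}}); err != nil {
        panic(err)
    }
    defer ps.Close()

    ctx := context.Background()
    // Subscribe first so the handler is attached before the publish below.
    err := ps.Subscribe(ctx, pubsub.SubscribeRequest{Topic: "demoTopic"}, func(ctx context.Context, msg *pubsub.NewMessage) error {
        fmt.Printf("received on %s: %s\n", msg.Topic, string(msg.Data))
        return nil
    })
    if err != nil {
        panic(err)
    }

    if err := ps.Publish(ctx, &pubsub.PublishRequest{Topic: "demoTopic", Data: []byte("hello")}); err != nil {
        panic(err)
    }
    time.Sleep(2 * time.Second) // give the receive loop a moment before exiting
}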
@@ -0,0 +1,141 @@
/*
Copyright 2021 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package amqp

import (
    "crypto/x509"
    "encoding/pem"
    "errors"
    "testing"

    mdata "github.com/dapr/components-contrib/metadata"

    "github.com/dapr/components-contrib/pubsub"
    "github.com/dapr/kit/logger"

    "github.com/stretchr/testify/assert"
)

func getFakeProperties() map[string]string {
    return map[string]string{
        "consumerID": "client",
        amqpURL:      "tcp://fakeUser:fakePassword@fake.mqtt.host:1883",
        anonymous:    "false",
        username:     "default",
        password:     "default",
    }
}

func TestParseMetadata(t *testing.T) {
    log := logger.NewLogger("test")
    t.Run("metadata is correct", func(t *testing.T) {
        fakeProperties := getFakeProperties()

        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}

        m, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.NoError(t, err)
        assert.Equal(t, fakeProperties[amqpURL], m.url)
    })

    t.Run("url is not given", func(t *testing.T) {
        fakeProperties := getFakeProperties()

        fakeMetaData := pubsub.Metadata{
            Base: mdata.Base{Properties: fakeProperties},
        }
        fakeMetaData.Properties[amqpURL] = ""

        m, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.EqualError(t, err, errors.New(errorMsgPrefix+" missing url").Error())
        assert.Equal(t, fakeProperties[amqpURL], m.url)
    })

    t.Run("invalid ca certificate", func(t *testing.T) {
        fakeProperties := getFakeProperties()
        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}
        fakeMetaData.Properties[amqpCACert] = "randomNonPEMBlockCA"
        _, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.Contains(t, err.Error(), "invalid caCert")
    })

    t.Run("valid ca certificate", func(t *testing.T) {
        fakeProperties := getFakeProperties()
        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}
        fakeMetaData.Properties[amqpCACert] = "-----BEGIN CERTIFICATE-----\nMIICyDCCAbACCQDb8BtgvbqW5jANBgkqhkiG9w0BAQsFADAmMQswCQYDVQQGEwJJ\nTjEXMBUGA1UEAwwOZGFwck1xdHRUZXN0Q0EwHhcNMjAwODEyMDY1MzU4WhcNMjUw\nODEyMDY1MzU4WjAmMQswCQYDVQQGEwJJTjEXMBUGA1UEAwwOZGFwck1xdHRUZXN0\nQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDEXte1GBxFJaygsEnK\nHV2AxazZW6Vppv+i50AuURHcaGo0i8G5CTfHzSKrYtTFfBskUspl+2N8GPV5c8Eb\ng+PP6YFn1wiHVz+wRSk3BD35DcGOT2o4XsJw5tiAzJkbpAOYCYl7KAM+BtOf41uC\nd6TdqmawhRGtv1ND2WtyJOT6A3KcUfjhL4TFEhWoljPJVay4TQoJcZMAImD/Xcxw\n6urv6wmUJby3/RJ3I46ZNH3zxEw5vSq1TuzuXxQmfPJG0ZPKJtQZ2nkZ3PNZe4bd\nNUa83YgQap7nBhYdYMMsQyLES2qy3mPcemBVoBWRGODel4PMEcsQiOhAyloAF2d3\nhd+LAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAK13X5JYBy78vHYoP0Oq9fe5XBbL\nuRM8YLnet9b/bXTGG4SnCCOGqWz99swYK7SVyR5l2h8SAoLzeNV61PtaZ6fHrbar\noxSL7BoRXOhMH6LQATadyvwlJ71uqlagqya7soaPK09TtfzeebLT0QkRCWT9b9lQ\nDBvBVCaFidynJL1ts21m5yUdIY4JSu4sGZGb4FRGFdBv/hD3wH8LAkOppsSv3C/Q\nkfkDDSQzYbdMoBuXmafvi3He7Rv+e6Tj9or1rrWdx0MIKlZPzz4DOe5Rh112uRB9\n7xPHJt16c+Ya3DKpchwwdNcki0vFchlpV96HK8sMCoY9kBzPhkEQLdiBGv4=\n-----END CERTIFICATE-----\n"
        m, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.NoError(t, err)
        block, _ := pem.Decode([]byte(m.tlsCfg.caCert))
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            t.Errorf("failed to parse ca certificate from metadata. %v", err)
        }
        assert.Equal(t, "daprMqttTestCA", cert.Subject.CommonName)
    })

    t.Run("invalid client certificate", func(t *testing.T) {
        fakeProperties := getFakeProperties()
        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}
        fakeMetaData.Properties[amqpClientCert] = "randomNonPEMBlockClientCert"
        _, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.Contains(t, err.Error(), "invalid clientCert")
    })

    t.Run("valid client certificate", func(t *testing.T) {
        fakeProperties := getFakeProperties()
        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}
        fakeMetaData.Properties[amqpClientCert] = "-----BEGIN CERTIFICATE-----\nMIICzDCCAbQCCQDBKDMS3SHsDzANBgkqhkiG9w0BAQUFADAmMQswCQYDVQQGEwJJ\nTjEXMBUGA1UEAwwOZGFwck1xdHRUZXN0Q0EwHhcNMjAwODEyMDY1NTE1WhcNMjEw\nODA3MDY1NTE1WjAqMQswCQYDVQQGEwJJTjEbMBkGA1UEAwwSZGFwck1xdHRUZXN0\nQ2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5IDfsGI2pb4W\nt3CjckrKuNeTrgmla3sXxSI5wfDgLGd/XkNu++M6yi9ABaBiYChpxbylqIeAn/HT\n3r/nhcb+bldMtEkU9tODHy/QDhvN2UGFjRsMfzO9p1oMpTnRdJCHYinE+oqVced5\nHI+UEofAU+1eiIXqJGKrdfn4gvaHst4QfVPvui8WzJq9TMkEhEME+5hs3VKyKZr2\nqjIxzr7nLVod3DBf482VjxRI06Ip3fPvNuMWwzj2G+Rj8PMcBjoKeCLQL9uQh7f1\nTWHuACqNIrmFEUQWdGETnRjHWIvw0NEL40+Ur2b5+7/hoqnTzReJ3XUe1jM3l44f\nl0rOf4hu2QIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQAT9yoIeX0LTsvx7/b+8V3a\nkP+j8u97QCc8n5xnMpivcMEk5cfqXX5Llv2EUJ9kBsynrJwT7ujhTJXSA/zb2UdC\nKH8PaSrgIlLwQNZMDofbz6+zPbjStkgne/ZQkTDIxY73sGpJL8LsQVO9p2KjOpdj\nSf9KuJhLzcHolh7ry3ZrkOg+QlMSvseeDRAxNhpkJrGQ6piXoUiEeKKNa0rWTMHx\nIP1Hqj+hh7jgqoQR48NL2jNng7I64HqTl6Mv2fiNfINiw+5xmXTB0QYkGU5NvPBO\naKcCRcGlU7ND89BogQPZsl/P04tAuQqpQWffzT4sEEOyWSVGda4N2Ys3GSQGBv8e\n-----END CERTIFICATE-----\n"
        m, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.NoError(t, err)
        block, _ := pem.Decode([]byte(m.tlsCfg.clientCert))
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            t.Errorf("failed to parse client certificate from metadata. %v", err)
        }
        assert.Equal(t, "daprMqttTestClient", cert.Subject.CommonName)
    })

    t.Run("invalid client certificate key", func(t *testing.T) {
        fakeProperties := getFakeProperties()
        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}
        fakeMetaData.Properties[amqpClientKey] = "randomNonPEMBlockClientKey"
        _, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.Contains(t, err.Error(), "invalid clientKey")
    })

    t.Run("valid client certificate key", func(t *testing.T) {
        fakeProperties := getFakeProperties()
        fakeMetaData := pubsub.Metadata{Base: mdata.Base{Properties: fakeProperties}}
        fakeMetaData.Properties[amqpClientKey] = "-----BEGIN RSA PRIVATE KEY-----\nMIIEpAIBAAKCAQEA5IDfsGI2pb4Wt3CjckrKuNeTrgmla3sXxSI5wfDgLGd/XkNu\n++M6yi9ABaBiYChpxbylqIeAn/HT3r/nhcb+bldMtEkU9tODHy/QDhvN2UGFjRsM\nfzO9p1oMpTnRdJCHYinE+oqVced5HI+UEofAU+1eiIXqJGKrdfn4gvaHst4QfVPv\nui8WzJq9TMkEhEME+5hs3VKyKZr2qjIxzr7nLVod3DBf482VjxRI06Ip3fPvNuMW\nwzj2G+Rj8PMcBjoKeCLQL9uQh7f1TWHuACqNIrmFEUQWdGETnRjHWIvw0NEL40+U\nr2b5+7/hoqnTzReJ3XUe1jM3l44fl0rOf4hu2QIDAQABAoIBAQCVMINb4TP20P55\n9IPyqlxjhPT563hijXK+lhMJyiBDPavOOs7qjLikq2bshYPVbm1o2jt6pkXXqAeB\n5t/d20fheQQurYyPfxecNBZuL78duwbcUy28m2aXLlcVRYO4zGhoMgdW4UajoNLV\nT/UIiDONWGyhTHXMHdP+6h9UOmvs3o4b225AuLrw9n6QO5I1Se8lcfOTIqR1fy4O\nGsUWEQPdW0X3Dhgpx7kDIuBTAQzbjD31PCR1U8h2wsCeEe6hPCrsMbo/D019weol\ndi40tbWR1/oNz0+vro2d9YDPJkXN0gmpT51Z4YJoexZBdyzO5z4DMSdn5yczzt6p\nQq8LsXAFAoGBAPYXRbC4OxhtuC+xr8KRkaCCMjtjUWFbFWf6OFgUS9b5uPz9xvdY\nXo7wBP1zp2dS8yFsdIYH5Six4Z5iOuDR4sVixzjabhwedL6bmS1zV5qcCWeASKX1\nURgSkfMmC4Tg3LBgZ9YxySFcVRjikxljkS3eK7Mp7Xmj5afe7qV73TJfAoGBAO20\nTtw2RGe02xnydZmmwf+NpQHOA9S0JsehZA6NRbtPEN/C8bPJIq4VABC5zcH+tfYf\nzndbDlGhuk+qpPA590rG5RSOUjYnQFq7njdSfFyok9dXSZQTjJwFnG2oy0LmgjCe\nROYnbCzD+a+gBKV4xlo2M80OLakQ3zOwPT0xNRnHAoGATLEj/tbrU8mdxP9TDwfe\nom7wyKFDE1wXZ7gLJyfsGqrog69y+lKH5XPXmkUYvpKTQq9SARMkz3HgJkPmpXnD\nelA2Vfl8pza2m1BShF+VxZErPR41hcLV6vKemXAZ1udc33qr4YzSaZskygSSYy8s\nZ2b9p3BBmc8CGzbWmKvpW3ECgYEAn7sFLxdMWj/+5221Nr4HKPn+wrq0ek9gq884\n1Ep8bETSOvrdvolPQ5mbBKJGsLC/h5eR/0Rx18sMzpIF6eOZ2GbU8z474mX36cCf\nrd9A8Gbbid3+9IE6gHGIz2uYwujw3UjNVbdyCpbahvjJhoQlDePUZVu8tRpAUpSA\nYklZvGsCgYBuIlOFTNGMVUnwfzrcS9a/31LSvWTZa8w2QFjsRPMYFezo2l4yWs4D\nPEpeuoJm+Gp6F6ayjoeyOw9mvMBH5hAZr4WjbiU6UodzEHREAsLAzCzcRyIpnDE6\nPW1c3j60r8AHVufkWTA+8B9WoLC5MqcYTV3beMGnNGGqS2PeBom63Q==\n-----END RSA PRIVATE KEY-----\n"
        m, err := parseAMQPMetaData(fakeMetaData, log)

        // assert
        assert.NoError(t, err)
        assert.NotNil(t, m.tlsCfg.clientKey, "failed to parse valid client certificate key")
    })
}
@@ -0,0 +1,117 @@
/*
Copyright 2021 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package amqp

import (
    "encoding/pem"
    "fmt"
    "strconv"
    "time"

    "github.com/dapr/components-contrib/pubsub"
    "github.com/dapr/kit/logger"
)

const (
    // errors.
    errorMsgPrefix = "amqp pub sub error:"
)

type metadata struct {
    tlsCfg
    url       string
    username  string
    password  string
    anonymous bool
}

type tlsCfg struct {
    caCert     string
    clientCert string
    clientKey  string
}

const (
    // Keys
    amqpURL        = "url"
    anonymous      = "anonymous"
    username       = "username"
    password       = "password"
    amqpCACert     = "caCert"
    amqpClientCert = "clientCert"
    amqpClientKey  = "clientKey"
    defaultWait    = 30 * time.Second
)

// isValidPEM validates the provided input has PEM formatted block.
func isValidPEM(val string) bool {
    block, _ := pem.Decode([]byte(val))

    return block != nil
}
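// For illustration (hypothetical calls, not part of the committed file):
//   isValidPEM("-----BEGIN CERTIFICATE-----\n…")  // true for any decodable PEM block
//   isValidPEM("randomNonPEMBlockCA")             // false, as exercised by the tests above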
func parseAMQPMetaData(md pubsub.Metadata, log logger.Logger) (*metadata, error) {
    m := metadata{anonymous: false}

    // required configuration settings
    if val, ok := md.Properties[amqpURL]; ok && val != "" {
        m.url = val
    } else {
        return &m, fmt.Errorf("%s missing url", errorMsgPrefix)
    }

    // optional configuration settings
    if val, ok := md.Properties[anonymous]; ok && val != "" {
        var err error
        m.anonymous, err = strconv.ParseBool(val)
        if err != nil {
            return &m, fmt.Errorf("%s invalid anonymous %s, %s", errorMsgPrefix, val, err)
        }
    }

    if !m.anonymous {
        if val, ok := md.Properties[username]; ok && val != "" {
            m.username = val
        } else {
            return &m, fmt.Errorf("%s missing username", errorMsgPrefix)
        }

        if val, ok := md.Properties[password]; ok && val != "" {
            m.password = val
        } else {
            return &m, fmt.Errorf("%s missing password", errorMsgPrefix)
        }
    }

    if val, ok := md.Properties[amqpCACert]; ok && val != "" {
        if !isValidPEM(val) {
            return &m, fmt.Errorf("%s invalid caCert", errorMsgPrefix)
        }
        m.tlsCfg.caCert = val
    }
    if val, ok := md.Properties[amqpClientCert]; ok && val != "" {
        if !isValidPEM(val) {
            return &m, fmt.Errorf("%s invalid clientCert", errorMsgPrefix)
        }
        m.tlsCfg.clientCert = val
    }
    if val, ok := md.Properties[amqpClientKey]; ok && val != "" {
        if !isValidPEM(val) {
            return &m, fmt.Errorf("%s invalid clientKey", errorMsgPrefix)
        }
        m.tlsCfg.clientKey = val
    }

    return &m, nil
}
@@ -21,7 +21,8 @@ type Filter interface {
 	Parse(interface{}) error
 }

-func parseFilter(obj interface{}) (Filter, error) {
+// ParseFilter parses a filter struct using the visitor pattern returning a built Filter interface.
+func ParseFilter(obj interface{}) (Filter, error) {
 	m, ok := obj.(map[string]interface{})
 	if !ok {
 		return nil, fmt.Errorf("filter unit must be a map")
@@ -134,7 +135,7 @@ func parseFilters(t string, obj interface{}) ([]Filter, error) {
 	filters := make([]Filter, len(arr))
 	for i, entry := range arr {
 		var err error
-		if filters[i], err = parseFilter(entry); err != nil {
+		if filters[i], err = ParseFilter(entry); err != nil {
 			return nil, err
 		}
 	}
@@ -109,7 +109,7 @@ func (q *Query) UnmarshalJSON(data []byte) error {
 		return nil
 	}

-	filter, err := parseFilter(q.QueryFields.Filters)
+	filter, err := ParseFilter(q.QueryFields.Filters)
 	if err != nil {
 		return err
 	}
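Since this change exports ParseFilter for callers outside the package, a minimal external use might look like the sketch below. The package import path and the "EQ" map shape (mirroring the state-store query JSON) are assumptions, and the field/value pair is hypothetical; this is not part of the commit.

package main

import (
    "fmt"

    "github.com/dapr/components-contrib/state/query" // assumed package path for the filter code above
)

func main() {
    // "EQ" mirrors the state-store query JSON; the key/value are hypothetical.
    raw := map[string]interface{}{
        "EQ": map[string]interface{}{"state": "CA"},
    }
    f, err := query.ParseFilter(raw)
    if err != nil {
        panic(err) // non-map input or an unknown filter type is rejected here
    }
    fmt.Printf("built filter: %T\n", f)
}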
@@ -0,0 +1,29 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: aws-snssqs
  namespace: default
spec:
  type: pubsub.aws.snssqs
  version: v1
  metadata:
    - name: accessKey
      value: ${{AWS_ACCESS_KEY_ID}}
    - name: secretKey
      value: ${{AWS_SECRET_ACCESS_KEY}}
    - name: region
      value: "us-east-1"
    - name: consumerID
      value: ${{PUBSUB_AWS_SNSSQS_QUEUE}}
    - name: messageVisibilityTimeout
      value: 10
    - name: messageRetryLimit
      value: 10
    - name: messageWaitTimeSeconds
      value: 1
    - name: messageMaxNumber
      value: 10
    - name: concurrencyMode
      value: "single"
    - name: disableEntityManagement
      value: "true"
@@ -0,0 +1,10 @@
apiVersion: dapr.io/v1alpha1
kind: Component
spec:
  type: pubsub.solace.amqp
  version: v1
  metadata:
    - name: url
      value: 'amqp://localhost:5672'
    - name: anonymous
      value: true
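Fed through the metadata parser shown at the top of this diff, this definition connects over plain AMQP with anonymous auth, so the username/password checks are skipped. A standalone sketch of the boolean handling (the property map here is illustrative, not the component's actual API):

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// String properties equivalent to the component yaml above.
	props := map[string]string{
		"url":       "amqp://localhost:5672",
		"anonymous": "true",
	}

	// The parser converts the anonymous flag with strconv.ParseBool,
	// rejecting anything that is not a recognizable boolean.
	anonymous, err := strconv.ParseBool(props["anonymous"])
	if err != nil {
		panic(err)
	}
	fmt.Println(anonymous) // true => credential checks are skipped
}
```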
@@ -12,7 +12,7 @@
 componentType: pubsub
 components:
   - component: azure.eventhubs
-    operations: ["publish", "subscribe", "multiplehandlers", "bulkpublish"]
+    operations: ['publish', 'subscribe', 'multiplehandlers', 'bulkpublish']
     config:
       pubsubName: azure-eventhubs
       testTopicName: eventhubs-pubsub-topic
@@ -50,9 +50,9 @@ components:
     config:
       checkInOrderProcessing: false
   - component: natsstreaming
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
   - component: jetstream
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
   - component: kafka
     allOperations: true
   - component: kafka
@@ -62,27 +62,38 @@ components:
     profile: confluent
     allOperations: true
   - component: pulsar
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
   - component: mqtt
     profile: mosquitto
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
+  - component: solace.amqp
+    operations: ['publish', 'subscribe']
   - component: mqtt
     profile: emqx
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
   - component: mqtt
     profile: vernemq
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
   - component: hazelcast
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
   - component: rabbitmq
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
     config:
       checkInOrderProcessing: false
   - component: in-memory
     operations: ["publish", "subscribe", "multiplehandlers"]
-  - component: aws.snssqs
+  - component: aws.snssqs.terraform
     operations: ["publish", "subscribe", "multiplehandlers"]
     config:
+      pubsubName: aws-snssqs
+      testTopicName: ${{PUBSUB_AWS_SNSSQS_TOPIC}}
+      testMultiTopic1Name: ${{PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1}}
+      testMultiTopic2Name: ${{PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2}}
+      checkInOrderProcessing: false
+  - component: aws.snssqs.docker
+    operations: ["publish", "subscribe", "multiplehandlers"]
+    config:
+      pubsubName: aws-snssqs
       checkInOrderProcessing: false
   - component: kubemq
-    operations: ["publish", "subscribe", "multiplehandlers"]
+    operations: ['publish', 'subscribe', 'multiplehandlers']
@@ -119,4 +119,76 @@ If you want to combine VS Code & dlv for debugging so you can set breakpoints in
    },
  ]
}
```

## Using terraform for conformance tests

If you are writing new conformance tests and they require cloud resources, you should use the terraform framework we have in place. To enable your component test to use terraform, there are a few changes to the normal steps you must make.

1. In `conformance.yml` you should create a new step in the workflow for your component that creates new env variables. You will need a variable for each specific resource your tests will use. If you require 3 different topics and 2 different tables for your tests, you should set 5 different env variables. The only convention you must follow is that each value must use `env.UNIQUE_ID` to ensure there are no conflicts with resource names.

   ```bash
   PUBSUB_AWS_SNSSQS_QUEUE="testQueue-${{ env.UNIQUE_ID }}"
   echo "PUBSUB_AWS_SNSSQS_QUEUE=$PUBSUB_AWS_SNSSQS_QUEUE" >> $GITHUB_ENV
   ```

2. When updating the `tests.yml` defined inside the `tests/config/<COMPONENT-TYPE>/` folder, you should overwrite the default names of any resources the conformance tests use. These values should reference env variables defined in `conformance.yml`.

   ```yaml
   - component: aws.snssqs.terraform
     operations: ["publish", "subscribe", "multiplehandlers"]
     config:
       pubsubName: aws-snssqs
       testTopicName: ${{PUBSUB_AWS_SNSSQS_TOPIC}}
       testMultiTopic1Name: ${{PUBSUB_AWS_SNSSQS_TOPIC_MULTI_1}}
       testMultiTopic2Name: ${{PUBSUB_AWS_SNSSQS_TOPIC_MULTI_2}}
   ```

3. When writing your `component.yml`, you should reference your credentials using env variables, and any resources specified in the yaml should use env variables as well, just as in `tests.yml`. Also, if your component has an option that controls resource creation, such as `disableEntityManagement`, you will need to set it so that it prohibits new resource creation: we want terraform, not dapr itself, to provision resources for these tests.

   ```yaml
   metadata:
     - name: accessKey
       value: ${{AWS_ACCESS_KEY_ID}}
     - name: secretKey
       value: ${{AWS_SECRET_ACCESS_KEY}}
     - name: region
       value: "us-east-1"
     - name: consumerID
       value: ${{PUBSUB_AWS_SNSSQS_QUEUE}}
     - name: disableEntityManagement
       value: "true"
   ```

4. You will need to create a new terraform file `component.tf` to provision your resources. The file should be placed in its own folder in the `.github/infrastructure/terraform/conformance` directory, such as `.github/infrastructure/terraform/conformance/pubsub/aws/snssqs`. The terraform file should declare a `UNIQUE_ID` variable and use it when naming its resources, so the names match the ones defined earlier. Make sure any resources your tests will use are defined in terraform.

   ```
   variable "UNIQUE_ID" {
     type        = string
     description = "Unique ID of the github workflow run."
   }
   ```

5. The component should be added to the `cron-components` step in the conformance test workflow `.github/conformance.yml`. The component should have a variable named `terraform-dir`, and the value should be the relative path from `.github/infrastructure/terraform/conformance` to the folder containing the test's terraform files, such as `pubsub/aws/snssqs`.

   ```
   - component: pubsub.aws.snssqs.terraform
     terraform-dir: pubsub/aws/snssqs
   ```

## Adding new AWS component in github actions

1. For tests involving aws components, we use a service account to provision the needed resources. If you are contributing a brand new component, you will need to make sure our account has sufficient permissions to provision resources for and use your component. A Dapr STC member will have to update the service account, so contact them for assistance.

2. In the component yaml for your tests, set the component metadata properties `accesskey` and `secretkey` to `${{AWS_ACCESS_KEY_ID}}` and `${{AWS_SECRET_ACCESS_KEY}}`. These env values will contain the credentials for the testing service account.

   ```yaml
   metadata:
     - name: accessKey
       value: ${{AWS_ACCESS_KEY_ID}}
     - name: secretKey
       value: ${{AWS_SECRET_ACCESS_KEY}}
   ```
@@ -31,6 +31,7 @@ import (
 
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"gopkg.in/yaml.v3"
 
 	"github.com/dapr/components-contrib/bindings"
@@ -69,6 +70,7 @@ import (
 	p_pulsar "github.com/dapr/components-contrib/pubsub/pulsar"
 	p_rabbitmq "github.com/dapr/components-contrib/pubsub/rabbitmq"
 	p_redis "github.com/dapr/components-contrib/pubsub/redis"
+	p_solaceamqp "github.com/dapr/components-contrib/pubsub/solace/amqp"
 	ss_azure "github.com/dapr/components-contrib/secretstores/azure/keyvault"
 	ss_hashicorp_vault "github.com/dapr/components-contrib/secretstores/hashicorp/vault"
 	ss_kubernetes "github.com/dapr/components-contrib/secretstores/kubernetes"
@@ -149,6 +151,7 @@ func LoadComponents(componentPath string) ([]Component, error) {
 	return components, nil
 }
 
+// LookUpEnv returns the value of the specified environment variable or the empty string.
 func LookUpEnv(key string) string {
 	if val, ok := os.LookupEnv(key); ok {
 		return val
@@ -166,6 +169,10 @@ func ParseConfigurationMap(t *testing.T, configMap map[string]interface{}) {
 			val = uuid.New().String()
 			t.Logf("Generated UUID %s", val)
 			configMap[k] = val
+		} else if strings.Contains(val, "${{") {
+			s := strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(val, "${{"), "}}"))
+			v := LookUpEnv(s)
+			configMap[k] = v
 		} else {
 			jsonMap := make(map[string]interface{})
 			err := json.Unmarshal([]byte(val), &jsonMap)
@@ -194,6 +201,10 @@ func parseConfigurationInterfaceMap(t *testing.T, configMap map[interface{}]inte
 			val = uuid.New().String()
 			t.Logf("Generated UUID %s", val)
 			configMap[k] = val
+		} else if strings.Contains(val, "${{") {
+			s := strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(val, "${{"), "}}"))
+			v := LookUpEnv(s)
+			configMap[k] = v
 		} else {
 			jsonMap := make(map[string]interface{})
 			err := json.Unmarshal([]byte(val), &jsonMap)
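Both configuration parsers now resolve `${{NAME}}` placeholders against the environment via `LookUpEnv`. A standalone sketch of the string handling involved (illustrative only, mirroring the trim logic in the hunks above):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	os.Setenv("PUBSUB_AWS_SNSSQS_QUEUE", "testQueue-abc123")

	val := "${{ PUBSUB_AWS_SNSSQS_QUEUE }}"
	// Strip the ${{ }} wrapper and any surrounding whitespace, as the
	// parse helpers do, then look the remaining name up in the environment.
	name := strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(val, "${{"), "}}"))
	fmt.Println(os.Getenv(name)) // testQueue-abc123
}
```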
@@ -293,8 +304,8 @@ func decodeYaml(b []byte) (TestConfiguration, error) {
 
 func (tc *TestConfiguration) loadComponentsAndProperties(t *testing.T, filepath string) (map[string]string, error) {
 	comps, err := LoadComponents(filepath)
-	assert.Nil(t, err)
-	assert.Equal(t, 1, len(comps)) // We only expect a single component per file
+	require.NoError(t, err)
+	require.Equal(t, 1, len(comps)) // We only expect a single component per file
 	c := comps[0]
 	props, err := ConvertMetadataToProperties(c.Spec.Metadata)
 
@@ -436,10 +447,14 @@ func loadPubSub(tc TestComponent) pubsub.PubSub {
 		pubsub = p_rabbitmq.NewRabbitMQ(testLogger)
 	case "in-memory":
 		pubsub = p_inmemory.New(testLogger)
-	case "aws.snssqs":
+	case "aws.snssqs.terraform":
+		pubsub = p_snssqs.NewSnsSqs(testLogger)
+	case "aws.snssqs.docker":
 		pubsub = p_snssqs.NewSnsSqs(testLogger)
 	case "kubemq":
 		pubsub = p_kubemq.NewKubeMQ(testLogger)
+	case "solace.amqp":
+		pubsub = p_solaceamqp.NewAMQPPubsub(testLogger)
 	default:
 		return nil
 	}