Compare commits
382 Commits
builtin-authentication-profiles.yaml (new file):

@@ -0,0 +1,231 @@
+aws:
+  - title: "AWS: Access Key ID and Secret Access Key"
+    description: |
+      Authenticate using an Access Key ID and Secret Access Key included in the metadata
+    metadata:
+      - name: region
+        type: string
+        required: false
+        description: |
+          The AWS Region where the AWS resource is deployed to.
+          This will be marked required in Dapr 1.17.
+        example: '"us-east-1"'
+      - name: awsRegion
+        type: string
+        required: false
+        description: |
+          This maintains backwards compatibility with existing fields.
+          It will be deprecated as of Dapr 1.17. Use 'region' instead.
+          The AWS Region where the AWS resource is deployed to.
+        example: '"us-east-1"'
+      - name: accessKey
+        description: AWS access key associated with an IAM account
+        required: false
+        sensitive: true
+        example: '"AKIAIOSFODNN7EXAMPLE"'
+      - name: secretKey
+        description: The secret key associated with the access key
+        required: false
+        sensitive: true
+        example: '"wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"'
+      - name: sessionToken
+        type: string
+        required: false
+        sensitive: true
+        description: |
+          AWS session token to use. A session token is only required if you are using
+          temporary security credentials.
+        example: '"TOKEN"'
+  - title: "AWS: Assume IAM Role"
+    description: |
+      Assume a specific IAM role. Note: This is only supported for Kafka and PostgreSQL.
+    metadata:
+      - name: region
+        type: string
+        required: true
+        description: |
+          The AWS Region where the AWS resource is deployed to.
+        example: '"us-east-1"'
+      - name: assumeRoleArn
+        type: string
+        required: false
+        description: |
+          IAM role that has access to AWS resource.
+          This is another option to authenticate with MSK and RDS Aurora aside from the AWS Credentials.
+          This will be marked required in Dapr 1.17.
+        example: '"arn:aws:iam::123456789:role/mskRole"'
+      - name: sessionName
+        type: string
+        description: |
+          The session name for assuming a role.
+        example: '"MyAppSession"'
+        default: '"DaprDefaultSession"'
+  - title: "AWS: Credentials from Environment Variables"
+    description: Use AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY from the environment
+  - title: "AWS: IAM Roles Anywhere"
+    description: Use X.509 certificates to establish trust between your AWS account and the Dapr cluster using AWS IAM Roles Anywhere.
+    metadata:
+      - name: trustAnchorArn
+        description: |
+          ARN of the AWS Trust Anchor in the AWS account granting trust to the Dapr Certificate Authority.
+        example: arn:aws:rolesanywhere:us-west-1:012345678910:trust-anchor/01234568-0123-0123-0123-012345678901
+        required: true
+      - name: trustProfileArn
+        description: |
+          ARN of the AWS IAM Profile in the trusting AWS account.
+        example: arn:aws:rolesanywhere:us-west-1:012345678910:profile/01234568-0123-0123-0123-012345678901
+        required: true
+      - name: assumeRoleArn
+        description: |
+          ARN of the AWS IAM role to assume in the trusting AWS account.
+        example: arn:aws:iam:012345678910:role/exampleIAMRoleName
+        required: true
+
+azuread:
+  - title: "Azure AD: Managed identity"
+    description: Authenticate using Azure AD and a managed identity.
+    metadata:
+      - name: azureClientId
+        description: |
+          Client ID (application ID). Required if the service has multiple identities assigned.
+        example: '"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"'
+      - name: azureEnvironment
+        description: |
+          Optional name for the Azure environment if using a different Azure cloud
+        default: AzurePublicCloud
+        example: '"AzurePublicCloud"'
+        allowedValues:
+          - AzurePublicCloud
+          - AzureChinaCloud
+          - AzureUSGovernmentCloud
+  - title: "Azure AD: Client credentials"
+    description: |
+      Authenticate using Azure AD with client credentials, also known as "service principals".
+    metadata:
+      - name: azureTenantId
+        description: ID of the Azure AD tenant
+        required: true
+        example: '"cd4b2887-304c-47e1-b4d5-65447fdd542a"'
+      - name: azureClientId
+        description: Client ID (application ID)
+        required: true
+        example: '"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"'
+      - name: azureClientSecret
+        description: Client secret (application password)
+        required: true
+        sensitive: true
+        example: '"Ecy3XG7zVZK3/vl/a2NSB+a1zXLa8RnMum/IgD0E"'
+      - name: azureEnvironment
+        description: |
+          Optional name for the Azure environment if using a different Azure cloud
+        default: AzurePublicCloud
+        example: '"AzurePublicCloud"'
+        allowedValues:
+          - AzurePublicCloud
+          - AzureChinaCloud
+          - AzureUSGovernmentCloud
+  - title: "Azure AD: Client certificate"
+    description: |
+      Authenticate using Azure AD with a client certificate. One of "azureCertificate" and "azureCertificateFile" is required.
+    metadata:
+      - name: azureTenantId
+        description: ID of the Azure AD tenant
+        required: true
+        example: '"cd4b2887-304c-47e1-b4d5-65447fdd542a"'
+      - name: azureClientId
+        description: Client ID (application ID)
+        required: true
+        example: '"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"'
+      - name: azureCertificate
+        description: |
+          Certificate and private key (in either a PEM file containing both the certificate and key, or in PFX/PKCS#12 format)
+        sensitive: true
+        example: |
+          "-----BEGIN PRIVATE KEY-----\n MIIEvgI... \n -----END PRIVATE KEY-----
+          \n -----BEGIN CERTIFICATE----- \n MIICoTC... \n -----END CERTIFICATE----- \n"
+      - name: azureCertificateFile
+        description: |
+          Path to PEM or PFX/PKCS#12 file on disk, containing the certificate and private key.
+        example: '"/path/to/file.pem"'
+      - name: azureCertificatePassword
+        description: Password for the certificate if encrypted.
+        sensitive: true
+        example: '"password"'
+      - name: azureEnvironment
+        description: |
+          Optional name for the Azure environment if using a different Azure cloud
+        default: AzurePublicCloud
+        example: '"AzurePublicCloud"'
+        allowedValues:
+          - AzurePublicCloud
+          - AzureChinaCloud
+          - AzureUSGovernmentCloud
+
+gcp:
+  - title: "GCP API Authentication with Service Account Key"
+    description: |
+      Authenticate authenticates API calls with the given service account or refresh token JSON credentials.
+    metadata:
+      - name: privateKeyID
+        required: true
+        sensitive: true
+        description: |
+          The GCP private key id. Replace with the value of "private_key_id" field of the Service Account Key file.
+        example: '"privateKeyID"'
+      - name: privateKey
+        required: true
+        sensitive: true
+        description: |
+          The GCP credentials private key. Replace with the value of "private_key" field of the Service Account Key file.
+        example: '"-----BEGIN PRIVATE KEY-----\nMIIE...\\n-----END PRIVATE KEY-----\n"'
+      - name: type
+        type: string
+        required: false
+        description: |
+          The GCP credentials type.
+        example: '"service_account"'
+        allowedValues:
+          - service_account
+      - name: projectID
+        type: string
+        required: true
+        description: |
+          GCP project id.
+        example: '"projectID"'
+      - name: clientEmail
+        type: string
+        required: true
+        description: |
+          GCP client email.
+        example: '"client@email.com"'
+      - name: clientID
+        type: string
+        required: true
+        description: |
+          The GCP client ID.
+        example: '"0123456789-0123456789"'
+      - name: authURI
+        type: string
+        required: false
+        description: |
+          The GCP account OAuth2 authorization server endpoint URI.
+        example: '"https://accounts.google.com/o/oauth2/auth"'
+      - name: tokenURI
+        type: string
+        required: false
+        description: |
+          The GCP account token server endpoint URI.
+        example: '"https://oauth2.googleapis.com/token"'
+      - name: authProviderX509CertURL
+        type: string
+        required: false
+        description: |
+          The GCP URL of the public x509 certificate, used to verify the signature
+          on JWTs, such as ID tokens, signed by the authentication provider.
+        example: '"https://www.googleapis.com/oauth2/v1/certs"'
+      - name: clientX509CertURL
+        type: string
+        required: false
+        description: |
+          The GCP URL of the public x509 certificate, used to verify JWTs signed by the client.
+        example: '"https://www.googleapis.com/robot/v1/metadata/x509/<PROJECT_NAME>.iam.gserviceaccount.com"'
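Since this change moves the profile definitions out of hardcoded Go and into the YAML file above, it helps to see how such a file can be consumed. Below is a minimal sketch (not part of this diff) of loading it with gopkg.in/yaml.v3; the struct types here are simplified stand-ins for the metadataschema types that appear later in this diff.

// Sketch: unmarshal builtin-authentication-profiles.yaml into a map keyed
// by profile group ("aws", "azuread", "gcp"). Types are simplified stand-ins.
package main

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)

type Metadata struct {
	Name          string   `yaml:"name"`
	Description   string   `yaml:"description"`
	Required      bool     `yaml:"required,omitempty"`
	Sensitive     bool     `yaml:"sensitive,omitempty"`
	Example       string   `yaml:"example"`
	AllowedValues []string `yaml:"allowedValues,omitempty"`
}

type AuthenticationProfile struct {
	Title       string     `yaml:"title"`
	Description string     `yaml:"description"`
	Metadata    []Metadata `yaml:"metadata,omitempty"`
}

func main() {
	raw, err := os.ReadFile("builtin-authentication-profiles.yaml")
	if err != nil {
		panic(err)
	}
	profiles := map[string][]AuthenticationProfile{}
	if err := yaml.Unmarshal(raw, &profiles); err != nil {
		panic(err)
	}
	for _, p := range profiles["aws"] {
		fmt.Println(p.Title) // e.g. "AWS: Access Key ID and Secret Access Key"
	}
}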
build-tools cmd (bundle component metadata):

@@ -64,6 +64,7 @@ var bundleComponentMetadataCmd = &cobra.Command{
 				fmt.Fprintln(os.Stderr, "Info: metadata file not found in component "+component)
 				continue
 			}
+			fmt.Fprintln(os.Stderr, "Info: metadata file loaded for component "+component)
 			bundle.Components = append(bundle.Components, componentMetadata)
 		}
component-folders.json (deleted, replaced by the YAML file below):

@@ -1,46 +0,0 @@
-{
-    "componentFolders": [
-        "bindings",
-        "configuration",
-        "crypto",
-        "lock",
-        "middleware/http",
-        "nameresolution",
-        "pubsub",
-        "secretstores",
-        "state",
-        "workflows"
-    ],
-    "excludeFolders": [
-        "bindings/alicloud",
-        "bindings/aws",
-        "bindings/azure",
-        "bindings/gcp",
-        "bindings/huawei",
-        "bindings/rethinkdb",
-        "bindings/twilio",
-        "bindings/zeebe",
-        "configuration/azure",
-        "configuration/redis/internal",
-        "crypto/azure",
-        "crypto/kubernetes",
-        "pubsub/aws",
-        "pubsub/azure",
-        "pubsub/azure/servicebus",
-        "pubsub/gcp",
-        "secretstores/alicloud",
-        "secretstores/aws",
-        "secretstores/azure",
-        "secretstores/gcp",
-        "secretstores/hashicorp",
-        "secretstores/huaweicloud",
-        "secretstores/local",
-        "state/alicloud",
-        "state/aws",
-        "state/azure",
-        "state/gcp",
-        "state/hashicorp",
-        "state/oci",
-        "state/utils"
-    ]
-}
component-folders.yaml (new file):

@@ -0,0 +1,54 @@
+componentFolders:
+  - bindings
+  - configuration
+  - crypto
+  - lock
+  - middleware/http
+  - nameresolution
+  - pubsub
+  - secretstores
+  - state
+  - workflows
+
+excludeFolders:
+  - bindings/alicloud
+  - bindings/aws
+  - bindings/azure
+  - bindings/cloudflare
+  - bindings/gcp
+  - bindings/http/testdata
+  - bindings/huawei
+  - bindings/rethinkdb
+  - bindings/twilio
+  - bindings/wasm/testdata
+  - bindings/zeebe
+  - configuration/azure
+  - configuration/redis/internal
+  - crypto/azure
+  - crypto/kubernetes
+  - middleware/http/oauth2clientcredentials/mocks
+  - middleware/http/wasm/example
+  - middleware/http/wasm/internal
+  - pubsub/aws
+  - pubsub/azure
+  - pubsub/azure/servicebus
+  - pubsub/gcp
+  - secretstores/alicloud
+  - secretstores/aws
+  - secretstores/azure
+  - secretstores/gcp
+  - secretstores/hashicorp
+  - secretstores/huaweicloud
+  - secretstores/local
+  - secretstores/tencentcloud
+  - state/alicloud
+  - state/aws
+  - state/azure
+  - state/azure/blobstorage/internal
+  - state/cloudflare
+  - state/gcp
+  - state/hashicorp
+  - state/oci
+  - state/postgresql
+  - state/query
+  - state/utils
build-tools/go.mod:

@@ -1,26 +1,28 @@
 module github.com/dapr/components-contrib/build-tools
 
-go 1.20
+go 1.24.1
 
 require (
 	github.com/dapr/components-contrib v0.0.0
 	github.com/invopop/jsonschema v0.6.0
 	github.com/spf13/cobra v1.6.1
 	github.com/xeipuuv/gojsonschema v1.2.1-0.20201027075954-b076d39a02e5
-	golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1
-	sigs.k8s.io/yaml v1.3.0
+	gopkg.in/yaml.v3 v3.0.1
+	sigs.k8s.io/yaml v1.4.0
 )
 
 require (
-	github.com/dapr/kit v0.11.1 // indirect
+	github.com/dapr/kit v0.15.3-0.20250516121556-bc7dc566c45d // indirect
+	github.com/gogo/protobuf v1.3.2 // indirect
 	github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 // indirect
 	github.com/inconshreveable/mousetrap v1.0.1 // indirect
 	github.com/mitchellh/mapstructure v1.5.1-0.20220423185008-bf980b35cac4 // indirect
-	github.com/spf13/cast v1.5.1 // indirect
-	github.com/spf13/pflag v1.0.5 // indirect
+	github.com/spf13/cast v1.8.0 // indirect
+	github.com/spf13/pflag v1.0.6 // indirect
 	github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
 	github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
-	gopkg.in/yaml.v2 v2.4.0 // indirect
+	gopkg.in/inf.v0 v0.9.1 // indirect
+	k8s.io/apimachinery v0.27.4 // indirect
 )
 
 replace github.com/dapr/components-contrib => ../
build-tools/go.sum (add/remove markers reconstructed from the version pairs; the capture lost them):

@@ -1,35 +1,50 @@
 github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/dapr/kit v0.11.1 h1:qwV9HMVFAwS/KK//xEqJ+Ef9UjXdocrUGSgjsP5UCMM=
-github.com/dapr/kit v0.11.1/go.mod h1:dqcCSK9ethcPW4L9jC2t4WrPUU3mPA4oa53fJRhl34E=
+github.com/dapr/kit v0.15.3-0.20250516121556-bc7dc566c45d h1:v+kZn9ami23xBsruyZmKErIOSlCdW9pR8wfHUg5+jys=
+github.com/dapr/kit v0.15.3-0.20250516121556-bc7dc566c45d/go.mod h1:6w2Pr38zOAtBn+ld/jknwI4kgMfwanCIcFVnPykdPZQ=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY=
-github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
+github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
+github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
+github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 h1:i462o439ZjprVSFSZLZxcsoAe592sZB1rci2Z8j4wdk=
 github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
 github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
 github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
 github.com/invopop/jsonschema v0.6.0 h1:8e+xY8ZEn8gDHUYylSlLHy22P+SLeIRIHv3nM3hCbmY=
 github.com/invopop/jsonschema v0.6.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
 github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
 github.com/mitchellh/mapstructure v1.5.1-0.20220423185008-bf980b35cac4 h1:BpfhmLKZf+SjVanKKhCgf3bg+511DmU9eDQTen7LLbY=
 github.com/mitchellh/mapstructure v1.5.1-0.20220423185008-bf980b35cac4/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
+github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
 github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
-github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
+github.com/spf13/cast v1.8.0 h1:gEN9K4b8Xws4EX0+a0reLmhq8moKn7ntRlQYgjPeCDk=
+github.com/spf13/cast v1.8.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
 github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
 github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
-github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
-github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
+github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
 github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
 github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
@@ -37,13 +52,42 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo
 github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
 github.com/xeipuuv/gojsonschema v1.2.1-0.20201027075954-b076d39a02e5 h1:ImnGIsrcG8vwbovhYvvSY8fagVV6QhCWSWXfzwGDLVs=
 github.com/xeipuuv/gojsonschema v1.2.1-0.20201027075954-b076d39a02e5/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
-golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 h1:k/i9J1pBpvlfR+9QsetwPyERsqu1GIbi967PQMq3Ivc=
-golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/exp v0.0.0-20240119083558-1b970713d09a h1:Q8/wZp0KX97QFTc2ywcOE0YRjZPVIx+MXInMzdvQqcA=
+golang.org/x/exp v0.0.0-20240119083558-1b970713d09a/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
-gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
+gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
-sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
+k8s.io/apimachinery v0.27.4 h1:CdxflD4AF61yewuid0fLl6bM4a3q04jWel0IlP+aYjs=
+k8s.io/apimachinery v0.27.4/go.mod h1:XNfZ6xklnMCOGGFNqXG7bUrQCoR04dh/E7FprV6pb+E=
+sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
+sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
build-tools main.go:

@@ -15,28 +15,39 @@ package main
 
 import (
 	_ "embed"
-	"encoding/json"
+
+	"gopkg.in/yaml.v3"
 
 	"github.com/dapr/components-contrib/build-tools/cmd"
+	"github.com/dapr/components-contrib/build-tools/pkg/metadataschema"
 )
 
-//go:embed component-folders.json
-var componentFoldersJSON []byte
+var (
+	//go:embed component-folders.yaml
+	componentFoldersYAML []byte
+	//go:embed builtin-authentication-profiles.yaml
+	builtinAuthenticationProfilesYAML []byte
+)
 
-func init() {
-	parsed := struct {
-		ComponentFolders []string `json:"componentFolders"`
-		ExcludeFolders   []string `json:"excludeFolders"`
+func main() {
+	// Parse component-folders.json
+	parsedComponentFolders := struct {
+		ComponentFolders []string `json:"componentFolders" yaml:"componentFolders"`
+		ExcludeFolders   []string `json:"excludeFolders" yaml:"excludeFolders"`
 	}{}
-	err := json.Unmarshal(componentFoldersJSON, &parsed)
+	err := yaml.Unmarshal(componentFoldersYAML, &parsedComponentFolders)
 	if err != nil {
 		panic(err)
 	}
 
-	cmd.ComponentFolders = parsed.ComponentFolders
-	cmd.ExcludeFolders = parsed.ExcludeFolders
-}
+	cmd.ComponentFolders = parsedComponentFolders.ComponentFolders
+	cmd.ExcludeFolders = parsedComponentFolders.ExcludeFolders
 
-func main() {
+	// Parse builtin-authentication-profiles.yaml
+	err = yaml.Unmarshal(builtinAuthenticationProfilesYAML, &metadataschema.BuiltinAuthenticationProfiles)
+	if err != nil {
+		panic(err)
+	}
+
 	cmd.Execute()
 }
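One detail in the main.go change worth calling out: grouping several //go:embed directives in one var (...) block is the idiomatic pattern, since each directive binds to the declaration immediately following it. A minimal runnable sketch (assuming the two YAML files sit next to the source file):

// Sketch: multiple go:embed directives in one var block.
package main

import (
	_ "embed" // blank import is required for //go:embed with []byte targets
	"fmt"
)

var (
	//go:embed component-folders.yaml
	componentFoldersYAML []byte
	//go:embed builtin-authentication-profiles.yaml
	builtinAuthenticationProfilesYAML []byte
)

func main() {
	fmt.Println(len(componentFoldersYAML), len(builtinAuthenticationProfilesYAML))
}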
Metadata analyzer (Go template):

@@ -6,7 +6,7 @@ import (
 	"os"
 	"strings"
 
-	"gopkg.in/yaml.v2"
+	"gopkg.in/yaml.v3"
 
 	"github.com/dapr/kit/logger"
 	mdutils "github.com/dapr/components-contrib/metadata"
@@ -17,24 +17,24 @@ import (
 )
 
 func main() {
-	if len(os.Args) < 2 {
-		fmt.Println("Please provide the path to the components-contrib root as an argument")
-		os.Exit(1)
-	}
-	basePath := os.Args[1]
-	log := logger.NewLogger("metadata")
+	if len(os.Args) < 2 {
+		fmt.Println("Please provide the path to the components-contrib root as an argument")
+		os.Exit(1)
+	}
+	basePath := os.Args[1]
+	log := logger.NewLogger("metadata")
 
-	var (
+	var (
 		yamlMetadata *map[string]string
-		missing      map[string]string
+		missing      []string
+		unexpected   []string
 	)
-	missingByComponent := make(map[string]map[string]string)
+	missingByComponent := make(map[string][]string)
+	unexpectedByComponent := make(map[string][]string)
 
 {{range $fullpkg, $val := .Pkgs}}
 	instanceOf_{{index $val 0}} := {{index $val 0}}.{{index $val 1}}(log)
-	metadataFor_{{index $val 0}} := instanceOf_{{index $val 0}}.GetComponentMetadata()
+	metadataFor_{{index $val 0}} := instanceOf_{{index $val 0}}.(mdutils.ComponentWithMetadata).GetComponentMetadata()
 	yamlMetadata = getYamlMetadata(basePath, "{{$fullpkg}}")
 	missing = checkMissingMetadata(yamlMetadata, metadataFor_{{index $val 0}})
 	if len(missing) > 0 {
@@ -127,14 +127,17 @@ func getYamlMetadata(basePath string, pkg string) *map[string]string {
 	return &names
 }
 
-func checkMissingMetadata(yamlMetadata *map[string]string, componentMetadata map[string]string) map[string]string {
-	missingMetadata := make(map[string]string)
+func checkMissingMetadata(yamlMetadata *map[string]string, componentMetadata mdutils.MetadataMap) []string {
+	missingMetadata := make([]string, 0)
 	// if there is no yaml metadata, then we are not missing anything yet
 	if yamlMetadata != nil && len(*yamlMetadata) > 0 {
-		for key := range componentMetadata {
+		for key, md := range componentMetadata {
+			if md.Ignored {
+				continue
+			}
 			lowerKey := strings.ToLower(key)
 			if _, ok := (*yamlMetadata)[lowerKey]; !ok {
-				missingMetadata[lowerKey] = componentMetadata[key]
+				missingMetadata = append(missingMetadata, key)
 			}
 			// todo - check if the metadata is the same data type
 		}
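The behavioral change in checkMissingMetadata is easier to see in isolation: fields flagged as Ignored are now skipped, and the result is a list of missing field names rather than a map. A self-contained sketch with simplified stand-in types:

// Sketch of the new missing-metadata check with stand-in types.
package main

import (
	"fmt"
	"strings"
)

type fieldInfo struct{ Ignored bool }

func checkMissingMetadata(yamlMetadata map[string]string, componentMetadata map[string]fieldInfo) []string {
	missing := make([]string, 0)
	if len(yamlMetadata) == 0 {
		return missing // no yaml metadata: nothing to compare against yet
	}
	for key, md := range componentMetadata {
		if md.Ignored {
			continue // ignored fields are exempt from the doc requirement
		}
		if _, ok := yamlMetadata[strings.ToLower(key)]; !ok {
			missing = append(missing, key)
		}
	}
	return missing
}

func main() {
	yamlDoc := map[string]string{"connectionstring": "string"}
	component := map[string]fieldInfo{
		"ConnectionString": {},
		"Timeout":          {},
		"internalFlag":     {Ignored: true},
	}
	fmt.Println(checkMissingMetadata(yamlDoc, component)) // [Timeout]
}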
pkg/metadataschema (built-in authentication profiles):

@@ -15,105 +15,56 @@ package metadataschema
 
 import (
 	"fmt"
+	"strings"
 )
 
+// Built-in authentication profiles
+var BuiltinAuthenticationProfiles map[string][]AuthenticationProfile
+
 // ParseBuiltinAuthenticationProfile returns an AuthenticationProfile(s) from a given BuiltinAuthenticationProfile.
-func ParseBuiltinAuthenticationProfile(bi BuiltinAuthenticationProfile) ([]AuthenticationProfile, error) {
-	switch bi.Name {
-	case "azuread":
-		azureEnvironmentMetadata := Metadata{
-			Name:          "azureEnvironment",
-			Required:      false,
-			Description:   "Optional name for the Azure environment if using a different Azure cloud",
-			Example:       `"AzurePublicCloud"`,
-			Default:       "AzurePublicCloud",
-			AllowedValues: []string{"AzurePublicCloud", "AzureChinaCloud", "AzureUSGovernmentCloud"},
-		}
-		profiles := []AuthenticationProfile{
-			{
-				Title:       "Azure AD: Managed identity",
-				Description: "Authenticate using Azure AD and a managed identity.",
-				Metadata: mergedMetadata(bi.Metadata,
-					Metadata{
-						Name:        "azureClientId",
-						Description: "Client ID (application ID). Required if the service has multiple identities assigned.",
-						Example:     `"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"`,
-						Required:    false,
-					},
-					azureEnvironmentMetadata,
-				),
-			},
-			{
-				Title:       "Azure AD: Client credentials",
-				Description: "Authenticate using Azure AD with client credentials, also known as \"service principals\".",
-				Metadata: mergedMetadata(bi.Metadata,
-					Metadata{
-						Name:        "azureTenantId",
-						Description: "ID of the Azure AD tenant",
-						Example:     `"cd4b2887-304c-47e1-b4d5-65447fdd542a"`,
-						Required:    true,
-					},
-					Metadata{
-						Name:        "azureClientId",
-						Description: "Client ID (application ID)",
-						Example:     `"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"`,
-						Required:    true,
-					},
-					Metadata{
-						Name:        "azureClientSecret",
-						Description: "Client secret (application password)",
-						Example:     `"Ecy3XG7zVZK3/vl/a2NSB+a1zXLa8RnMum/IgD0E"`,
-						Required:    true,
-						Sensitive:   true,
-					},
-					azureEnvironmentMetadata,
-				),
-			},
-			{
-				Title:       "Azure AD: Client certificate",
-				Description: `Authenticate using Azure AD with a client certificate. One of "azureCertificate" and "azureCertificateFile" is required.`,
-				Metadata: mergedMetadata(bi.Metadata,
-					Metadata{
-						Name:        "azureTenantId",
-						Description: "ID of the Azure AD tenant",
-						Example:     `"cd4b2887-304c-47e1-b4d5-65447fdd542a"`,
-						Required:    true,
-					},
-					Metadata{
-						Name:        "azureClientId",
-						Description: "Client ID (application ID)",
-						Example:     `"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"`,
-						Required:    true,
-					},
-					Metadata{
-						Name:        "azureCertificate",
-						Description: "Certificate and private key (in either a PEM file containing both the certificate and key, or in PFX/PKCS#12 format)",
-						Example:     `"-----BEGIN PRIVATE KEY-----\n MIIEvgI... \n -----END PRIVATE KEY----- \n -----BEGIN CERTIFICATE----- \n MIICoTC... \n -----END CERTIFICATE----- \n"`,
-						Required:    false,
-						Sensitive:   true,
-					},
-					Metadata{
-						Name:        "azureCertificateFile",
-						Description: "Path to PEM or PFX/PKCS#12 file on disk, containing the certificate and private key.",
-						Example:     `"/path/to/file.pem"`,
-						Required:    false,
-						Sensitive:   false,
-					},
-					Metadata{
-						Name:        "azureCertificatePassword",
-						Description: "Password for the certificate if encrypted.",
-						Example:     `"password"`,
-						Required:    false,
-						Sensitive:   true,
-					},
-					azureEnvironmentMetadata,
-				),
-			},
-		}
-		return profiles, nil
-	default:
-		return nil, fmt.Errorf("built-in authentication profile %s does not exist", bi.Name)
-	}
-}
+func ParseBuiltinAuthenticationProfile(bi BuiltinAuthenticationProfile, componentTitle string) ([]AuthenticationProfile, error) {
+	profiles, ok := BuiltinAuthenticationProfiles[bi.Name]
+	if !ok {
+		return nil, fmt.Errorf("built-in authentication profile %s does not exist", bi.Name)
+	}
+
+	res := make([]AuthenticationProfile, len(profiles))
+	for i, profile := range profiles {
+		res[i] = profile
+
+		// deep copy the metadata slice to avoid side effects when manually updating some req -> non-req fields to deprecate some fields for kafka/postgres
+		// TODO: rm all of this manipulation in Dapr 1.17!!
+		originalMetadata := profile.Metadata
+		metadataCopy := make([]Metadata, len(originalMetadata))
+		copy(metadataCopy, originalMetadata)
+
+		if componentTitle == "Apache Kafka" || strings.ToLower(componentTitle) == "postgresql" {
+			removeRequiredOnSomeAWSFields(&metadataCopy)
+		}
+
+		merged := mergedMetadata(bi.Metadata, metadataCopy...)
+
+		// Note: We must apply the removal of deprecated fields after the merge!!
+
+		// Here, we remove some deprecated fields as we support the transition to a new auth profile
+		if profile.Title == "AWS: Assume IAM Role" && componentTitle == "Apache Kafka" || profile.Title == "AWS: Assume IAM Role" && strings.ToLower(componentTitle) == "postgresql" {
+			merged = removeSomeDeprecatedFieldsOnUnrelatedAuthProfiles(merged)
+		}
+
+		// Here, there are no metadata fields that need deprecating
+		if profile.Title == "AWS: Credentials from Environment Variables" && componentTitle == "Apache Kafka" || profile.Title == "AWS: Credentials from Environment Variables" && strings.ToLower(componentTitle) == "postgresql" {
+			merged = removeAllDeprecatedFieldsOnUnrelatedAuthProfiles(merged)
+		}
+
+		// Here, this is a new auth profile, so rm all deprecating fields as unrelated.
+		if profile.Title == "AWS: IAM Roles Anywhere" && componentTitle == "Apache Kafka" || profile.Title == "AWS: IAM Roles Anywhere" && strings.ToLower(componentTitle) == "postgresql" {
+			merged = removeAllDeprecatedFieldsOnUnrelatedAuthProfiles(merged)
+		}
+
+		res[i].Metadata = merged
+	}
+
+	return res, nil
+}
 
 func mergedMetadata(base []Metadata, add ...Metadata) []Metadata {
@@ -126,3 +77,58 @@ func mergedMetadata(base []Metadata, add ...Metadata) []Metadata {
 	res = append(res, add...)
 	return res
 }
+
+// removeRequiredOnSomeAWSFields needs to be removed in Dapr 1.17 as duplicated AWS IAM fields get removed,
+// and we standardize on these fields.
+// Currently, there are: awsAccessKey, accessKey and awsSecretKey, secretKey, and awsRegion and region fields.
+// We normally have accessKey, secretKey, and region fields marked required as it is part of the builtin AWS auth profile fields.
+// However, as we rm the aws prefixed ones, we need to then mark the normally required ones as not required only for postgres and kafka.
+// This way we do not break existing users, and transition them to the standardized fields.
+func removeRequiredOnSomeAWSFields(metadata *[]Metadata) {
+	if metadata == nil {
+		return
+	}
+
+	for i := range *metadata {
+		field := &(*metadata)[i]
+
+		if field == nil {
+			continue
+		}
+
+		if field.Name == "accessKey" || field.Name == "secretKey" || field.Name == "region" {
+			field.Required = false
+		}
+	}
+}
+
+func removeAllDeprecatedFieldsOnUnrelatedAuthProfiles(metadata []Metadata) []Metadata {
+	filteredMetadata := []Metadata{}
+	for _, field := range metadata {
+		if strings.HasPrefix(field.Name, "aws") {
+			continue
+		} else {
+			filteredMetadata = append(filteredMetadata, field)
+		}
+	}
+
+	return filteredMetadata
+}
+
+func removeSomeDeprecatedFieldsOnUnrelatedAuthProfiles(metadata []Metadata) []Metadata {
+	filteredMetadata := []Metadata{}
+
+	for _, field := range metadata {
+		// region is required in Assume Role auth profile, so this is needed for now.
+		if field.Name == "region" {
+			field.Required = true
+		}
+		if field.Name == "awsAccessKey" || field.Name == "awsSecretKey" || field.Name == "awsSessionToken" || field.Name == "awsRegion" {
+			continue
+		} else {
+			filteredMetadata = append(filteredMetadata, field)
+		}
+	}
+
+	return filteredMetadata
+}
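The deep-copy comment in ParseBuiltinAuthenticationProfile above is load-bearing: the profiles come from the shared package-level BuiltinAuthenticationProfiles map, so mutating a profile's Metadata slice in place would leak the Kafka/PostgreSQL-specific tweaks into every later caller. The same hazard, in miniature:

// Sketch: why copying the slice before mutating protects the shared source.
package main

import "fmt"

type Metadata struct {
	Name     string
	Required bool
}

func main() {
	shared := []Metadata{{Name: "region", Required: true}}

	// Copy first, then mutate the copy; `shared` stays untouched because
	// the elements are value types and copy() duplicates them.
	metadataCopy := make([]Metadata, len(shared))
	copy(metadataCopy, shared)
	metadataCopy[0].Required = false

	fmt.Println(shared[0].Required, metadataCopy[0].Required) // true false
}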
pkg/metadataschema (schema types, yaml tags added alongside json):

@@ -18,116 +18,113 @@ package metadataschema
 // ComponentMetadata is the schema for the metadata.yaml / metadata.json files.
 type ComponentMetadata struct {
 	// Version of the component metadata schema.
-	SchemaVersion string `json:"schemaVersion" jsonschema:"enum=v1"`
+	SchemaVersion string `json:"schemaVersion" yaml:"schemaVersion" jsonschema:"enum=v1"`
 	// Component type, of one of the allowed values.
-	Type string `json:"type" jsonschema:"enum=bindings,enum=state,enum=secretstores,enum=pubsub,enum=workflows,enum=configuration,enum=lock,enum=middleware"`
+	Type string `json:"type" yaml:"type" jsonschema:"enum=bindings,enum=state,enum=secretstores,enum=pubsub,enum=workflows,enum=configuration,enum=lock,enum=middleware,enum=crypto"`
 	// Name of the component (without the inital type, e.g. "http" instead of "bindings.http").
-	Name string `json:"name"`
+	Name string `json:"name" yaml:"name"`
 	// Version of the component, with the leading "v", e.g. "v1".
-	Version string `json:"version"`
+	Version string `json:"version" yaml:"version"`
 	// Component status.
-	Status string `json:"status" jsonschema:"enum=stable,enum=beta,enum=alpha,enum=development-only"`
+	Status string `json:"status" yaml:"status" jsonschema:"enum=stable,enum=beta,enum=alpha,enum=development-only"`
 	// Title of the component, e.g. "HTTP".
-	Title string `json:"title"`
+	Title string `json:"title" yaml:"title"`
 	// Additional description for the component, optional.
-	Description string `json:"description,omitempty"`
+	Description string `json:"description,omitempty" yaml:"description,omitempty"`
 	// URLs with additional resources for the component, such as docs.
-	URLs []URL `json:"urls"`
+	URLs []URL `json:"urls" yaml:"urls"`
 	// Properties for bindings only.
 	// This should not present unless "type" is "bindings".
-	Binding *Binding `json:"binding,omitempty"`
+	Binding *Binding `json:"binding,omitempty" yaml:"binding,omitempty"`
 	// Component capabilities.
 	// For state stores, the presence of "actorStateStore" implies that the metadata property "actorStateStore" can be set. In that case, do not manually specify "actorStateStore" as metadata option.
-	Capabilities []string `json:"capabilities,omitempty"`
+	Capabilities []string `json:"capabilities,omitempty" yaml:"capabilities,omitempty"`
 	// Authentication profiles for the component.
-	AuthenticationProfiles []AuthenticationProfile `json:"authenticationProfiles,omitempty"`
+	AuthenticationProfiles []AuthenticationProfile `json:"authenticationProfiles,omitempty" yaml:"authenticationProfiles,omitempty"`
 	// Built-in authentication profiles to import.
-	BuiltInAuthenticationProfiles []BuiltinAuthenticationProfile `json:"builtinAuthenticationProfiles,omitempty"`
+	BuiltInAuthenticationProfiles []BuiltinAuthenticationProfile `json:"builtinAuthenticationProfiles,omitempty" yaml:"builtinAuthenticationProfiles,omitempty"`
 	// Metadata options for the component.
-	Metadata []Metadata `json:"metadata,omitempty"`
+	Metadata []Metadata `json:"metadata,omitempty" yaml:"metadata,omitempty"`
 }
 
 // URL represents one URL with additional resources.
 type URL struct {
 	// Title of the URL.
-	Title string `json:"title"`
+	Title string `json:"title" yaml:"title"`
 	// URL.
-	URL string `json:"url"`
+	URL string `json:"url" yaml:"url"`
 }
 
 // Binding represents properties that are specific to bindings
 type Binding struct {
 	// If "true", the binding can be used as input binding.
-	Input bool `json:"input,omitempty"`
+	Input bool `json:"input,omitempty" yaml:"input,omitempty"`
 	// If "true", the binding can be used as output binding.
-	Output bool `json:"output,omitempty"`
+	Output bool `json:"output,omitempty" yaml:"output,omitempty"`
 	// List of operations that the output binding support.
 	// Required in output bindings, and not allowed in input-only bindings.
-	Operations []BindingOperation `json:"operations"`
+	Operations []BindingOperation `json:"operations" yaml:"operations"`
 }
 
 // BindingOperation represents an operation offered by an output binding.
 type BindingOperation struct {
 	// Name of the operation, such as "create", "post", "delete", etc.
-	Name string `json:"name"`
+	Name string `json:"name" yaml:"name"`
 	// Descrption of the operation.
-	Description string `json:"description"`
+	Description string `json:"description" yaml:"description"`
 }
 
 // Metadata property.
 type Metadata struct {
 	// Name of the metadata property.
-	Name string `json:"name"`
+	Name string `json:"name" yaml:"name"`
 	// Description of the property.
-	Description string `json:"description"`
+	Description string `json:"description" yaml:"description"`
 	// If "true", the property is required
-	Required bool `json:"required,omitempty"`
+	Required bool `json:"required,omitempty" yaml:"required,omitempty"`
 	// If "true", the property represents a sensitive value such as a password.
-	Sensitive bool `json:"sensitive,omitempty"`
+	Sensitive bool `json:"sensitive,omitempty" yaml:"sensitive,omitempty"`
 	// Type of the property.
 	// If this is empty, it's interpreted as "string".
-	Type string `json:"type,omitempty" jsonschema:"enum=string,enum=number,enum=bool,enum=duration"`
+	Type string `json:"type,omitempty" yaml:"type,omitempty" jsonschema:"enum=string,enum=number,enum=bool,enum=duration,enum=bytesize"`
 	// Default value for the property.
 	// If it's a string, don't forget to add quotes.
-	Default string `json:"default,omitempty"`
+	Default string `json:"default,omitempty" yaml:"default,omitempty"`
 	// Example value.
-	Example string `json:"example"`
+	Example string `json:"example" yaml:"example"`
 	// If set, forces the value to be one of those specified in this allowlist.
-	AllowedValues []string `json:"allowedValues,omitempty"`
+	AllowedValues []string `json:"allowedValues,omitempty" yaml:"allowedValues,omitempty"`
 	// If set, specifies that the property is only applicable to bindings of the type specified below.
 	// At least one of "input" and "output" must be "true".
-	Binding *MetadataBinding `json:"binding,omitempty"`
+	Binding *MetadataBinding `json:"binding,omitempty" yaml:"binding,omitempty"`
 	// URL with additional information, such as docs.
-	URL *URL `json:"url,omitempty"`
+	URL *URL `json:"url,omitempty" yaml:"url,omitempty"`
 	// If set, specifies that the property is deprecated and should not be used in new configurations.
-	Deprecated bool `json:"deprecated,omitempty"`
+	Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"`
 }
 
 // MetadataBinding is the type for the "binding" property in the "metadata" object.
 type MetadataBinding struct {
 	// If "true", the property can be used with the binding as input binding only.
-	Input bool `json:"input,omitempty"`
+	Input bool `json:"input,omitempty" yaml:"input,omitempty"`
 	// If "true", the property can be used with the binding as output binding only.
-	Output bool `json:"output,omitempty"`
+	Output bool `json:"output,omitempty" yaml:"output,omitempty"`
 }
 
 // AuthenticationProfile is the type for an authentication profile.
 type AuthenticationProfile struct {
 	// Title of the authentication profile.
-	Title string `json:"title"`
+	Title string `json:"title" yaml:"title"`
 	// Additional description for the authentication profile, optional.
-	Description string `json:"description"`
+	Description string `json:"description" yaml:"description"`
 	// Metadata options applicable when using this authentication profile.
-	Metadata []Metadata `json:"metadata,omitempty"`
+	Metadata []Metadata `json:"metadata,omitempty" yaml:"metadata,omitempty"`
 }
 
 // BuiltinAuthenticationProfile is a reference to a built-in authentication profile.
 type BuiltinAuthenticationProfile struct {
 	// Name of the built-in authentication profile.
 	// Currently supports:
 	//
 	// - `azuread` (Azure AD, including Managed Identity).
-	Name string `json:"name"`
+	Name string `json:"name" yaml:"name"`
 	// Additional metadata options applicable when using this authentication profile.
-	Metadata []Metadata `json:"metadata,omitempty"`
+	Metadata []Metadata `json:"metadata,omitempty" yaml:"metadata,omitempty"`
 }
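To make the schema above concrete, here is a hedged sketch of a minimal component description expressed against it as a Go literal. The types are simplified stand-ins carrying only a subset of the fields, and the values are illustrative, not taken from any real component:

// Sketch: a minimal component description against the schema above.
package main

import "fmt"

type BuiltinAuthenticationProfile struct{ Name string }
type Metadata struct {
	Name, Description, Example string
	Required, Sensitive        bool
}
type ComponentMetadata struct {
	SchemaVersion, Type, Name, Version, Status, Title string
	BuiltInAuthenticationProfiles                     []BuiltinAuthenticationProfile
	Metadata                                          []Metadata
}

func main() {
	c := ComponentMetadata{
		SchemaVersion: "v1",
		Type:          "state",
		Name:          "example",
		Version:       "v1",
		Status:        "alpha",
		Title:         "Example Store",
		// Importing a built-in profile pulls in its fields at validation time.
		BuiltInAuthenticationProfiles: []BuiltinAuthenticationProfile{{Name: "azuread"}},
		Metadata: []Metadata{{
			Name:        "connectionString",
			Description: "Connection string for the example store",
			Required:    true,
			Sensitive:   true,
			Example:     `"..."`,
		}},
	}
	fmt.Println(c.Title)
}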
pkg/metadataschema (validation):

@@ -16,13 +16,19 @@ package metadataschema
 import (
 	"errors"
 	"fmt"
+	"slices"
 	"strings"
 
-	"golang.org/x/exp/slices"
-
 	mdutils "github.com/dapr/components-contrib/metadata"
 )
 
+const (
+	bindingDirectionMetadataKey = "direction"
+	bindingDirectionInput       = "input"
+	bindingDirectionOutput      = "output"
+	bindingRouteMetadataKey     = "route"
+)
+
 // IsValid performs additional validation and returns true if the object is valid.
 func (c *ComponentMetadata) IsValid() error {
 	// Check valid component type
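The import swap above works because the standard-library slices package (available since Go 1.21, well within the go.mod bump to 1.24.1 earlier in this diff) is a drop-in replacement for the golang.org/x/exp/slices calls used here. For instance:

// Sketch: stdlib slices.Contains, as used by the capability checks below.
package main

import (
	"fmt"
	"slices"
)

func main() {
	capabilities := []string{"etag", "transactional"}
	fmt.Println(slices.Contains(capabilities, "transactional")) // true
}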
@@ -61,7 +67,7 @@ func (c *ComponentMetadata) IsValid() error {
 
 	// Append built-in authentication profiles
 	for _, profile := range c.BuiltInAuthenticationProfiles {
-		appendProfiles, err := ParseBuiltinAuthenticationProfile(profile)
+		appendProfiles, err := ParseBuiltinAuthenticationProfile(profile, c.Title)
 		if err != nil {
 			return err
 		}
@@ -70,6 +76,18 @@ func (c *ComponentMetadata) IsValid() error {
 	// Remove the property builtinAuthenticationProfiles now
 	c.BuiltInAuthenticationProfiles = nil
 
+	// Trim newlines from all descriptions
+	c.Description = strings.TrimSpace(c.Description)
+	for i := range c.AuthenticationProfiles {
+		c.AuthenticationProfiles[i].Description = strings.TrimSpace(c.AuthenticationProfiles[i].Description)
+		for j := range c.AuthenticationProfiles[i].Metadata {
+			c.AuthenticationProfiles[i].Metadata[j].Description = strings.TrimSpace(c.AuthenticationProfiles[i].Metadata[j].Description)
+		}
+	}
+	for i := range c.Metadata {
+		c.Metadata[i].Description = strings.TrimSpace(c.Metadata[i].Description)
+	}
+
 	return nil
 }
@@ -92,6 +110,32 @@ func (c *ComponentMetadata) AppendBuiltin() error {
 			},
 		)
 	}
+	if slices.Contains(c.Capabilities, "transactional") {
+		c.Metadata = append(c.Metadata,
+			Metadata{
+				Name:        "outboxPublishPubsub",
+				Type:        "string",
+				Description: "For outbox. Sets the name of the pub/sub component to deliver the notifications when publishing state changes",
+			},
+			Metadata{
+				Name:        "outboxPublishTopic",
+				Type:        "string",
+				Description: `For outbox. Sets the topic that receives the state changes on the pub/sub configured with "outboxPublishPubsub". The message body will be a state transaction item for an insert or update operation`,
+			},
+			Metadata{
+				Name:        "outboxPubsub",
+				Type:        "string",
+				Description: `For outbox. Sets the pub/sub component used by Dapr to coordinate the state and pub/sub transactions. If not set, the pub/sub component configured with "outboxPublishPubsub" is used. This is useful if you want to separate the pub/sub component used to send the notification state changes from the one used to coordinate the transaction`,
+				Default:     "outboxPublishPubsub",
+			},
+			Metadata{
+				Name:        "outboxDiscardWhenMissingState",
+				Description: "By setting outboxDiscardWhenMissingState to true, Dapr discards the transaction if it cannot find the state in the database and does not retry. This setting can be useful if the state store data has been deleted for any reason before Dapr was able to deliver the message and you would like Dapr to drop the items from the pub/sub and stop retrying to fetch the state",
+				Type:        "bool",
+				Default:     "false",
+			},
+		)
+	}
 
 	c.Metadata = append(c.Metadata,
 		Metadata{
@@ -134,7 +178,92 @@ func (c *ComponentMetadata) AppendBuiltin() error {
 					URL:   "https://docs.dapr.io/developing-applications/building-blocks/pubsub/howto-namespace/",
 				},
 			},
+			Metadata{
+				Name:        "allowedTopics",
+				Type:        "string",
+				Description: "A comma-separated list of allowed topics for all applications. If empty (default) apps can publish and subscribe to all topics, notwithstanding `publishingScopes` and `subscriptionScopes`.",
+				Example:     `"app1=topic1;app2=topic2,topic3"`,
+				URL: &URL{
+					Title: "Documentation",
+					URL:   "https://docs.dapr.io/developing-applications/building-blocks/pubsub/pubsub-scopes/",
+				},
+			},
+			Metadata{
+				Name:        "publishingScopes",
+				Type:        "string",
+				Description: "A semicolon-separated list of applications and comma-separated topic lists, allowing that app to publish to that list of topics. If empty (default), apps can publish to all topics.",
+				Example:     `"app1=topic1;app2=topic2,topic3;app3="`,
+				URL: &URL{
+					Title: "Documentation",
+					URL:   "https://docs.dapr.io/developing-applications/building-blocks/pubsub/pubsub-scopes/",
+				},
+			},
+			Metadata{
+				Name:        "subscriptionScopes",
+				Type:        "string",
+				Description: "A semicolon-separated list of applications and comma-separated topic lists, allowing that app to subscribe to that list of topics. If empty (default), apps can subscribe to all topics.",
+				Example:     `"app1=topic1;app2=topic2,topic3"`,
+				URL: &URL{
+					Title: "Documentation",
+					URL:   "https://docs.dapr.io/developing-applications/building-blocks/pubsub/pubsub-scopes/",
+				},
+			},
+			Metadata{
+				Name:        "protectedTopics",
+				Type:        "string",
+				Description: `A comma-separated list of topics marked as "protected" for all applications. If a topic is marked as protected then an application must be explicitly granted publish or subscribe permissions through 'publishingScopes' or 'subscriptionScopes' to publish or subscribe to it.`,
+				Example:     `"topic1,topic2"`,
+				URL: &URL{
+					Title: "Documentation",
+					URL:   "https://docs.dapr.io/developing-applications/building-blocks/pubsub/pubsub-scopes/",
+				},
+			},
 		)
+	case mdutils.BindingType:
+		if c.Binding != nil {
+			if c.Metadata == nil {
+				c.Metadata = []Metadata{}
+			}
+
+			if c.Binding.Input {
+				direction := bindingDirectionInput
+				allowedValues := []string{
+					bindingDirectionInput,
+				}
+
+				if c.Binding.Output {
+					direction = fmt.Sprintf("%s,%s", bindingDirectionInput, bindingDirectionOutput)
+					allowedValues = append(allowedValues, bindingDirectionOutput, direction)
+				}
+
+				c.Metadata = append(c.Metadata,
+					Metadata{
+						Name:        bindingDirectionMetadataKey,
+						Type:        "string",
+						Description: "Indicates the direction of the binding component.",
+						Example:     `"` + direction + `"`,
+						URL: &URL{
+							Title: "Documentation",
+							URL:   "https://docs.dapr.io/reference/api/bindings_api/#binding-direction-optional",
+						},
+						AllowedValues: allowedValues,
+					},
+				)
+
+				c.Metadata = append(c.Metadata,
+					Metadata{
+						Name:        bindingRouteMetadataKey,
+						Type:        "string",
+						Description: "Specifies a custom route for incoming events.",
+						Example:     `"/custom-path"`,
+						URL: &URL{
+							Title: "Documentation",
+							URL:   "https://docs.dapr.io/developing-applications/building-blocks/bindings/howto-triggers/#specify-a-custom-route",
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sanity check to ensure the data is in sync
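A note on the binding hunk above: the `direction` metadata advertises `input` as its example for input-only bindings, while a binding that also supports output advertises `input,output`, with both singular values plus the combined form allowed. A minimal sketch of that rule, using a simplified, hypothetical `binding` struct in place of the real types:

```go
package main

import "fmt"

// binding is a stand-in for the real Binding type in the metadata schema.
type binding struct{ Input, Output bool }

// directionValues mirrors the logic in the diff: input-only bindings get
// "input"; input+output bindings get "input,output" plus all three allowed values.
func directionValues(b binding) (example string, allowed []string) {
    if !b.Input {
        return "", nil
    }
    example = "input"
    allowed = []string{"input"}
    if b.Output {
        example = "input,output"
        allowed = append(allowed, "output", example)
    }
    return example, allowed
}

func main() {
    ex, allowed := directionValues(binding{Input: true, Output: true})
    fmt.Println(ex, allowed) // input,output [input output input,output]
}
```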
@@ -67,7 +67,7 @@
"go.toolsManagement.checkForUpdates": "local",
"go.useLanguageServer": true,
"go.gopath": "/go",
"go.buildTags": "e2e,perf,conftests,unit,integration_test,certtests",
"go.buildTags": "e2e,perf,conftests,unit,integration_test,certtests,allcomponents",
"git.alwaysSignOff": true,
"terminal.integrated.env.linux": {
    "GOLANG_PROTOBUF_REGISTRATION_CONFLICT": "ignore"
@@ -0,0 +1,6 @@
organization: dapr
defaultSticker: clrqfypv0282430gjx4hys94pc
stickers:
  -
    id: clrqfypv0282430gjx4hys94pc
    alias: components-badge
@@ -0,0 +1,26 @@
// ------------------------------------------------------------------------
// Copyright 2023 The Dapr Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ------------------------------------------------------------------------

@description('Specifies the name of the App Configuration store.')
param configStoreName string

@description('Specifies the Azure location where the app configuration store should be created.')
param location string = resourceGroup().location

resource configStore 'Microsoft.AppConfiguration/configurationStores@2021-10-01-preview' = {
  name: configStoreName
  location: location
  sku: {
    name: 'free'
  }
}
@@ -24,6 +24,9 @@ var eventHubPubsubName = 'eventhubs-pubsub-topic'
var eventHubPubsubPolicyName = '${eventHubPubsubName}-policy'
var eventHubPubsubConsumerGroupName = '${eventHubPubsubName}-cg'

var eventHubBulkPubsubName = 'eventhubs-pubsub-topic-bulk'
var eventHubBulkPubsubPolicyName = '${eventHubBulkPubsubName}-policy'

var certificationEventHubPubsubTopicActiveName = 'certification-pubsub-topic-active'
var certificationEventHubPubsubTopicActivePolicyName = '${certificationEventHubPubsubTopicActiveName}-policy'

@@ -96,6 +99,24 @@ resource eventHubsNamespace 'Microsoft.EventHub/namespaces@2017-04-01' = {
      name: eventHubPubsubConsumerGroupName
    }
  }
  resource eventHubBulkPubsub 'eventhubs' = {
    name: eventHubBulkPubsubName
    properties: {
      messageRetentionInDays: 1
    }
    resource eventHubBulkPubsubPolicy 'authorizationRules' = {
      name: eventHubBulkPubsubPolicyName
      properties: {
        rights: [
          'Send'
          'Listen'
        ]
      }
    }
    resource eventHubPubsubConsumerGroup 'consumergroups' = {
      name: eventHubPubsubConsumerGroupName
    }
  }
  resource certificationEventHubPubsubTopicActive 'eventhubs' = {
    name: certificationEventHubPubsubTopicActiveName
    properties: {
@@ -175,6 +196,9 @@ output eventHubPubsubName string = eventHubsNamespace::eventHubPubsub.name
output eventHubPubsubPolicyName string = eventHubsNamespace::eventHubPubsub::eventHubPubsubPolicy.name
output eventHubPubsubConsumerGroupName string = eventHubsNamespace::eventHubPubsub::eventHubPubsubConsumerGroup.name

output eventHubBulkPubsubName string = eventHubsNamespace::eventHubBulkPubsub.name
output eventHubBulkPubsubPolicyName string = eventHubsNamespace::eventHubBulkPubsub::eventHubBulkPubsubPolicy.name

output eventHubsNamespacePolicyName string = eventHubsNamespace::eventHubPubsubNamespacePolicy.name
output certificationEventHubPubsubTopicActiveName string = eventHubsNamespace::certificationEventHubPubsubTopicActive.name
output certificationEventHubPubsubTopicActivePolicyName string = eventHubsNamespace::certificationEventHubPubsubTopicActive::certificationEventHubPubsubTopicActivePolicy.name
@@ -0,0 +1,68 @@
// ------------------------------------------------------------------------
// Copyright 2021 The Dapr Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ------------------------------------------------------------------------

param postgresServerName string
param sdkAuthSpId string
param sdkAuthSpName string
param rgLocation string = resourceGroup().location
param confTestTags object = {}
param postgresqlVersion string = '14'
param tenantId string = subscription().tenantId

resource postgresServer 'Microsoft.DBforPostgreSQL/flexibleServers@2023-03-01-preview' = {
  name: postgresServerName
  location: rgLocation
  tags: confTestTags
  sku: {
    name: 'Standard_B1ms'
    tier: 'Burstable'
  }
  properties: {
    storage: {
      storageSizeGB: 32
      autoGrow: 'Disabled'
    }
    authConfig: {
      activeDirectoryAuth: 'Enabled'
      passwordAuth: 'Disabled'
      tenantId: tenantId
    }
    network: {}
    version: postgresqlVersion
  }

  resource daprTestDB 'databases@2023-03-01-preview' = {
    name: 'dapr_test'
    properties: {
      charset: 'UTF8'
      collation: 'en_US.utf8'
    }
  }

  resource fwRules 'firewallRules@2023-03-01-preview' = {
    name: 'allowall'
    properties: {
      startIpAddress: '0.0.0.0'
      endIpAddress: '255.255.255.255'
    }
  }

  resource azureAdAdmin 'administrators@2023-03-01-preview' = {
    name: sdkAuthSpId
    properties: {
      principalType: 'ServicePrincipal'
      principalName: sdkAuthSpName
      tenantId: tenantId
    }
  }
}
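Since this template enables `activeDirectoryAuth` and disables `passwordAuth`, a client has to present a Microsoft Entra ID (Azure AD) access token as the PostgreSQL password. A hedged sketch of such a connection in Go, assuming the `azidentity` and `pgx` packages are used; the host and principal values below are placeholders, not outputs of this template:

```go
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
    "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
    "github.com/jackc/pgx/v5"
)

func main() {
    ctx := context.Background()

    // Hypothetical values; in the conformance setup these come from the
    // service principal created by the setup script.
    tenantID, clientID, clientSecret := "<tenant>", "<client>", "<secret>"

    cred, err := azidentity.NewClientSecretCredential(tenantID, clientID, clientSecret, nil)
    if err != nil {
        log.Fatal(err)
    }

    // A token for the Azure Database for PostgreSQL resource acts as the password.
    tok, err := cred.GetToken(ctx, policy.TokenRequestOptions{
        Scopes: []string{"https://ossrdbms-aad.database.windows.net/.default"},
    })
    if err != nil {
        log.Fatal(err)
    }

    connString := fmt.Sprintf(
        "host=myprefix-conf-test-pg.postgres.database.azure.com user=my-sp-name password=%s port=5432 database=dapr_test",
        tok.Token,
    )
    conn, err := pgx.Connect(ctx, connString)
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close(ctx)
}
```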
@@ -14,6 +14,7 @@
param sqlServerName string
param rgLocation string = resourceGroup().location
param confTestTags object = {}
@secure()
param sqlServerAdminPassword string

var sqlServerAdminName = '${sqlServerName}-admin'
@@ -15,7 +15,7 @@ param storageName string
param rgLocation string = resourceGroup().location
param confTestTags object = {}

resource storageAccount 'Microsoft.Storage/storageAccounts@2021-02-01' = {
resource storageAccount 'Microsoft.Storage/storageAccounts@2022-09-01' = {
  name: storageName
  sku: {
    name: 'Standard_RAGRS'
@@ -23,27 +23,23 @@ resource storageAccount 'Microsoft.Storage/storageAccounts@2021-02-01' = {
  kind: 'StorageV2'
  location: rgLocation
  tags: confTestTags
}

resource blobServices 'Microsoft.Storage/storageAccounts/blobServices@2021-02-01' = {
  parent: storageAccount
  name: 'default'
  properties: {
    deleteRetentionPolicy: {
      enabled: true
      days: 1
  resource blobServices 'blobServices@2022-09-01' = {
    name: 'default'
    properties: {
      deleteRetentionPolicy: {
        enabled: true
        days: 1
      }
    }
  }

  resource tableServices 'tableServices@2022-09-01' = {
    name: 'default'
    properties: {}

    resource certificationTable 'tables@2022-09-01' = {
      name: 'certificationTable'
    }
  }
}

resource tableServices 'Microsoft.Storage/storageAccounts/tableServices@2021-09-01' = {
  parent: storageAccount
  name: 'default'
  properties: {}
}

resource certificationTable 'Microsoft.Storage/storageAccounts/tableServices/tables@2021-09-01' = {
  name: 'certificationTable'
  parent: tableServices
  properties: {}
}
@@ -33,9 +33,12 @@ param adminId string

@minLength(36)
@maxLength(36)
@description('Provide the objectId of the Service Principal using secret auth with get access to secrets in Azure Key Vault.')
@description('Provide the objectId of the Service Principal using secret auth with get access to secrets in Azure Key Vault and access Azure PostgreSQL')
param sdkAuthSpId string

@description('Provide the name of the Service Principal using secret auth with get access to secrets in Azure Key Vault and access Azure PostgreSQL')
param sdkAuthSpName string

@minLength(36)
@maxLength(36)
@description('Provide the objectId of the Service Principal using cert auth with get and list access to all assets in Azure Key Vault.')
@@ -43,6 +46,7 @@ param certAuthSpId string

@minLength(16)
@description('Provide the SQL server admin password of at least 16 characters.')
@secure()
param sqlServerAdminPassword string

var confTestRgName = '${toLower(namePrefix)}-conf-test-rg'
@@ -54,7 +58,9 @@ var iotHubName = '${toLower(namePrefix)}-conf-test-iothub'
var keyVaultName = '${toLower(namePrefix)}-conf-test-kv'
var serviceBusName = '${toLower(namePrefix)}-conf-test-servicebus'
var sqlServerName = '${toLower(namePrefix)}-conf-test-sql'
var postgresServerName = '${toLower(namePrefix)}-conf-test-pg'
var storageName = '${toLower(namePrefix)}ctstorage'
var appconfigStoreName = '${toLower(namePrefix)}-conf-test-cfg'

resource confTestRg 'Microsoft.Resources/resourceGroups@2021-04-01' = {
  name: confTestRgName
@@ -72,6 +78,7 @@ module cosmosDb 'conf-test-azure-cosmosdb.bicep' = {
  params: {
    confTestTags: confTestTags
    cosmosDbName: cosmosDbName
    rgLocation: rgLocation
  }
}

@@ -81,6 +88,7 @@ module cosmosDbTable 'conf-test-azure-cosmosdb-table.bicep' = {
  params: {
    confTestTags: confTestTags
    cosmosDbTableAPIName: cosmosDbTableAPIName
    rgLocation: rgLocation
  }
}

@@ -90,6 +98,7 @@ module eventGridTopic 'conf-test-azure-eventgrid.bicep' = {
  params: {
    confTestTags: confTestTags
    eventGridTopicName: eventGridTopicName
    rgLocation: rgLocation
  }
}

@@ -99,6 +108,7 @@ module eventHubsNamespace 'conf-test-azure-eventhubs.bicep' = {
  params: {
    confTestTags: confTestTags
    eventHubsNamespaceName: eventHubsNamespaceName
    rgLocation: rgLocation
  }
}

@@ -108,6 +118,7 @@ module iotHub 'conf-test-azure-iothub.bicep' = {
  params: {
    confTestTags: confTestTags
    iotHubName: iotHubName
    rgLocation: rgLocation
  }
}

@@ -120,6 +131,7 @@ module keyVault 'conf-test-azure-keyvault.bicep' = {
    certAuthSpId: certAuthSpId
    keyVaultName: keyVaultName
    sdkAuthSpId: sdkAuthSpId
    rgLocation: rgLocation
  }
}

@@ -129,6 +141,7 @@ module serviceBus 'conf-test-azure-servicebus.bicep' = {
  params: {
    confTestTags: confTestTags
    serviceBusName: serviceBusName
    rgLocation: rgLocation
  }
}

@@ -139,6 +152,7 @@ module sqlServer 'conf-test-azure-sqlserver.bicep' = {
    confTestTags: confTestTags
    sqlServerName: sqlServerName
    sqlServerAdminPassword: sqlServerAdminPassword
    rgLocation: rgLocation
  }
}

@@ -148,6 +162,28 @@ module storage 'conf-test-azure-storage.bicep' = {
  params: {
    confTestTags: confTestTags
    storageName: storageName
    rgLocation: rgLocation
  }
}

module postgres 'conf-test-azure-postgres.bicep' = {
  name: postgresServerName
  scope: resourceGroup(confTestRg.name)
  params: {
    confTestTags: confTestTags
    postgresServerName: postgresServerName
    sdkAuthSpId: sdkAuthSpId
    sdkAuthSpName: sdkAuthSpName
    rgLocation: rgLocation
  }
}

module appconfig 'conf-test-azure-appconfig.bicep' = {
  name: appconfigStoreName
  scope: resourceGroup(confTestRg.name)
  params: {
    configStoreName: appconfigStoreName
    location: rgLocation
  }
}

@@ -176,4 +212,6 @@ output keyVaultName string = keyVault.name
output serviceBusName string = serviceBus.name
output sqlServerName string = sqlServer.name
output sqlServerAdminName string = sqlServer.outputs.sqlServerAdminName
output postgresServerName string = postgres.name
output storageName string = storage.name
output appconfigName string = appconfig.name
@@ -229,11 +229,18 @@ SQL_SERVER_NAME_VAR_NAME="AzureSqlServerName"
SQL_SERVER_DB_NAME_VAR_NAME="AzureSqlServerDbName"
SQL_SERVER_CONNECTION_STRING_VAR_NAME="AzureSqlServerConnectionString"

AZURE_DB_POSTGRES_CONNSTRING_VAR_NAME="AzureDBPostgresConnectionString"
AZURE_DB_POSTGRES_CLIENT_ID_VAR_NAME="AzureDBPostgresClientId"
AZURE_DB_POSTGRES_CLIENT_SECRET_VAR_NAME="AzureDBPostgresClientSecret"
AZURE_DB_POSTGRES_TENANT_ID_VAR_NAME="AzureDBPostgresTenantId"

STORAGE_ACCESS_KEY_VAR_NAME="AzureBlobStorageAccessKey"
STORAGE_ACCOUNT_VAR_NAME="AzureBlobStorageAccount"
STORAGE_CONTAINER_VAR_NAME="AzureBlobStorageContainer"
STORAGE_QUEUE_VAR_NAME="AzureBlobStorageQueue"

AZURE_APP_CONFIG_NAME_VAR_NAME="AzureAppConfigName"

# Derived variables
if [[ -z "${ADMIN_ID}" ]]; then
  # If the user did not pass an admin ID, look it up in the directory
@@ -269,7 +276,7 @@ if [[ -n ${CREDENTIALS_PATH} ]]; then
  fi
  SDK_AUTH_SP_NAME="$(az ad sp show --id "${SDK_AUTH_SP_APPID}" --query "appDisplayName" --output tsv)"
  SDK_AUTH_SP_ID="$(az ad sp show --id "${SDK_AUTH_SP_APPID}" --query "id" --output tsv)"
  echo "Using Service Principal from ${CREDENTIALS_PATH} for SDK Auth: ${SDK_AUTH_SP_NAME}"
  echo "Using Service Principal from ${CREDENTIALS_PATH} for SDK Auth: ${SDK_AUTH_SP_NAME} (ID: ${SDK_AUTH_SP_ID})"
else
  SDK_AUTH_SP_NAME="${PREFIX}-conf-test-runner-sp"
  SDK_AUTH_SP_INFO="$(az ad sp create-for-rbac --name "${SDK_AUTH_SP_NAME}" --sdk-auth --years 1)"
@@ -277,7 +284,7 @@ else
  SDK_AUTH_SP_CLIENT_SECRET="$(echo "${SDK_AUTH_SP_INFO}" | jq -r '.clientSecret')"
  SDK_AUTH_SP_ID="$(az ad sp list --display-name "${SDK_AUTH_SP_NAME}" --query "[].id" --output tsv)"
  echo "${SDK_AUTH_SP_INFO}"
  echo "Created Service Principal for SDK Auth: ${SDK_AUTH_SP_NAME}"
  echo "Created Service Principal for SDK Auth: ${SDK_AUTH_SP_NAME} (ID: ${SDK_AUTH_SP_ID})"
  AZURE_CREDENTIALS_FILENAME="${OUTPUT_PATH}/AZURE_CREDENTIALS"
  echo "${SDK_AUTH_SP_INFO}" > "${AZURE_CREDENTIALS_FILENAME}"
fi
@@ -292,7 +299,17 @@ echo "Building conf-test-azure.bicep to ${ARM_TEMPLATE_FILE} ..."
az bicep build --file conf-test-azure.bicep --outfile "${ARM_TEMPLATE_FILE}"

echo "Creating azure deployment ${DEPLOY_NAME} in ${DEPLOY_LOCATION} and resource prefix ${PREFIX}-* ..."
az deployment sub create --name "${DEPLOY_NAME}" --location "${DEPLOY_LOCATION}" --template-file "${ARM_TEMPLATE_FILE}" -p namePrefix="${PREFIX}" -p adminId="${ADMIN_ID}" -p certAuthSpId="${CERT_AUTH_SP_ID}" -p sdkAuthSpId="${SDK_AUTH_SP_ID}" -p rgLocation="${DEPLOY_LOCATION}" -p sqlServerAdminPassword="${SQL_SERVER_ADMIN_PASSWORD}"
az deployment sub create \
  --name "${DEPLOY_NAME}" \
  --location "${DEPLOY_LOCATION}" \
  --template-file "${ARM_TEMPLATE_FILE}" \
  -p namePrefix="${PREFIX}" \
  -p adminId="${ADMIN_ID}" \
  -p certAuthSpId="${CERT_AUTH_SP_ID}" \
  -p sdkAuthSpId="${SDK_AUTH_SP_ID}" \
  -p sdkAuthSpName="${SDK_AUTH_SP_NAME}" \
  -p rgLocation="${DEPLOY_LOCATION}" \
  -p sqlServerAdminPassword="${SQL_SERVER_ADMIN_PASSWORD}"

echo "Sleeping for 5s to allow created ARM deployment info to propagate to query endpoints ..."
sleep 5
@@ -348,6 +365,8 @@ CERTIFICATION_EVENT_HUB_PUB_SUB_TOPICMULTI2_NAME="$(az deployment sub show --nam
echo "INFO: CERTIFICATION_EVENT_HUB_PUB_SUB_TOPICMULTI2_NAME=${CERTIFICATION_EVENT_HUB_PUB_SUB_TOPICMULTI2_NAME}"
#end

AZURE_APP_CONFIG_NAME="$(az deployment sub show --name "${DEPLOY_NAME}" --query "properties.outputs.appconfigName.value" --output tsv)"

IOT_HUB_NAME="$(az deployment sub show --name "${DEPLOY_NAME}" --query "properties.outputs.iotHubName.value" --output tsv)"
echo "INFO: IOT_HUB_NAME=${IOT_HUB_NAME}"
IOT_HUB_BINDINGS_CONSUMER_GROUP_FULLNAME="$(az deployment sub show --name "${DEPLOY_NAME}" --query "properties.outputs.iotHubBindingsConsumerGroupName.value" --output tsv)"
@@ -382,7 +401,7 @@ echo "Created Identity ${MANAGED_IDENTITY_ID}"
# az container create -g ${RESOURCE_GROUP_NAME} -n testcontainer --image golang:latest --command-line "tail -f /dev/null" --assign-identity $MANAGED_IDENTITY_ID

echo "Granting identity azure-managed-identity permissions to access the Key Vault ${KEYVAULT_NAME}"
az keyvault set-policy --name "${KEYVAULT_NAME}" -g "${RESOURCE_GROUP_NAME}" --secret-permissions get list --object-id "${MANAGED_IDENTITY_SP}"
az keyvault set-policy --name "${KEYVAULT_NAME}" -g "${RESOURCE_GROUP_NAME}" --secret-permissions get list --certificate-permissions get list --key-permissions all --object-id "${MANAGED_IDENTITY_SP}"
# Other tests verifying managed identity will want to grant permission like so:
# MSYS_NO_PATHCONV=1 az role assignment create --assignee-object-id "${MANAGED_IDENTITY_SP}" --assignee-principal-type ServicePrincipal --role "Azure Service Bus Data Owner" --scope "/subscriptions/${SUB_ID}/resourceGroups/${RESOURCE_GROUP_NAME}/providers/Microsoft.ServiceBus/namespaces/${SERVICE_BUS_NAME}"
@@ -393,7 +412,7 @@ echo "Creating service principal ${AKV_SPAUTH_SP_NAME} for use with KeyVault ${K

# Give the service principal read access to the KeyVault Secrets
AKV_SPAUTH_SP_OBJECTID="$(az ad sp show --id ${AKV_SPAUTH_SP_CLIENT_ID} --query id -otsv)"
az keyvault set-policy --name "${KEYVAULT_NAME}" -g "${RESOURCE_GROUP_NAME}" --secret-permissions get list --object-id "${AKV_SPAUTH_SP_OBJECTID}"
az keyvault set-policy --name "${KEYVAULT_NAME}" -g "${RESOURCE_GROUP_NAME}" --secret-permissions get list --certificate-permissions get list --key-permissions all --object-id "${AKV_SPAUTH_SP_OBJECTID}"

# Update service principal credentials and roles for created resources
echo "Creating ${CERT_AUTH_SP_NAME} certificate ..."
@@ -546,6 +565,7 @@ az keyvault secret set --name "${KEYVAULT_SERVICE_PRINCIPAL_CLIENT_ID_VAR_NAME}"
KEYVAULT_SERVICE_PRINCIPAL_CLIENT_SECRET=${AKV_SPAUTH_SP_CLIENT_SECRET}
echo export ${KEYVAULT_SERVICE_PRINCIPAL_CLIENT_SECRET_VAR_NAME}=\"${KEYVAULT_SERVICE_PRINCIPAL_CLIENT_SECRET}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${KEYVAULT_SERVICE_PRINCIPAL_CLIENT_SECRET_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${KEYVAULT_SERVICE_PRINCIPAL_CLIENT_SECRET}"

# ------------------------------------
# Populate Blob Storage test settings
# ------------------------------------
@@ -671,6 +691,24 @@ SQL_SERVER_CONNECTION_STRING="Server=${SQL_SERVER_NAME}.database.windows.net;por
echo export ${SQL_SERVER_CONNECTION_STRING_VAR_NAME}=\"${SQL_SERVER_CONNECTION_STRING}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${SQL_SERVER_CONNECTION_STRING_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${SQL_SERVER_CONNECTION_STRING}"

# ----------------------------------
# Populate Azure Database for PostgreSQL test settings
# ----------------------------------
echo "Configuring Azure Database for PostgreSQL test settings ..."

AZURE_DB_POSTGRES_CONNSTRING="host=${PREFIX}-conf-test-pg.postgres.database.azure.com user=${SDK_AUTH_SP_NAME} port=5432 connect_timeout=30 database=dapr_test"
echo export ${AZURE_DB_POSTGRES_CONNSTRING_VAR_NAME}=\"${AZURE_DB_POSTGRES_CONNSTRING}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${AZURE_DB_POSTGRES_CONNSTRING_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${AZURE_DB_POSTGRES_CONNSTRING}"

echo export ${AZURE_DB_POSTGRES_CLIENT_ID_VAR_NAME}=\"${SDK_AUTH_SP_APPID}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${AZURE_DB_POSTGRES_CLIENT_ID_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${SDK_AUTH_SP_APPID}"

echo export ${AZURE_DB_POSTGRES_CLIENT_SECRET_VAR_NAME}=\"${SDK_AUTH_SP_CLIENT_SECRET}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${AZURE_DB_POSTGRES_CLIENT_SECRET_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${SDK_AUTH_SP_CLIENT_SECRET}"

echo export ${AZURE_DB_POSTGRES_TENANT_ID_VAR_NAME}=\"${TENANT_ID}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${AZURE_DB_POSTGRES_TENANT_ID_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${TENANT_ID}"

# ----------------------------------
# Populate Event Hubs test settings
# ----------------------------------
@@ -724,6 +762,11 @@ EVENT_HUBS_PUBSUB_CONTAINER_NAME="${PREFIX}-eventhubs-pubsub-container"
echo export ${EVENT_HUBS_PUBSUB_CONTAINER_VAR_NAME}=\"${EVENT_HUBS_PUBSUB_CONTAINER_NAME}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${EVENT_HUBS_PUBSUB_CONTAINER_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${EVENT_HUBS_PUBSUB_CONTAINER_NAME}"

# ------------------------------
# Populate Azure App config info
# ------------------------------
echo export ${AZURE_APP_CONFIG_NAME_VAR_NAME}=\"${AZURE_APP_CONFIG_NAME}\" >> "${ENV_CONFIG_FILENAME}"
az keyvault secret set --name "${AZURE_APP_CONFIG_NAME_VAR_NAME}" --vault-name "${KEYVAULT_NAME}" --value "${AZURE_APP_CONFIG_NAME}"
# ----------------------------------
# Populate IoT Hub test settings
# ----------------------------------
@@ -765,6 +808,8 @@ az role assignment create --assignee "${CERTIFICATION_SPAUTH_SP_PRINCIPAL_ID}" -
# Azure Service Bus
ASB_ID=$(az servicebus namespace show --resource-group "${RESOURCE_GROUP_NAME}" --name "${SERVICE_BUS_NAME}" --query "id" -otsv)
az role assignment create --assignee "${CERTIFICATION_SPAUTH_SP_PRINCIPAL_ID}" --role "Azure Service Bus Data Owner" --scope "${ASB_ID}"
# Azure App Config
az role assignment create --assignee "${CERTIFICATION_SPAUTH_SP_PRINCIPAL_ID}" --role "App Configuration Data Owner" --scope "/subscriptions/${SUB_ID}/resourceGroups/${RESOURCE_GROUP_NAME}/providers/Microsoft.AppConfiguration/configurationStores/${AZURE_APP_CONFIG_NAME}"

# Now export the service principal information
CERTIFICATION_TENANT_ID="$(az ad sp list --display-name "${CERTIFICATION_SPAUTH_SP_NAME}" --query "[].appOwnerOrganizationId" --output tsv)"
@@ -1,39 +0,0 @@
Use the `docker.yaml` file to override the default dynamic config values (they are specified
when creating the service config).

Each key can have zero or more values, and each value can have zero or more
constraints. There are only three types of constraint:
1. `namespace`: `string`
2. `taskQueueName`: `string`
3. `taskType`: `int` (`1`:`Workflow`, `2`:`Activity`)
A value will be selected and returned only if it has exactly the same constraints
as the ones specified in the query filters (including the number of constraints);
a sketch of this selection rule follows the format example below.

Please use the following format:
```
testGetBoolPropertyKey:
  - value: false
  - value: true
    constraints:
      namespace: "global-samples-namespace"
  - value: false
    constraints:
      namespace: "samples-namespace"
testGetDurationPropertyKey:
  - value: "1m"
    constraints:
      namespace: "samples-namespace"
      taskQueueName: "longIdleTimeTaskqueue"
testGetFloat64PropertyKey:
  - value: 12.0
    constraints:
      namespace: "samples-namespace"
testGetMapPropertyKey:
  - value:
      key1: 1
      key2: "value 2"
      key3:
        - false
        - key4: true
          key5: 2.0
```
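A minimal Go sketch of the exact-match selection rule described above, using simplified, hypothetical types (the real implementation belongs to Temporal's server, not to this repository):

```go
package main

import "fmt"

// setting is a stand-in for one "- value / constraints" entry under a key.
type setting struct {
    value       any
    constraints map[string]any
}

// resolve returns the first value whose constraints are exactly the query
// filters: same keys, same values, and the same number of constraints.
func resolve(settings []setting, filters map[string]any) (any, bool) {
    for _, s := range settings {
        if len(s.constraints) != len(filters) {
            continue // the number of constraints must match too
        }
        match := true
        for k, v := range filters {
            if s.constraints[k] != v {
                match = false
                break
            }
        }
        if match {
            return s.value, true
        }
    }
    return nil, false
}

func main() {
    testGetBool := []setting{
        {value: false, constraints: map[string]any{}},
        {value: true, constraints: map[string]any{"namespace": "global-samples-namespace"}},
        {value: false, constraints: map[string]any{"namespace": "samples-namespace"}},
    }
    v, ok := resolve(testGetBool, map[string]any{"namespace": "global-samples-namespace"})
    fmt.Println(v, ok) // true true
}
```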
@@ -1,3 +0,0 @@
system.forceSearchAttributesCacheRefreshOnRead:
  - value: true # Dev setup only. Please don't turn this on in production.
    constraints: {}
@@ -1,6 +0,0 @@
limit.maxIDLength:
  - value: 255
    constraints: {}
system.forceSearchAttributesCacheRefreshOnRead:
  - value: true # Dev setup only. Please don't turn this on in production.
    constraints: {}
@@ -1,20 +0,0 @@
# The base go-image
FROM golang:1.18-alpine

# Create a directory for the app
RUN mkdir /app

# Copy all files from the current directory to the app directory
COPY . /app

# Set working directory
WORKDIR /app

RUN go get

# Run command as described:
# go build will build an executable file named server in the current directory
RUN go build -o server .

# Run the server executable
CMD [ "/app/server" ]
@@ -1,35 +0,0 @@
module github/dapr/workflow/worker

go 1.20

require (
    github.com/zouyx/agollo/v3 v3.4.5
    go.temporal.io/sdk v1.21.1
)

require (
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/facebookgo/clock v0.0.0-20150410010913-600d898af40a // indirect
    github.com/gogo/googleapis v1.4.1 // indirect
    github.com/gogo/protobuf v1.3.2 // indirect
    github.com/gogo/status v1.1.1 // indirect
    github.com/golang/mock v1.6.0 // indirect
    github.com/golang/protobuf v1.5.2 // indirect
    github.com/google/uuid v1.3.0 // indirect
    github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect
    github.com/pborman/uuid v1.2.1 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/robfig/cron v1.2.0 // indirect
    github.com/stretchr/objx v0.5.0 // indirect
    github.com/stretchr/testify v1.8.2 // indirect
    go.temporal.io/api v1.18.1 // indirect
    go.uber.org/atomic v1.10.0 // indirect
    golang.org/x/net v0.7.0 // indirect
    golang.org/x/sys v0.5.0 // indirect
    golang.org/x/text v0.7.0 // indirect
    golang.org/x/time v0.3.0 // indirect
    google.golang.org/genproto v0.0.0-20230223222841-637eb2293923 // indirect
    google.golang.org/grpc v1.53.0 // indirect
    google.golang.org/protobuf v1.28.1 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
)

File diff suppressed because it is too large
@@ -1,91 +0,0 @@
/*
Copyright 2021 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package main

import (
    "context"
    "time"

    "github.com/zouyx/agollo/v3/component/log"
    "go.temporal.io/sdk/activity"
    "go.temporal.io/sdk/client"
    "go.temporal.io/sdk/worker"
    "go.temporal.io/sdk/workflow"
)

func main() {
    // Sleep for a bit so the docker container can spin up
    time.Sleep(30 * time.Second)
    TaskQueueString := "TestTaskQueue"

    // construct client here
    cOpt := client.Options{}
    cOpt.HostPort = "temporal:7233"
    cOpt.Identity = "TemporalTestClient"
    // Create the workflow client
    clientTwo, err := client.Dial(cOpt)
    if err != nil {
        log.Error("Unable to create client.")
        return
    }
    wOpt := worker.Options{}
    // Make default options for task q and worker options and workflow options
    w := worker.New(clientTwo, TaskQueueString, wOpt)

    // Register workflows and activities
    w.RegisterWorkflow(TestWorkflow)
    w.RegisterActivity(ExampleActivity)

    err = w.Start()
    if err != nil {
        log.Error("Unable to start worker.")
        return
    }
    w.Run(worker.InterruptCh())
}

func TestWorkflow(ctx workflow.Context, runtimeSeconds int) error {
    options := workflow.ActivityOptions{
        TaskQueue:              "TestTaskQueue",
        ScheduleToCloseTimeout: time.Second * 60,
        ScheduleToStartTimeout: time.Second * 60,
        StartToCloseTimeout:    time.Second * 60,
        HeartbeatTimeout:       time.Second * 5,
        WaitForCancellation:    false,
    }

    ctx = workflow.WithActivityOptions(ctx, options)
    err := workflow.ExecuteActivity(ctx, ExampleActivity, runtimeSeconds).Get(ctx, nil)
    if err != nil {
        log.Error("Unable to execute activity.")
        return err
    }

    return nil
}

func ExampleActivity(ctx context.Context, runtimeSeconds int) error {
    counter := 0
    for i := 0; i <= runtimeSeconds; i++ {

        select {
        case <-time.After(1 * time.Second):
            counter++
            activity.RecordHeartbeat(ctx, "")
        case <-ctx.Done():
            return nil
        }
    }
    return nil
}
@@ -2,7 +2,7 @@ version: '2'
services:
  db:
    image: mysql:8
    command: --default-authentication-plugin=mysql_native_password
    command: --mysql_native_password=ON
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: root
@@ -1,7 +0,0 @@
version: '2'
services:
  natsstreaming:
    image: nats-streaming:latest
    ports:
      - "4222:4222"
      - "8222:8222"
@@ -1,7 +1,7 @@
version: '2'
services:
  db:
    image: postgres:15
    image: postgres:15-alpine
    restart: always
    ports:
      - "5432:5432"
@@ -0,0 +1,15 @@
version: "3.8"

services:
  localstack:
    container_name: "conformance-aws-secrets-manager"
    image: localstack/localstack
    ports:
      - "127.0.0.1:4566:4566"
    environment:
      - DEBUG=1
      - DOCKER_HOST=unix:///var/run/docker.sock
    volumes:
      - "${PWD}/.github/scripts/docker-compose-init/init-conformance-state-aws-secrets-manager.sh:/etc/localstack/init/ready.d/init-aws.sh" # ready hook
      - "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack"
      - "/var/run/docker.sock:/var/run/docker.sock"
@@ -9,9 +9,13 @@ services:
    shm_size: 1g
    ulimits:
      core: -1
      # Setting nofile to 4096 and hard to 1048576, as recommended by Solace documentation
      # Otherwise, the container will have an error and crash with:
      # ERROR POST Violation [022]:Required system resource missing, Hard resource limit nofile 1048576 is required, 6592 detected
      # https://docs.solace.com/Software-Broker/System-Resource-Requirements.htm#concurrent-open-files-considerations
      nofile:
        soft: 2448
        hard: 6592
        soft: 4096
        hard: 1048576
    deploy:
      restart_policy:
        condition: on-failure
@@ -1,4 +1,4 @@
version: '2'
version: '3'
services:
  sqlserver:
    image: mcr.microsoft.com/mssql/server:2019-latest
@@ -1,40 +0,0 @@
version: "3.5"
services:
  worker:
    build: ./conformance/temporal/worker
    networks:
      - temporal-network
    depends_on:
      - temporal
  postgresql:
    container_name: temporal-postgresql
    environment:
      POSTGRES_PASSWORD: temporal
      POSTGRES_USER: temporal
    image: postgres:13
    networks:
      - temporal-network
    ports:
      - 5432:5432
  temporal:
    container_name: temporal
    depends_on:
      - postgresql
    environment:
      - DB=postgresql
      - DB_PORT=5432
      - POSTGRES_USER=temporal
      - POSTGRES_PWD=temporal
      - POSTGRES_SEEDS=postgresql
      - DYNAMIC_CONFIG_FILE_PATH=config/dynamicconfig/development-sql.yaml
    image: temporalio/auto-setup:1.17.1
    networks:
      - temporal-network
    ports:
      - 7233:7233
    volumes:
      - ./conformance/temporal/server/dynamicconfig:/etc/temporal/config/dynamicconfig
networks:
  temporal-network:
    driver: bridge
    name: temporal-network
.github/infrastructure/terraform/conformance/secretstores/aws/secretsmanager/secretsmanager.tf (vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
terraform {
  required_version = ">=0.13"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 4.0"
    }
  }
}

variable "TIMESTAMP" {
  type        = string
  description = "Timestamp of the GitHub workflow run."
}

variable "UNIQUE_ID" {
  type        = string
  description = "Unique ID of the GitHub workflow run."
}

provider "aws" {
  region = "us-east-1"
  default_tags {
    tags = {
      Purpose   = "AutomatedConformanceTesting"
      Timestamp = "${var.TIMESTAMP}"
    }
  }
}

# Create the first secret in AWS Secrets Manager
resource "aws_secretsmanager_secret" "conftestsecret" {
  name                    = "conftestsecret"
  description             = "Secret for conformance test"
  recovery_window_in_days = 0
}

resource "aws_secretsmanager_secret_version" "conftestsecret_value" {
  secret_id     = aws_secretsmanager_secret.conftestsecret.id
  secret_string = "abcd"
}

# Create the second secret in AWS Secrets Manager
resource "aws_secretsmanager_secret" "secondsecret" {
  name                    = "secondsecret"
  description             = "Another secret for conformance test"
  recovery_window_in_days = 0
}

resource "aws_secretsmanager_secret_version" "secondsecret_value" {
  secret_id     = aws_secretsmanager_secret.secondsecret.id
  secret_string = "efgh"
}
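Once applied, the two secrets can be read back for verification; a sketch using aws-sdk-go v1, assuming credentials are supplied through the usual environment variables and the region matches the provider block above:

```go
package main

import (
    "fmt"
    "log"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/secretsmanager"
)

func main() {
    // Credentials are resolved by the SDK's default chain (env vars,
    // shared config, instance role, etc.).
    sess := session.Must(session.NewSession(&aws.Config{
        Region: aws.String("us-east-1"),
    }))
    svc := secretsmanager.New(sess)

    for _, name := range []string{"conftestsecret", "secondsecret"} {
        out, err := svc.GetSecretValue(&secretsmanager.GetSecretValueInput{
            SecretId: aws.String(name),
        })
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(name, "=", aws.StringValue(out.SecretString))
    }
}
```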
@@ -34,6 +34,10 @@ resource "aws_dynamodb_table" "conformance_test_basic_table" {
  billing_mode   = "PROVISIONED"
  read_capacity  = "10"
  write_capacity = "10"
  ttl {
    attribute_name = "expiresAt"
    enabled        = true
  }
  attribute {
    name = "key"
    type = "S"
@@ -54,16 +54,16 @@ Before this module can be used on a project, you must ensure that the following
$ terraform init

$ terraform refresh -var="gh_repo=dapr/components-contrib" \
  -var="project_id=dapr-tests" -var="service_account=comp-contrib-wif" \
  -var="wif_pool_name=contrib-cert-tests"
  -var="project_id=dapr-tests" -var="service_account=dapr-contrib-wif-sa" \
  -var="wif_pool_name=dapr-contrib-cert-tests"

$ terraform plan -var="gh_repo=dapr/components-contrib" \
  -var="project_id=dapr-tests" -var="service_account=comp-contrib-wif" \
  -var="wif_pool_name=contrib-cert-tests"
  -var="project_id=dapr-tests" -var="service_account=dapr-contrib-wif-sa" \
  -var="wif_pool_name=dapr-contrib-cert-tests"

$ terraform apply --auto-approve -var="gh_repo=dapr/components-contrib" \
  -var="project_id=dapr-tests" -var="service_account=comp-contrib-wif" \
  -var="wif_pool_name=contrib-cert-tests"
  -var="project_id=dapr-tests" -var="service_account=dapr-contrib-wif-sa" \
  -var="wif_pool_name=dapr-contrib-cert-tests"
```

@@ -72,7 +72,7 @@ $ terraform apply --auto-approve -var="gh_repo=dapr/components-contrib" \
```
$ terraform output

pool_name = "projects/***/locations/global/workloadIdentityPools/contrib-cert-tests-gh-pool"
provider_name = "projects/***/locations/global/workloadIdentityPools/contrib-cert-tests-gh-pool/providers/contrib-cert-tests-gh-provider"
pool_name = "projects/***/locations/global/workloadIdentityPools/dapr-contrib-cert-tests-pool"
provider_name = "projects/***/locations/global/workloadIdentityPools/dapr-contrib-cert-tests-pool/providers/dapr-contrib-cert-tests-provider"
sa_email = "***"
```
@@ -17,8 +17,8 @@ module "oidc" {
  source      = "terraform-google-modules/github-actions-runners/google//modules/gh-oidc"
  version     = "~> 3.1.1"
  project_id  = var.project_id
  pool_id     = "${var.wif_pool_name}-gh-pool"
  provider_id = "${var.wif_pool_name}-gh-provider"
  pool_id     = "${var.wif_pool_name}-pool"
  provider_id = "${var.wif_pool_name}-provider"
  sa_mapping = {
    (google_service_account.sa.account_id) = {
      sa_name = google_service_account.sa.name
@@ -4,4 +4,4 @@ set -e

export INFLUX_TOKEN=$(openssl rand -base64 32)
echo "INFLUX_TOKEN=$INFLUX_TOKEN" >> $GITHUB_ENV
docker-compose -f .github/infrastructure/docker-compose-influxdb.yml -p influxdb up -d
docker compose -f .github/infrastructure/docker-compose-influxdb.yml -p influxdb up -d
.github/scripts/components-scripts/conformance-secretstores.aws.secretsmanager.secretsmanager-destroy.sh (vendored, new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/bin/sh

set +e

# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/conformance/secretstores/aws/secretsmanager"

# Run Terraform
terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
.github/scripts/components-scripts/conformance-secretstores.aws.secretsmanager.secretsmanager-setup.sh (vendored, new executable file, 15 lines)
@@ -0,0 +1,15 @@
#!/bin/sh

set -e

# Set variables for GitHub Actions
echo "AWS_REGION=us-east-1" >> $GITHUB_ENV

# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/conformance/secretstores/aws/secretsmanager"

# Run Terraform
terraform init
terraform validate -no-color
terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
@@ -4,14 +4,14 @@ set -e

# Rebuild the Worker
(
  cd internal/component/cloudflare/worker-src;
  cd common/component/cloudflare/worker-src;
  npm ci;
  npm run build;
)

# Check that the code of the worker is correct
git diff --exit-code ./internal/component/cloudflare/workers/code \
  || (echo "The source code of the Cloudflare Worker has changed, but the Worker has not been recompiled. Please re-compile the Worker by running 'npm ci && npm run build' in 'internal/component/cloudflare/worker-src'" && exit 1)
git diff --exit-code ./common/component/cloudflare/workers/code \
  || (echo "The source code of the Cloudflare Worker has changed, but the Worker has not been recompiled. Please re-compile the Worker by running 'npm ci && npm run build' in 'common/component/cloudflare/worker-src'" && exit 1)

# Remove dashes from UNIQUE_ID
Suffix=$(echo "$UNIQUE_ID" | sed -E 's/-//g')
@@ -0,0 +1,8 @@
#!/bin/bash

set -e

FILE="$1"
PROJECT="${2:-$FILE}"

docker compose -f .github/infrastructure/docker-compose-${FILE}.yml -p ${PROJECT} logs
@@ -5,4 +5,4 @@ set -e
FILE="$1"
PROJECT="${2:-$FILE}"

docker-compose -f .github/infrastructure/docker-compose-${FILE}.yml -p ${PROJECT} up -d
docker compose -f .github/infrastructure/docker-compose-${FILE}.yml -p ${PROJECT} up -d
@@ -7,10 +7,12 @@ const owners = [
    'berndverst',
    'daixiang0',
    'DeepanshuA',
    'elena-kolevska',
    'halspang',
    'ItalyPaleAle',
    'jjcollinge',
    'joshvanl',
    'mikeee',
    'msfussell',
    'mukundansundar',
    'pkedy',
@@ -19,6 +21,7 @@ const owners = [
    'RyanLettieri',
    'shivamkm07',
    'shubham1172',
    'sicoyle',
    'skyao',
    'Taction',
    'tmacam',
@@ -94,6 +97,10 @@ async function handleIssueCommentCreate({ github, context }) {
        case '/ok-to-test':
            await cmdOkToTest(github, issue, isFromPulls)
            break
        case command.match(/^\/rerun \d+/)?.input:
            const workflowrunid = command.match(/\d+/)[0];
            await rerunWorkflow(github, issue, workflowrunid)
            break
        default:
            console.log(
                `[handleIssueCommentCreate] command ${command} not found, exiting.`
@@ -234,3 +241,18 @@ async function cmdOkToTest(github, issue, isFromPulls) {
        )
    }
}

/**
 * Rerun all failed jobs of a given workflow run ID.
 * @param {*} github GitHub object reference
 * @param {*} issue GitHub issue object
 * @param {int} workflowrunid the workflow run ID for which to rerun all failed jobs
 */
async function rerunWorkflow(github, issue, workflowrunid) {
    // Rerun all failed jobs of the specified workflow run
    const pull = await github.rest.actions.reRunWorkflowFailedJobs({
        owner: issue.owner,
        repo: issue.repo,
        run_id: workflowrunid,
    });
}
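The `case command.match(/^\/rerun \d+/)?.input:` trick above works because `String.prototype.match` returns an array whose `input` property is the whole original string, so the `case` label equals `command` exactly when the regex matched. For illustration only (the bot itself is JavaScript), the same command parsing expressed in Go:

```go
package main

import (
    "fmt"
    "regexp"
    "strconv"
)

// rerunRe matches comments of the form "/rerun <workflow-run-id>".
var rerunRe = regexp.MustCompile(`^/rerun (\d+)`)

// parseRerun extracts the workflow run ID, reporting whether the command matched.
func parseRerun(command string) (int64, bool) {
    m := rerunRe.FindStringSubmatch(command)
    if m == nil {
        return 0, false
    }
    id, err := strconv.ParseInt(m[1], 10, 64)
    return id, err == nil
}

func main() {
    fmt.Println(parseRerun("/rerun 6498392416")) // 6498392416 true
    fmt.Println(parseRerun("/ok-to-test"))       // 0 false
}
```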
|
||||
|
|
.github/scripts/docker-compose-init/init-conformance-state-aws-secrets-manager.sh (vendored, new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/bin/bash

awslocal secretsmanager create-secret \
    --name conftestsecret \
    --secret-string "abcd"

awslocal secretsmanager create-secret \
    --name secondsecret \
    --secret-string "efgh"
@@ -19,7 +19,7 @@ const components = {
        ],
        sourcePkg: [
            'bindings/azure/blobstorage',
            'internal/component/azure/blobstorage',
            'common/component/azure/blobstorage',
        ],
    },
    'bindings.azure.cosmosdb': {
@@ -73,7 +73,7 @@ const components = {
        ],
        sourcePkg: [
            'bindings/azure/eventhubs',
            'internal/component/azure/eventhubs',
            'common/component/azure/eventhubs',
        ],
    },
    'bindings.azure.servicebusqueues': {
@@ -82,7 +82,7 @@ const components = {
        requiredSecrets: ['AzureServiceBusConnectionString'],
        sourcePkg: [
            'bindings/azure/servicebusqueues',
            'internal/component/azure/servicebus',
            'common/component/azure/servicebus',
        ],
    },
    'bindings.azure.storagequeues': {
@@ -120,6 +120,12 @@ const components = {
    'bindings.dubbo': {
        certification: true,
    },
    'bindings.zeebe.command': {
        certification: true,
    },
    'bindings.zeebe.jobworker': {
        certification: true,
    },
    'bindings.http': {
        conformance: true,
    },
@@ -129,17 +135,17 @@ const components = {
    },
    'bindings.kafka': {
        certification: true,
        sourcePkg: ['bindings/kafka', 'internal/component/kafka'],
        sourcePkg: ['bindings/kafka', 'common/component/kafka'],
    },
    'bindings.kafka-confluent': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh confluent',
        sourcePkg: ['bindings/kafka', 'internal/component/kafka'],
        sourcePkg: ['bindings/kafka', 'common/component/kafka'],
    },
    'bindings.kafka-wurstmeister': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh kafka',
        sourcePkg: ['bindings/kafka', 'internal/component/kafka'],
        sourcePkg: ['bindings/kafka', 'common/component/kafka'],
    },
    'bindings.kitex': {
        certification: true,
@@ -167,9 +173,28 @@ const components = {
        sourcePkg: ['bindings/mqtt3'],
    },
    'bindings.postgres': {
        conformance: true,
        certification: true,
    },
    'bindings.postgresql.docker': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh postgresql',
        sourcePkg: [
            'bindings/postgresql',
            'common/authentication/postgresql',
        ],
    },
    'bindings.postgresql.azure': {
        conformance: true,
        requiredSecrets: [
            'AzureDBPostgresConnectionString',
            'AzureDBPostgresClientId',
            'AzureDBPostgresClientSecret',
            'AzureDBPostgresTenantId',
        ],
        sourcePkg: [
            'bindings/postgresql',
            'common/authentication/postgresql',
        ],
    },
    'bindings.rabbitmq': {
        conformance: true,
@@ -178,22 +203,45 @@ const components = {
    },
    'bindings.redis': {
        certification: true,
        sourcePkg: ['bindings/redis', 'internal/component/redis'],
        sourcePkg: ['bindings/redis', 'common/component/redis'],
    },
    'bindings.redis.v6': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh redisjson redis',
        sourcePkg: ['bindings/redis', 'internal/component/redis'],
        sourcePkg: ['bindings/redis', 'common/component/redis'],
    },
    'bindings.redis.v7': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh redis7 redis',
        sourcePkg: ['bindings/redis', 'internal/component/redis'],
        sourcePkg: ['bindings/redis', 'common/component/redis'],
    },
    'configuration.postgres': {
        conformance: true,
        certification: true,
        sourcePkg: [
            'configuration/postgresql',
            'common/authentication/postgresql',
        ],
    },
    'configuration.postgresql.docker': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh postgresql',
        sourcePkg: [
            'configuration/postgresql',
            'common/authentication/postgresql',
        ],
    },
    'configuration.postgresql.azure': {
        conformance: true,
        requiredSecrets: [
            'AzureDBPostgresConnectionString',
            'AzureDBPostgresClientId',
            'AzureDBPostgresClientSecret',
            'AzureDBPostgresTenantId',
        ],
        sourcePkg: [
            'configuration/postgresql',
            'common/authentication/postgresql',
        ],
    },
    'configuration.redis.v6': {
        conformance: true,
@@ -224,12 +272,25 @@ const components = {
    'crypto.jwks': {
        conformance: true,
    },
    'lock.redis.v6': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh redisjson redis',
        sourcePkg: ['lock/redis', 'common/component/redis'],
    },
    'lock.redis.v7': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh redis7 redis',
        sourcePkg: ['lock/redis', 'common/component/redis'],
    },
    'middleware.http.bearer': {
        certification: true,
    },
    'middleware.http.ratelimit': {
        certification: true,
    },
    'middleware.http.opa': {
        'certification': true,
    },
    'pubsub.aws.snssqs': {
        certification: true,
        requireAWSCredentials: true,
@@ -292,7 +353,7 @@ const components = {
        ],
        sourcePkg: [
            'pubsub/azure/eventhubs',
            'internal/component/azure/eventhubs',
            'common/component/azure/eventhubs',
        ],
    },
    'pubsub.azure.servicebus.queues': {
@@ -300,7 +361,7 @@ const components = {
        requiredSecrets: ['AzureServiceBusConnectionString'],
        sourcePkg: [
            'pubsub/azure/servicebus/queues',
            'internal/component/azure/servicebus',
            'common/component/azure/servicebus',
        ],
    },
    'pubsub.azure.servicebus.topics': {
@@ -315,7 +376,7 @@ const components = {
        ],
        sourcePkg: [
            'pubsub/azure/servicebus/topics',
            'internal/component/azure/servicebus',
            'common/component/azure/servicebus',
        ],
    },
    'pubsub.in-memory': {
@@ -327,17 +388,17 @@ const components = {
    },
    'pubsub.kafka': {
        certification: true,
        sourcePkg: ['pubsub/kafka', 'internal/component/kafka'],
        sourcePkg: ['pubsub/kafka', 'common/component/kafka'],
    },
    'pubsub.kafka-confluent': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh confluent',
        sourcePkg: ['pubsub/kafka', 'internal/component/kafka'],
        sourcePkg: ['pubsub/kafka', 'common/component/kafka'],
    },
    'pubsub.kafka-wurstmeister': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh kafka',
        sourcePkg: ['pubsub/kafka', 'internal/component/kafka'],
        sourcePkg: ['pubsub/kafka', 'common/component/kafka'],
    },
    'pubsub.kubemq': {
        conformance: true,
@@ -356,10 +417,6 @@ const components = {
        conformanceSetup: 'docker-compose.sh vernemq',
        sourcePkg: ['pubsub/mqtt3'],
    },
    'pubsub.natsstreaming': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh natsstreaming',
    },
    'pubsub.pulsar': {
        conformance: true,
        certification: true,
@@ -373,7 +430,7 @@ const components = {
    'pubsub.redis.v6': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh redisjson redis',
        sourcePkg: ['pubsub/redis', 'internal/component/redis'],
        sourcePkg: ['pubsub/redis', 'common/component/redis'],
    },
    // This test is currently disabled due to issues with Redis v7
    /*'pubsub.redis.v7': {
@@ -383,6 +440,7 @@ const components = {
    'pubsub.solace': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh solace',
        conformanceLogs: 'docker-compose-logs.sh solace',
    },
    'secretstores.azure.keyvault': {
        certification: true,
@@ -435,6 +493,17 @@ const components = {
        conformance: true,
        certification: true,
    },
    'secretstores.aws.secretsmanager.terraform': {
        conformance: true,
        requireAWSCredentials: true,
        requireTerraform: true,
        conformanceSetup: 'conformance-secretstores.aws.secretsmanager.secretsmanager-setup.sh',
        conformanceDestroy: 'conformance-secretstores.aws.secretsmanager.secretsmanager-destroy.sh',
    },
    'secretstores.aws.secretsmanager.docker': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh secrets-manager',
    },
    'state.aws.dynamodb': {
        certification: true,
        requireAWSCredentials: true,
@@ -455,8 +524,37 @@ const components = {
        conformanceDestroy: 'conformance-state.aws.dynamodb-destroy.sh',
        sourcePkg: 'state/aws/dynamodb',
    },
    'state.azure.blobstorage': {
    'state.azure.blobstorage.v2': {
        conformance: true,
        requiredSecrets: [
            'AzureBlobStorageAccount',
            'AzureBlobStorageAccessKey',
            'AzureCertificationTenantId',
            'AzureCertificationServicePrincipalClientId',
            'AzureCertificationServicePrincipalClientSecret',
            'AzureBlobStorageContainer',
        ],
        sourcePkg: [
            'state/azure/blobstorage',
            'common/component/azure/blobstorage',
        ],
    },
    'state.azure.blobstorage.v1': {
        conformance: true,
        requiredSecrets: [
            'AzureBlobStorageAccount',
            'AzureBlobStorageAccessKey',
            'AzureCertificationTenantId',
            'AzureCertificationServicePrincipalClientId',
            'AzureCertificationServicePrincipalClientSecret',
            'AzureBlobStorageContainer',
        ],
        sourcePkg: [
            'state/azure/blobstorage',
            'common/component/azure/blobstorage',
        ],
    },
    'state.azure.blobstorage': {
        certification: true,
        requiredSecrets: [
            'AzureBlobStorageAccount',
@@ -468,7 +566,7 @@ const components = {
        ],
        sourcePkg: [
            'state/azure/blobstorage',
            'internal/component/azure/blobstorage',
            'common/component/azure/blobstorage',
        ],
    },
    'state.azure.cosmosdb': {
@@ -532,17 +630,24 @@ const components = {
        conformanceSetup: 'conformance-state.cloudflare.workerskv-setup.sh',
        conformanceDestroy: 'conformance-state.cloudflare.workerskv-destroy.sh',
    },
    'state.cockroachdb': {
    'state.cockroachdb.v1': {
        conformance: true,
        certification: true,
        conformanceSetup: 'docker-compose.sh cockroachdb',
        sourcePkg: [
            'state/cockroachdb',
            'internal/component/postgresql',
            'internal/component/sql',
            'common/component/postgresql/interfaces',
            'common/component/postgresql/transactions',
            'common/component/postgresql/v1',
            'common/component/sql',
            'common/component/sql/migrations',
        ],
    },
    'state.etcd': {
    'state.etcd.v1': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh etcd',
    },
    'state.etcd.v2': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh etcd',
    },
@@ -561,45 +666,118 @@ const components = {
    },
    'state.mysql': {
        certification: true,
        sourcePkg: ['state/mysql', 'internal/component/sql'],
        sourcePkg: ['state/mysql', 'common/component/sql'],
    },
    'state.mysql.mariadb': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh mariadb',
        sourcePkg: ['state/mysql', 'internal/component/sql'],
        sourcePkg: ['state/mysql', 'common/component/sql'],
    },
    'state.mysql.mysql': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh mysql',
        sourcePkg: ['state/mysql', 'internal/component/sql'],
        sourcePkg: ['state/mysql', 'common/component/sql'],
    },
    'state.oracledatabase': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh oracledatabase',
    },
    'state.postgresql': {
        conformance: true,
    'state.postgresql.v1': {
        certification: true,
        sourcePkg: [
            'state/postgresql/v1',
            'common/authentication/postgresql',
            'common/component/postgresql/interfaces',
            'common/component/postgresql/transactions',
            'common/component/postgresql/v1',
            'common/component/sql',
            'common/component/sql/migrations',
        ],
    },
    'state.postgresql.v1.docker': {
        conformance: true,
        conformanceSetup: 'docker-compose.sh postgresql',
        sourcePkg: [
            'state/postgresql',
            'internal/component/postgresql',
            'internal/component/sql',
            'state/postgresql/v1',
            'common/authentication/postgresql',
            'common/component/postgresql/interfaces',
            'common/component/postgresql/transactions',
|
||||
'common/component/postgresql/v1',
|
||||
'common/component/sql',
|
||||
'common/component/sql/migrations',
|
||||
],
|
||||
},
|
||||
'state.postgresql.v1.azure': {
|
||||
conformance: true,
|
||||
requiredSecrets: [
|
||||
'AzureDBPostgresConnectionString',
|
||||
'AzureDBPostgresClientId',
|
||||
'AzureDBPostgresClientSecret',
|
||||
'AzureDBPostgresTenantId',
|
||||
],
|
||||
sourcePkg: [
|
||||
'state/postgresql/v1',
|
||||
'common/authentication/postgresql',
|
||||
'common/component/postgresql/interfaces',
|
||||
'common/component/postgresql/transactions',
|
||||
'common/component/postgresql/v1',
|
||||
'common/component/sql',
|
||||
'common/component/sql/migrations',
|
||||
],
|
||||
},
|
||||
'state.postgresql.v2': {
|
||||
certification: true,
|
||||
sourcePkg: [
|
||||
'state/postgresql/v2',
|
||||
'common/authentication/postgresql',
|
||||
'common/component/postgresql/interfaces',
|
||||
'common/component/postgresql/transactions',
|
||||
'common/component/sql',
|
||||
'common/component/sql/migrations',
|
||||
],
|
||||
},
|
||||
'state.postgresql.v2.docker': {
|
||||
conformance: true,
|
||||
conformanceSetup: 'docker-compose.sh postgresql',
|
||||
sourcePkg: [
|
||||
'state/postgresql/v2',
|
||||
'common/authentication/postgresql',
|
||||
'common/component/postgresql/interfaces',
|
||||
'common/component/postgresql/transactions',
|
||||
'common/component/sql',
|
||||
'common/component/sql/migrations',
|
||||
],
|
||||
},
|
||||
'state.postgresql.v2.azure': {
|
||||
conformance: true,
|
||||
requiredSecrets: [
|
||||
'AzureDBPostgresConnectionString',
|
||||
'AzureDBPostgresClientId',
|
||||
'AzureDBPostgresClientSecret',
|
||||
'AzureDBPostgresTenantId',
|
||||
],
|
||||
sourcePkg: [
|
||||
'state/postgresql/v2',
|
||||
'common/authentication/postgresql',
|
||||
'common/component/postgresql/interfaces',
|
||||
'common/component/postgresql/transactions',
|
||||
'common/component/sql',
|
||||
'common/component/sql/migrations',
|
||||
],
|
||||
},
|
||||
'state.redis': {
|
||||
certification: true,
|
||||
sourcePkg: ['state/redis', 'internal/component/redis'],
|
||||
sourcePkg: ['state/redis', 'common/component/redis'],
|
||||
},
|
||||
'state.redis.v6': {
|
||||
conformance: true,
|
||||
conformanceSetup: 'docker-compose.sh redisjson redis',
|
||||
sourcePkg: ['state/redis', 'internal/component/redis'],
|
||||
sourcePkg: ['state/redis', 'common/component/redis'],
|
||||
},
|
||||
'state.redis.v7': {
|
||||
conformance: true,
|
||||
conformanceSetup: 'docker-compose.sh redis7 redis',
|
||||
sourcePkg: ['state/redis', 'internal/component/redis'],
|
||||
sourcePkg: ['state/redis', 'common/component/redis'],
|
||||
},
|
||||
'state.rethinkdb': {
|
||||
conformance: true,
|
||||
|
@ -608,14 +786,14 @@ const components = {
|
|||
'state.sqlite': {
|
||||
conformance: true,
|
||||
certification: true,
|
||||
sourcePkg: ['state/sqlite', 'internal/component/sql'],
|
||||
sourcePkg: ['state/sqlite', 'common/component/sql'],
|
||||
},
|
||||
'state.sqlserver': {
|
||||
conformance: true,
|
||||
certification: true,
|
||||
conformanceSetup: 'docker-compose.sh sqlserver',
|
||||
requiredSecrets: ['AzureSqlServerConnectionString'],
|
||||
sourcePkg: ['state/sqlserver', 'internal/component/sql'],
|
||||
sourcePkg: ['state/sqlserver', 'common/component/sql'],
|
||||
},
|
||||
// 'state.gcp.firestore.docker': {
|
||||
// conformance: true,
|
||||
|
@ -632,10 +810,6 @@ const components = {
|
|||
requireGCPCredentials: true,
|
||||
certificationSetup: 'certification-state.gcp.firestore-setup.sh',
|
||||
},
|
||||
'workflows.temporal': {
|
||||
conformance: true,
|
||||
conformanceSetup: 'docker-compose.sh temporal',
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -651,6 +825,7 @@ const components = {
|
|||
* @property {boolean?} requireTerraform If true, requires Terraform
|
||||
* @property {boolean?} requireKind If true, requires KinD
|
||||
* @property {string?} conformanceSetup Setup script for conformance tests
|
||||
* @property {string?} conformanceLogs Logs script for conformance tests
|
||||
* @property {string?} conformanceDestroy Destroy script for conformance tests
|
||||
* @property {string?} certificationSetup Setup script for certification tests
|
||||
* @property {string?} certificationDestroy Destroy script for certification tests
|
||||
|
@ -672,6 +847,7 @@ const components = {
|
|||
* @property {boolean?} require-kind Requires KinD
|
||||
* @property {string?} setup-script Setup script
|
||||
* @property {string?} destroy-script Destroy script
|
||||
* @property {string?} logs-script Logs script in case of failure
|
||||
* @property {string?} nodejs-version Install the specified Node.js version if set
|
||||
* @property {string?} mongodb-version Install the specified MongoDB version if set
|
||||
* @property {string?} source-pkg Source package
|
||||
|
@ -742,6 +918,7 @@ function GenerateMatrix(testKind, enableCloudTests) {
|
|||
'require-kind': comp.requireKind ? 'true' : undefined,
|
||||
'setup-script': comp[testKind + 'Setup'] || undefined,
|
||||
'destroy-script': comp[testKind + 'Destroy'] || undefined,
|
||||
'logs-script': comp[testKind + 'Logs'] || undefined,
|
||||
'nodejs-version': comp.nodeJsVersion || undefined,
|
||||
'mongodb-version': comp.mongoDbVersion || undefined,
|
||||
'source-pkg': comp.sourcePkg
|
||||
|

@ -24,8 +24,7 @@ on:
- 'release-*'
pull_request:
branches:
# TODO: REMOVE "master" BEFORE MERGING
- 'master'
- 'main'
- 'release-*'

jobs:

@ -45,7 +44,7 @@ jobs:
fi

- name: Check out code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.CHECKOUT_REPO }}
ref: ${{ env.CHECKOUT_REF }}

@ -98,12 +97,12 @@ jobs:
run:
shell: bash

needs:
needs:
- generate-matrix

strategy:
fail-fast: false # Keep running even if one component fails
matrix:
matrix:
include: ${{ fromJson(needs.generate-matrix.outputs.test-matrix) }}

steps:

@ -121,7 +120,7 @@ jobs:
fi

- name: Check out code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.CHECKOUT_REPO }}
ref: ${{ env.CHECKOUT_REF }}

@ -225,9 +224,10 @@ jobs:

- name: Set up Go
id: setup-go
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version-file: 'go.mod'
cache: 'false'

- name: Download Go dependencies
working-directory: ${{ env.TEST_PATH }}

@ -254,12 +254,12 @@ jobs:
AWS_REGION: "${{ env.AWS_REGION }}"
run: |
echo "Running certification tests for ${{ matrix.component }} ... "
echo "Source Pacakge: " ${{ matrix.source-pkg }}
echo "Source Package: " ${{ matrix.source-pkg }}
export GOLANG_PROTOBUF_REGISTRATION_CONFLICT=ignore
set +e
gotestsum --jsonfile ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.json \
--junitfile ${{ env.TEST_OUTPUT_FILE_PREFIX }}_certification.xml --format standard-quiet -- \
-coverprofile=cover.out -covermode=set -tags=certtests -coverpkg=${{ matrix.source-pkg }}
-coverprofile=cover.out -covermode=set -tags=certtests,unit -timeout=30m -coverpkg=${{ matrix.source-pkg }}
status=$?
echo "Completed certification tests for ${{ matrix.component }} ... "
if test $status -ne 0; then

@ -292,10 +292,10 @@ jobs:
fi

- name: Upload Cert Coverage Report File
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: github.event_name == 'schedule'
with:
name: cert_code_cov
name: ${{ matrix.component }}_cert_code_cov
path: ${{ env.TEST_PATH }}/tmp/cert_code_cov_files
retention-days: 7

@ -311,10 +311,10 @@ jobs:
fi

- name: Upload result files
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: always()
with:
name: result_files
name: ${{ matrix.component }}_result_files
path: tmp/result_files
retention-days: 1

@ -334,7 +334,7 @@ jobs:
name: Post-completion
runs-on: ubuntu-22.04
if: always()
needs:
needs:
- certification
- generate-matrix
steps:

@ -349,11 +349,11 @@ jobs:

- name: Download test result artifact
if: always() && env.PR_NUMBER != ''
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
continue-on-error: true
id: testresults
with:
name: result_files
# name: not being specified which will result in all artifacts being downloaded
path: tmp/result_files

- name: Build message

@ -65,7 +65,7 @@ jobs:
GOOS: ${{ matrix.target_os }}
GOARCH: ${{ matrix.target_arch }}
GOPROXY: https://proxy.golang.org
GOLANGCI_LINT_VER: "v1.51.2"
GOLANGCI_LINT_VER: "v1.64.6"
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]

@ -97,16 +97,17 @@ jobs:
fi
- name: Check out code into the Go module directory
if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.CHECKOUT_REPO }}
ref: ${{ env.CHECKOUT_REF }}
- name: Set up Go
id: setup-go
if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version-file: 'go.mod'
cache: 'false'
- name: Cache Go modules (Linux)
if: matrix.target_os == 'linux'
uses: actions/cache@v3

@ -142,10 +143,11 @@ jobs:
run: make check-component-metadata-schema-diff
- name: Run golangci-lint
if: matrix.target_arch == 'amd64' && matrix.target_os == 'linux' && steps.skip_check.outputs.should_skip != 'true'
uses: golangci/golangci-lint-action@v3.2.0
uses: golangci/golangci-lint-action@v6.0.1
with:
version: ${{ env.GOLANGCI_LINT_VER }}
skip-cache: true
only-new-issues: true
args: --timeout 15m
- name: Run go mod tidy check diff
if: matrix.target_arch == 'amd64' && matrix.target_os == 'linux' && steps.skip_check.outputs.should_skip != 'true'

@ -17,12 +17,12 @@ on:
push:
branches:
- feature/*
- gh-readonly-queue/master/*
- gh-readonly-queue/main/*
pull_request:
branches:
- master
- main
- feature/*
- gh-readonly-queue/master/*
- gh-readonly-queue/main/*
merge_group:

jobs:

@ -33,17 +33,18 @@ jobs:
GOOS: linux
GOARCH: amd64
GOPROXY: https://proxy.golang.org
GOLANGCI_LINT_VER: "v1.51.2"
GOLANGCI_LINT_VER: "v1.64.6"
steps:
- name: Check out code into the Go module directory
if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Set up Go
id: setup-go
if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version-file: 'go.mod'
cache: 'false'
- name: Cache Go modules (Linux)
uses: actions/cache@v3
with:

@ -61,10 +62,11 @@ jobs:
run: make check-component-metadata
- name: Run golangci-lint
if: steps.skip_check.outputs.should_skip != 'true'
uses: golangci/golangci-lint-action@v3.4.0
uses: golangci/golangci-lint-action@v6.0.1
with:
version: ${{ env.GOLANGCI_LINT_VER }}
skip-cache: true
only-new-issues: true
args: --timeout 15m
- name: Run go mod tidy check diff
if: steps.skip_check.outputs.should_skip != 'true'

@ -22,12 +22,12 @@ on:
push:
branches:
- 'release-*'
- 'gh-readonly-queue/master/*'
- 'gh-readonly-queue/main/*'
pull_request:
branches:
- 'master'
- 'main'
- 'release-*'
- 'gh-readonly-queue/master/*'
- 'gh-readonly-queue/main/*'
merge_group:

jobs:

@ -47,7 +47,7 @@ jobs:
fi

- name: Check out code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.CHECKOUT_REPO }}
ref: ${{ env.CHECKOUT_REF }}

@ -100,12 +100,12 @@ jobs:
run:
shell: bash

needs:
needs:
- generate-matrix

strategy:
fail-fast: false # Keep running even if one component fails
matrix:
matrix:
include: ${{ fromJson(needs.generate-matrix.outputs.test-matrix) }}

steps:

@ -126,7 +126,7 @@ jobs:
fi

- name: Check out code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.CHECKOUT_REPO }}
ref: ${{ env.CHECKOUT_REF }}

@ -142,8 +142,6 @@ jobs:
- name: Configure conformance test and source path
run: |
TEST_COMPONENT=$(echo ${{ matrix.component }} | sed -E 's/\./\//g')
export TEST_PATH="tests/certification/${TEST_COMPONENT}"
echo "TEST_PATH=$TEST_PATH" >> $GITHUB_ENV
export SOURCE_PATH="github.com/dapr/components-contrib/${TEST_COMPONENT}"
echo "SOURCE_PATH=$SOURCE_PATH" >> $GITHUB_ENV
# converts slashes to dots in this string, so that it doesn't consider them sub-folders

@ -236,9 +234,10 @@ jobs:
mongodb-replica-set: test-rs

- name: Set up Go
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version-file: 'go.mod'
cache: 'false'

- name: Install Node.js ${{ matrix.nodejs-version }}
if: matrix.nodejs-version != ''

@ -268,7 +267,7 @@ jobs:
- name: Run tests
continue-on-error: true
run: |
set -e
set -e
KIND=$(echo ${{ matrix.component }} | cut -d. -f1)
NAME=$(echo ${{ matrix.component }} | cut -d. -f2-)
KIND_UPPER="$(tr '[:lower:]' '[:upper:]' <<< ${KIND:0:1})${KIND:1}"

@ -278,7 +277,7 @@ jobs:
fi

echo "Running tests for Test${KIND_UPPER}Conformance/${KIND}/${NAME} ... "
echo "Source Pacakge: " ${{ matrix.source-pkg }}
echo "Source Package: " ${{ matrix.source-pkg }}

set +e
gotestsum --jsonfile ${{ env.TEST_OUTPUT_FILE_PREFIX }}_conformance.json \

@ -318,6 +317,10 @@ jobs:
exit 1
fi

- name: Retrieve infrastructure failure logs
if: failure() && matrix.logs-script != ''
run: .github/scripts/components-scripts/${{ matrix.logs-script }}

- name: Prepare test result info
if: always()
run: |

@ -330,31 +333,31 @@ jobs:
fi

- name: Upload result files
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: always()
with:
name: result_files
name: ${{ matrix.component }}_result_files
path: tmp/result_files
retention-days: 1

- name: Prepare coverage report file to upload
if: github.event_name == 'schedule'
run: |
mkdir -p tmp/conf_code_cov
cp cover.out tmp/conf_code_cov/${{ env.SOURCE_PATH_LINEAR }}.out

- name: Upload coverage report file
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: github.event_name == 'schedule'
with:
name: conf_code_cov
name: ${{ matrix.component }}_conf_code_cov
path: tmp/conf_code_cov
retention-days: 7

# Upload logs for test analytics to consume
- name: Upload test results
if: always()
uses: actions/upload-artifact@master
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.component }}_conformance_test
path: ${{ env.TEST_OUTPUT_FILE_PREFIX }}_conformance.*

@ -382,11 +385,11 @@ jobs:

- name: Download test result artifact
if: always() && env.PR_NUMBER != ''
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
continue-on-error: true
id: testresults
with:
name: result_files
# name: not being specified which will result in all artifacts being downloaded
path: tmp/result_files

- name: Build message
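Note: the new "Retrieve infrastructure failure logs" step above is the consumer of the conformanceLogs field added to the components config earlier in this diff — GenerateMatrix surfaces it as matrix.logs-script, so a script such as docker-compose-logs.sh solace only runs when a conformance job fails.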

@ -30,15 +30,16 @@ jobs:
GOCOVMERGE_VER: "b5bfa59"
steps:
- name: Check out code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: ${{ env.CHECKOUT_REPO }}
ref: ${{ env.CHECKOUT_REF }}

- name: Set up Go
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version-file: 'go.mod'
cache: 'false'

- name: Setup go dependencies
run: |

@ -51,7 +52,7 @@ jobs:
with:
workflow: certification.yml
workflow_conclusion: "success"
branch: master
branch: main
event: schedule
if_no_artifact_found: error
name: cert_code_cov

@ -62,7 +63,7 @@ jobs:
with:
workflow: conformance.yml
workflow_conclusion: "success"
branch: master
branch: main
event: schedule
if_no_artifact_found: error
name: conf_code_cov

@ -116,4 +117,4 @@ jobs:
env:
SERVER_URL: ${{ secrets.DISCORD_MONITORING_WEBHOOK_URL }}

@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Install dependencies
run: pip install PyGithub
- name: Automerge and update

@ -27,7 +27,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3 # required to make the script available for next step
uses: actions/checkout@v4 # required to make the script available for next step
- name: Issue analyzer
uses: actions/github-script@v6
with:

@ -15,13 +15,13 @@ name: fossa
on:
push:
branches:
- master
- main
- release-*
tags:
- v*
pull_request:
branches:
- master
- main
- release-*
workflow_dispatch: {}
jobs:

@ -32,7 +32,7 @@ jobs:
FOSSA_API_KEY: b88e1f4287c3108c8751bf106fb46db6 # This is a push-only token that is safe to be exposed.
steps:
- name: "Checkout code"
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: "Run FOSSA Scan"
uses: fossas/fossa-action@v1.3.1 # Use a specific version if locking is preferred

@ -10,11 +10,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version-file: 'go.mod'
cache: 'false'
- name: Build component-metadata-bundle.json
run: make bundle-component-metadata
- name: Upload component-metadata-bundle.json
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
with:
files: component-metadata-bundle.json
files: component-metadata-bundle.json

.golangci.yml
@ -15,13 +15,15 @@ run:
# list of build tags, all linters use it. Default is empty list.
build-tags:
- certtests
- conftests
- metadata

# which dirs to skip: they won't be analyzed;
# can use regexp here: generated.*, regexp is applied on full path;
# default value is empty list, but next dirs are always skipped independently
# from this option's value:
# vendor$, third_party$, testdata$, examples$, Godeps$, builtin$
skip-dirs:
issues.exclude-dirs:
- ^vendor$

# which files to skip: they will be analyzed, but issues from them

@ -35,7 +37,7 @@ run:
# output configuration options
output:
# colored-line-number|line-number|json|tab|checkstyle, default is "colored-line-number"
format: tab
formats: tab

# print lines of code with issue, default is true
print-issued-lines: true

@ -58,7 +60,7 @@ linters-settings:
# [deprecated] comma-separated list of pairs of the form pkg:regex
# the regex is used to ignore names within pkg. (default "fmt:.*").
# see https://github.com/kisielk/errcheck#the-deprecated-method for details
ignore: fmt:.*,io/ioutil:^Read.*
exclude-functions: fmt:.*,io/ioutil:^Read.*

# path to a file containing a list of functions to exclude from checking
# see https://github.com/kisielk/errcheck#excluding-functions for details

@ -69,9 +71,6 @@ linters-settings:
statements: 40

govet:
# report about shadowed variables
check-shadowing: true

# settings per analyzer
settings:
printf: # analyzer name, run `go tool vet help` to see all analyzers

@ -84,6 +83,7 @@ linters-settings:
# enable or disable analyzers by name
enable:
- atomicalign
- shadow
enable-all: false
disable:
- shadow

@ -104,9 +104,6 @@ linters-settings:
gocognit:
# minimal code complexity to report, 30 by default (but we recommend 10-20)
min-complexity: 10
maligned:
# print struct with more effective memory layout or not, false by default
suggest-new: true
dupl:
# tokens count to trigger issue, 150 by default
threshold: 100

@ -116,22 +113,59 @@ linters-settings:
# minimal occurrences count to trigger, 3 by default
min-occurrences: 5
depguard:
list-type: denylist
include-go-root: false
packages-with-error-message:
- "go.uber.org/atomic": "must use sync/atomic"
- "golang.org/x/net/context": "must use context"
- "github.com/pkg/errors": "must use standard library (errors package and/or fmt.Errorf)"
- "github.com/Sirupsen/logrus": "must use github.com/dapr/kit/logger"
- "github.com/agrea/ptr": "must use github.com/dapr/kit/ptr"
- "github.com/cenkalti/backoff$": "must use github.com/cenkalti/backoff/v4"
- "github.com/cenkalti/backoff/v2": "must use github.com/cenkalti/backoff/v4"
- "github.com/cenkalti/backoff/v3": "must use github.com/cenkalti/backoff/v4"
- "github.com/dgrijalva/jwt-go": "must use github.com/lestrrat-go/jwx/v2"
- "github.com/golang-jwt/jwt$": "must use github.com/lestrrat-go/jwx/v2"
- "github.com/golang-jwt/jwt/v4": "must use github.com/lestrrat-go/jwx/v2"
- "github.com/lestrrat-go/jwx/jwa": "must use github.com/lestrrat-go/jwx/v2"
- "github.com/lestrrat-go/jwx/jwt": "must use github.com/lestrrat-go/jwx/v2"
rules:
main:
deny:
- pkg: "github.com/golang-jwt/jwt/v5"
desc: "must use github.com/lestrrat-go/jwx/v2/jwt"
- pkg: "github.com/golang-jwt/jwt/v4"
desc: "must use github.com/lestrrat-go/jwx/v2/jwt"
- pkg: "github.com/Sirupsen/logrus"
desc: "must use github.com/dapr/kit/logger"
- pkg: "github.com/agrea/ptr"
desc: "must use github.com/dapr/kit/ptr"
- pkg: "go.uber.org/atomic"
desc: "must use sync/atomic"
- pkg: "golang.org/x/net/context"
desc: "must use context"
- pkg: "github.com/pkg/errors"
desc: "must use standard library (errors package and/or fmt.Errorf)"
- pkg: "golang.org/x/exp/slices"
desc: "must use slices from standard library"
- pkg: "github.com/go-chi/chi$"
desc: "must use github.com/go-chi/chi/v5"
- pkg: "github.com/cenkalti/backoff$"
desc: "must use github.com/cenkalti/backoff/v4"
- pkg: "github.com/cenkalti/backoff/v2"
desc: "must use github.com/cenkalti/backoff/v4"
- pkg: "github.com/cenkalti/backoff/v3"
desc: "must use github.com/cenkalti/backoff/v4"
- pkg: "github.com/benbjohnson/clock"
desc: "must use k8s.io/utils/clock"
- pkg: "github.com/ghodss/yaml"
desc: "must use sigs.k8s.io/yaml"
- pkg: "gopkg.in/yaml.v2"
desc: "must use gopkg.in/yaml.v3"
- pkg: "github.com/golang-jwt/jwt"
desc: "must use github.com/lestrrat-go/jwx/v2"
- pkg: "github.com/golang-jwt/jwt/v2"
desc: "must use github.com/lestrrat-go/jwx/v2"
- pkg: "github.com/golang-jwt/jwt/v3"
desc: "must use github.com/lestrrat-go/jwx/v2"
- pkg: "github.com/golang-jwt/jwt/v4"
desc: "must use github.com/lestrrat-go/jwx/v2"
- pkg: "github.com/gogo/status"
desc: "must use google.golang.org/grpc/status"
- pkg: "github.com/gogo/protobuf"
desc: "must use google.golang.org/protobuf"
- pkg: "github.com/lestrrat-go/jwx/jwa"
desc: "must use github.com/lestrrat-go/jwx/v2"
- pkg: "github.com/lestrrat-go/jwx/jwt"
desc: "must use github.com/lestrrat-go/jwx/v2"
- pkg: "github.com/labstack/gommon/log"
desc: "must use github.com/dapr/kit/logger"
- pkg: "github.com/gobuffalo/logger"
desc: "must use github.com/dapr/kit/logger"
misspell:
# Correct spellings using locale preferences for US or UK.
# Default is to use a neutral variety of English.

@ -227,6 +261,11 @@ linters-settings:
allow-case-traling-whitespace: true
# Allow declarations (var) to be cuddled.
allow-cuddle-declarations: false
testifylint:
disable:
- float-compare
- negative-positive
- go-require

linters:
fast: false

@ -242,28 +281,23 @@ linters:
- gocyclo
- gocognit
- godox
- interfacer
- lll
- maligned
- scopelint
- unparam
- wsl
- gomnd
- mnd
- testpackage
- goerr113
- err113
- nestif
- nlreturn
- exhaustive
- exhaustruct
- noctx
- gci
- golint
- tparallel
- paralleltest
- wrapcheck
- tagliatelle
- ireturn
- exhaustivestruct
- errchkjson
- contextcheck
- gomoddirectives

@ -272,7 +306,6 @@ linters:
- varnamelen
- errorlint
- forcetypeassert
- ifshort
- maintidx
- nilnil
- predeclared

@ -285,7 +318,8 @@ linters:
- asasalint
- rowserrcheck
- sqlclosecheck
- structcheck
- deadcode
- nosnakecase
- varcheck
- goconst
- tagalign
- inamedparam
- canonicalheader
- fatcontext

@ -1 +1 @@
internal/component/cloudflare/workers/code/
common/component/cloudflare/workers/code/
@ -2,9 +2,9 @@

Thank you for your interest in Dapr!

This project welcomes contributions and suggestions. Most contributions require you to signoff on your commits via
the Developer Certificate of Origin (DCO). When you submit a pull request, a DCO-bot will automatically determine
whether you need to provide signoff for your commit. Please follow the instructions provided by DCO-bot, as pull
This project welcomes contributions and suggestions. Most contributions require you to signoff on your commits via
the Developer Certificate of Origin (DCO). When you submit a pull request, a DCO-bot will automatically determine
whether you need to provide signoff for your commit. Please follow the instructions provided by DCO-bot, as pull
requests cannot be merged until the author(s) have provided signoff to fulfill the DCO requirement.
You may find more information on the DCO requirements [below](#developer-certificate-of-origin-signing-your-work).

@ -64,7 +64,7 @@ All contributions come through pull requests. To submit a proposed change, we re

#### Use work-in-progress PRs for early feedback

A good way to communicate before investing too much time is to create a "Work-in-progress" PR and share it with your reviewers. The standard way of doing this is to add a "[WIP]" prefix in your PR's title and assign the **do-not-merge** label. This will let people looking at your PR know that it is not well baked yet.
A good way to communicate before investing too much time is to create a "Work-in-progress" PR and share it with your reviewers. The standard way of doing this is to open your PR as a draft, add a "[WIP]" prefix in your PR's title, and assign the **do-not-merge** label. This will let people looking at your PR know that it is not well baked yet.

### Developer Certificate of Origin: Signing your work

Makefile
@ -65,7 +65,7 @@ export GH_LINT_VERSION := $(shell grep 'GOLANGCI_LINT_VER:' .github/workflows/co
ifeq (,$(LINTER_BINARY))
INSTALLED_LINT_VERSION := "v0.0.0"
else
INSTALLED_LINT_VERSION=v$(shell $(LINTER_BINARY) version | grep -Eo '([0-9]+\.)+[0-9]+' - || "")
INSTALLED_LINT_VERSION=v$(shell $(LINTER_BINARY) version | grep -Eo '([0-9]+\.)+[0-9]+' - | head -1 || "")
endif

# Build tools

@ -100,6 +100,7 @@ verify-linter-version:
echo "[!] Yours: $(INSTALLED_LINT_VERSION)"; \
echo "[!] Theirs: $(GH_LINT_VERSION)"; \
echo "[!] Upgrade: curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin $(GH_LINT_VERSION)"; \
GOLANGCI_LINT=$(go env GOPATH)/bin/$(GOLANGCI_LINT) \
sleep 3; \
fi;

@ -109,15 +110,19 @@ verify-linter-version:
################################################################################
.PHONY: test
test:
CGO_ENABLED=$(CGO) go test ./... $(COVERAGE_OPTS) $(BUILDMODE) --timeout=15m
CGO_ENABLED=$(CGO) go test ./... $(COVERAGE_OPTS) $(BUILDMODE) -tags metadata --timeout=15m

################################################################################
# Target: lint #
################################################################################
.PHONY: lint
lint: verify-linter-installed verify-linter-version
# Due to https://github.com/golangci/golangci-lint/issues/580, we need to add --fix for windows
$(GOLANGCI_LINT) run --timeout=20m
ifdef LINT_BASE
@echo "LINT_BASE is set to "$(LINT_BASE)". Linter will only check diff."
$(GOLANGCI_LINT) run --timeout=20m --max-same-issues 0 --max-issues-per-linter 0 --new-from-rev $(shell git rev-parse $(LINT_BASE))
else
$(GOLANGCI_LINT) run --timeout=20m --max-same-issues 0 --max-issues-per-linter 0
endif
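With the hunk above, make lint checks the whole tree by default, while setting LINT_BASE (for example `make lint LINT_BASE=upstream/main`, an illustrative ref) makes golangci-lint report only issues introduced since that revision via --new-from-rev; zeroing --max-same-issues and --max-issues-per-linter lifts the default truncation so every finding is printed.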

################################################################################
# Target: modtidy-all #

@ -225,10 +230,10 @@ check-component-metadata:
$(RUN_BUILD_TOOLS) generate-metadata-analyzer-app --outputfile ./metadataanalyzer/main.go
cd metadataanalyzer && \
go mod init metadataanalyzer && \
go get "github.com/dapr/components-contrib@master" && \
go get "github.com/dapr/components-contrib@main" && \
go mod edit -replace "github.com/dapr/components-contrib"="../" && \
go mod tidy && \
go build . && \
go build -tags metadata . && \
rm ./go.mod && rm ./go.sum && rm ./main.go && \
./metadataanalyzer ../

@ -251,8 +256,3 @@ prettier-format:
.PHONY: conf-tests
conf-tests:
CGO_ENABLED=$(CGO) go test -v -tags=conftests -count=1 ./tests/conformance

################################################################################
# Target: e2e #
################################################################################
include tests/e2e/e2e_tests.mk
@ -1,8 +1,8 @@
# Components Contrib

[](https://github.com/dapr/components-contrib/actions/workflows/components-contrib-all.yml)
[](https://github.com/dapr/components-contrib/actions/workflows/components-contrib-all.yml)
[](https://discord.com/channels/778680217417809931/781589820128493598)
[](https://github.com/dapr/components-contrib/blob/master/LICENSE)
[](https://github.com/dapr/components-contrib/blob/main/LICENSE)
[](https://app.fossa.com/projects/custom%2B162%2Fgithub.com%2Fdapr%2Fcomponents-contrib?ref=badge_shield)

The purpose of Components Contrib is to provide open, community-driven, reusable components for building distributed applications.

@ -19,7 +19,7 @@ Available component types:
* [Name resolvers](nameresolution/README.md)
* [Configuration stores](configuration/README.md)
* [Middlewares](middleware/README.md)
* [Workflow services](workflow/README.md)
* [Workflow services](workflows/README.md)

For documentation on how components are being used in Dapr in a language/platform agnostic way, visit [Dapr Docs](https://docs.dapr.io).
|
@ -20,7 +20,7 @@ package webhook
|
|||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/dapr/components-contrib/metadata"
|
||||
kitmd "github.com/dapr/kit/metadata"
|
||||
)
|
||||
|
||||
type Settings struct {
|
||||
|
@ -30,7 +30,7 @@ type Settings struct {
|
|||
}
|
||||
|
||||
func (s *Settings) Decode(in interface{}) error {
|
||||
return metadata.DecodeMetadata(in, s)
|
||||
return kitmd.DecodeMetadata(in, s)
|
||||
}
|
||||
|
||||
func (s *Settings) Validate() error {
|
||||
|
|
|
@ -207,11 +207,10 @@ func (t *DingTalkWebhook) sendMessage(ctx context.Context, req *bindings.InvokeR
|
|||
}
|
||||
|
||||
// GetComponentMetadata returns the metadata of the component.
|
||||
func (t *DingTalkWebhook) GetComponentMetadata() map[string]string {
|
||||
func (t *DingTalkWebhook) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
|
||||
metadataStruct := Settings{}
|
||||
metadataInfo := map[string]string{}
|
||||
contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
|
||||
return metadataInfo
|
||||
return
|
||||
}
|
||||
|
||||
func getPostURL(urlPath, secret string) (string, error) {
|
||||
|
|
|
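The GetComponentMetadata hunk above (and the matching hunks in the other bindings below) switches to a named return value that GetMetadataInfoFromStructType fills through a pointer, so the body ends in a bare return. A minimal standalone sketch of that Go pattern — the names below are illustrative, not from the repo:

package main

import "fmt"

// fill stands in for GetMetadataInfoFromStructType: it allocates and
// populates the map behind the pointer it receives.
func fill(m *map[string]string) {
	*m = map[string]string{"url": "string", "secret": "string"}
}

// componentMetadata mirrors the refactored shape: a named return value is
// populated in place and then returned bare.
func componentMetadata() (metadataInfo map[string]string) {
	fill(&metadataInfo)
	return
}

func main() {
	fmt.Println(componentMetadata())
}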
@ -45,7 +45,7 @@ func TestPublishMsg(t *testing.T) { //nolint:paralleltest
}

body, err := io.ReadAll(r.Body)
require.Nil(t, err)
require.NoError(t, err)
assert.Equal(t, msg, string(body))
}))
defer ts.Close()

@ -57,11 +57,11 @@ func TestPublishMsg(t *testing.T) { //nolint:paralleltest
}}}

d := NewDingTalkWebhook(logger.NewLogger("test"))
err := d.Init(context.Background(), m)
err := d.Init(t.Context(), m)
require.NoError(t, err)

req := &bindings.InvokeRequest{Data: []byte(msg), Operation: bindings.CreateOperation, Metadata: map[string]string{}}
_, err = d.Invoke(context.Background(), req)
_, err = d.Invoke(t.Context(), req)
require.NoError(t, err)
}

@ -78,8 +78,8 @@ func TestBindingReadAndInvoke(t *testing.T) { //nolint:paralleltest
}}

d := NewDingTalkWebhook(logger.NewLogger("test"))
err := d.Init(context.Background(), m)
assert.NoError(t, err)
err := d.Init(t.Context(), m)
require.NoError(t, err)

var count int32
ch := make(chan bool, 1)

@ -92,16 +92,16 @@ func TestBindingReadAndInvoke(t *testing.T) { //nolint:paralleltest
return nil, nil
}

err = d.Read(context.Background(), handler)
err = d.Read(t.Context(), handler)
require.NoError(t, err)

req := &bindings.InvokeRequest{Data: []byte(msg), Operation: bindings.GetOperation, Metadata: map[string]string{}}
_, err = d.Invoke(context.Background(), req)
_, err = d.Invoke(t.Context(), req)
require.NoError(t, err)

select {
case <-ch:
require.True(t, atomic.LoadInt32(&count) > 0)
require.Greater(t, atomic.LoadInt32(&count), int32(0))
case <-time.After(time.Second):
require.FailNow(t, "read timeout")
}

@ -117,7 +117,7 @@ func TestBindingClose(t *testing.T) {
"id": "x",
},
}}
assert.NoError(t, d.Init(context.Background(), m))
assert.NoError(t, d.Close())
assert.NoError(t, d.Close(), "second close should not error")
require.NoError(t, d.Init(t.Context(), m))
require.NoError(t, d.Close())
require.NoError(t, d.Close(), "second close should not error")
}
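The test hunks above replace context.Background() with t.Context(), which was added in Go 1.24: it returns a per-test context that is canceled automatically just before the test finishes, so context-aware calls get cleaned up without hand-rolled cancellation. A minimal sketch assuming Go 1.24+ (doWork is a hypothetical helper, not from the repo):

package example

import (
	"context"
	"testing"
)

// doWork is a hypothetical stand-in for any context-aware call under test.
func doWork(ctx context.Context) error {
	return ctx.Err() // nil while the test is still running
}

func TestWithContext(t *testing.T) {
	ctx := t.Context() // canceled automatically as the test ends
	if err := doWork(ctx); err != nil {
		t.Fatal(err)
	}
}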
@ -24,6 +24,7 @@ import (
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
kitmd "github.com/dapr/kit/metadata"
)

// AliCloudOSS is a binding for an AliCloud OSS storage bucket.

@ -90,7 +91,7 @@ func (s *AliCloudOSS) Invoke(_ context.Context, req *bindings.InvokeRequest) (*b

func (s *AliCloudOSS) parseMetadata(meta bindings.Metadata) (*ossMetadata, error) {
var m ossMetadata
err := metadata.DecodeMetadata(meta.Properties, &m)
err := kitmd.DecodeMetadata(meta.Properties, &m)
if err != nil {
return nil, err
}

@ -108,9 +109,12 @@ func (s *AliCloudOSS) getClient(metadata *ossMetadata) (*oss.Client, error) {
}

// GetComponentMetadata returns the metadata of the component.
func (s *AliCloudOSS) GetComponentMetadata() map[string]string {
func (s *AliCloudOSS) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
metadataStruct := ossMetadata{}
metadataInfo := map[string]string{}
metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
return metadataInfo
return
}

func (s *AliCloudOSS) Close() error {
return nil
}
@ -17,6 +17,7 @@ import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/dapr/components-contrib/bindings"
)

@ -26,7 +27,7 @@ func TestParseMetadata(t *testing.T) {
m.Properties = map[string]string{"AccessKey": "key", "Endpoint": "endpoint", "AccessKeyID": "accessKeyID", "Bucket": "test"}
aliCloudOSS := AliCloudOSS{}
meta, err := aliCloudOSS.parseMetadata(m)
assert.Nil(t, err)
require.NoError(t, err)
assert.Equal(t, "key", meta.AccessKey)
assert.Equal(t, "endpoint", meta.Endpoint)
assert.Equal(t, "accessKeyID", meta.AccessKeyID)
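The assert.Nil(t, err) to require.NoError(t, err) swaps in these test hunks are behavioral, not just stylistic: testify's require variants call t.FailNow() and stop the test immediately, so assertions that dereference the result afterwards cannot panic on a nil value. A minimal sketch with hypothetical names:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

type meta struct{ AccessKey string }

// parse is a hypothetical stand-in for parseMetadata-style helpers.
func parse() (*meta, error) { return &meta{AccessKey: "key"}, nil }

func TestParse(t *testing.T) {
	m, err := parse()
	require.NoError(t, err)             // on failure: t.FailNow(), the test stops here
	assert.Equal(t, "key", m.AccessKey) // safe: m is non-nil past the require
}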
@ -3,7 +3,7 @@ package sls
import (
"context"
"encoding/json"
"fmt"
"errors"
"reflect"
"time"

@ -13,6 +13,7 @@ import (
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
kitmd "github.com/dapr/kit/metadata"
)

type AliCloudSlsLogstorage struct {

@ -60,16 +61,16 @@ func NewAliCloudSlsLogstorage(logger logger.Logger) bindings.OutputBinding {
func (s *AliCloudSlsLogstorage) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
// verify the metadata property
if logProject := req.Metadata["project"]; logProject == "" {
return nil, fmt.Errorf("SLS binding error: project property not supplied")
return nil, errors.New("SLS binding error: project property not supplied")
}
if logstore := req.Metadata["logstore"]; logstore == "" {
return nil, fmt.Errorf("SLS binding error: logstore property not supplied")
return nil, errors.New("SLS binding error: logstore property not supplied")
}
if topic := req.Metadata["topic"]; topic == "" {
return nil, fmt.Errorf("SLS binding error: topic property not supplied")
return nil, errors.New("SLS binding error: topic property not supplied")
}
if source := req.Metadata["source"]; source == "" {
return nil, fmt.Errorf("SLS binding error: source property not supplied")
return nil, errors.New("SLS binding error: source property not supplied")
}

log, err := s.parseLog(req)

@ -95,12 +96,13 @@ func (s *AliCloudSlsLogstorage) parseLog(req *bindings.InvokeRequest) (*sls.Log,
if err != nil {
return nil, err
}
//nolint:gosec
return producer.GenerateLog(uint32(time.Now().Unix()), logInfo), nil
}

func (s *AliCloudSlsLogstorage) parseMeta(meta bindings.Metadata) (*SlsLogstorageMetadata, error) {
var m SlsLogstorageMetadata
err := metadata.DecodeMetadata(meta.Properties, &m)
err := kitmd.DecodeMetadata(meta.Properties, &m)
if err != nil {
return nil, err
}

@ -128,9 +130,16 @@ func (callback *Callback) Fail(result *producer.Result) {
}

// GetComponentMetadata returns the metadata of the component.
func (s *AliCloudSlsLogstorage) GetComponentMetadata() map[string]string {
func (s *AliCloudSlsLogstorage) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
metadataStruct := SlsLogstorageMetadata{}
metadataInfo := map[string]string{}
metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
return metadataInfo
return
}

func (s *AliCloudSlsLogstorage) Close() error {
if s.producer != nil {
return s.producer.Close(time.Second.Milliseconds() * 5)
}

return nil
}

@ -5,6 +5,7 @@ import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/dapr/components-contrib/bindings"
)

@ -23,7 +24,7 @@ func TestSlsLogstorageMetadata(t *testing.T) {

meta, err := aliCloudSlsLogstorage.parseMeta(m)

assert.Nil(t, err)
require.NoError(t, err)
assert.Equal(t, "ENDPOINT", meta.Endpoint)
assert.Equal(t, "ACCESSKEYID", meta.AccessKeyID)
assert.Equal(t, "ACCESSKEYSECRET", meta.AccessKeySecret)

@ -27,6 +27,7 @@ import (
"github.com/dapr/components-contrib/bindings"
contribMetadata "github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
kitmd "github.com/dapr/kit/metadata"
)

const (

@ -123,7 +124,7 @@ func (s *AliCloudTableStore) Operations() []bindings.OperationKind {

func (s *AliCloudTableStore) parseMetadata(metadata bindings.Metadata) (*tablestoreMetadata, error) {
m := tablestoreMetadata{}
err := contribMetadata.DecodeMetadata(metadata.Properties, &m)
err := kitmd.DecodeMetadata(metadata.Properties, &m)
if err != nil {
return nil, err
}

@ -270,7 +271,6 @@ func (s *AliCloudTableStore) create(req *bindings.InvokeRequest, resp *bindings.
}

_, err = s.client.PutRow(putRequest)

if err != nil {
return err
}

@ -301,7 +301,6 @@ func (s *AliCloudTableStore) delete(req *bindings.InvokeRequest, resp *bindings.
change.SetCondition(tablestore.RowExistenceExpectation_IGNORE) //nolint:nosnakecase
deleteReq := &tablestore.DeleteRowRequest{DeleteRowChange: change}
_, err = s.client.DeleteRow(deleteReq)

if err != nil {
return err
}

@ -347,9 +346,12 @@ func contains(arr []string, str string) bool {
}

// GetComponentMetadata returns the metadata of the component.
func (s *AliCloudTableStore) GetComponentMetadata() map[string]string {
func (s *AliCloudTableStore) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
metadataStruct := tablestoreMetadata{}
metadataInfo := map[string]string{}
contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
return metadataInfo
return
}

func (s *AliCloudTableStore) Close() error {
return nil
}

@ -14,12 +14,12 @@ limitations under the License.
package tablestore

import (
"context"
"encoding/json"
"os"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"

@ -33,7 +33,7 @@ func TestTableStoreMetadata(t *testing.T) {

meta, err := aliCloudTableStore.parseMetadata(m)

assert.Nil(t, err)
require.NoError(t, err)
assert.Equal(t, "ACCESSKEYID", meta.AccessKeyID)
assert.Equal(t, "ACCESSKEY", meta.AccessKey)
assert.Equal(t, "INSTANCENAME", meta.InstanceName)

@ -51,7 +51,7 @@ func TestDataEncodeAndDecode(t *testing.T) {
metadata := bindings.Metadata{Base: metadata.Base{
Properties: getTestProperties(),
}}
aliCloudTableStore.Init(context.Background(), metadata)
aliCloudTableStore.Init(t.Context(), metadata)

// test create
putData := map[string]interface{}{

@ -60,7 +60,7 @@ func TestDataEncodeAndDecode(t *testing.T) {
"column2": int64(2),
}
data, err := json.Marshal(putData)
assert.Nil(t, err)
require.NoError(t, err)
putRowReq := &bindings.InvokeRequest{
Operation: bindings.CreateOperation,
Metadata: map[string]string{

@ -70,9 +70,9 @@ func TestDataEncodeAndDecode(t *testing.T) {
Data: data,
}

putInvokeResp, err := aliCloudTableStore.Invoke(context.Background(), putRowReq)
putInvokeResp, err := aliCloudTableStore.Invoke(t.Context(), putRowReq)

assert.Nil(t, err)
require.NoError(t, err)
assert.NotNil(t, putInvokeResp)

putRowReq.Data, _ = json.Marshal(map[string]interface{}{

@ -81,16 +81,16 @@ func TestDataEncodeAndDecode(t *testing.T) {
"column2": int64(2),
})

putInvokeResp, err = aliCloudTableStore.Invoke(context.Background(), putRowReq)
putInvokeResp, err = aliCloudTableStore.Invoke(t.Context(), putRowReq)

assert.Nil(t, err)
require.NoError(t, err)
assert.NotNil(t, putInvokeResp)

// test get
getData, err := json.Marshal(map[string]interface{}{
"pk1": "data1",
})
assert.Nil(t, err)
require.NoError(t, err)
getInvokeReq := &bindings.InvokeRequest{
Operation: bindings.GetOperation,
Metadata: map[string]string{

@ -101,15 +101,15 @@ func TestDataEncodeAndDecode(t *testing.T) {
Data: getData,
}

getInvokeResp, err := aliCloudTableStore.Invoke(context.Background(), getInvokeReq)
getInvokeResp, err := aliCloudTableStore.Invoke(t.Context(), getInvokeReq)

assert.Nil(t, err)
require.NoError(t, err)
assert.NotNil(t, getInvokeResp)

respData := make(map[string]interface{})
err = json.Unmarshal(getInvokeResp.Data, &respData)

assert.Nil(t, err)
require.NoError(t, err)

assert.Equal(t, putData["column1"], respData["column1"])
assert.Equal(t, putData["column2"], int64(respData["column2"].(float64)))

@ -123,7 +123,7 @@ func TestDataEncodeAndDecode(t *testing.T) {
"pk1": "data2",
},
})
assert.Nil(t, err)
require.NoError(t, err)

listReq := &bindings.InvokeRequest{
Operation: bindings.ListOperation,

@ -135,24 +135,24 @@ func TestDataEncodeAndDecode(t *testing.T) {
Data: listData,
}

listResp, err := aliCloudTableStore.Invoke(context.Background(), listReq)
assert.Nil(t, err)
listResp, err := aliCloudTableStore.Invoke(t.Context(), listReq)
require.NoError(t, err)
assert.NotNil(t, listResp)

listRespData := make([]map[string]interface{}, len(listData))
err = json.Unmarshal(listResp.Data, &listRespData)

assert.Nil(t, err)
require.NoError(t, err)
assert.Len(t, listRespData, 2)

assert.Equal(t, listRespData[0]["column1"], putData["column1"])
assert.Equal(t, listRespData[1]["pk1"], "data2")
assert.Equal(t, "data2", listRespData[1]["pk1"])

// test delete
deleteData, err := json.Marshal(map[string]interface{}{
"pk1": "data1",
})
assert.Nil(t, err)
require.NoError(t, err)

deleteReq := &bindings.InvokeRequest{
Operation: bindings.DeleteOperation,

@ -163,14 +163,14 @@ func TestDataEncodeAndDecode(t *testing.T) {
Data: deleteData,
}

deleteResp, err := aliCloudTableStore.Invoke(context.Background(), deleteReq)
deleteResp, err := aliCloudTableStore.Invoke(t.Context(), deleteReq)

assert.Nil(t, err)
require.NoError(t, err)
assert.NotNil(t, deleteResp)

getInvokeResp, err = aliCloudTableStore.Invoke(context.Background(), getInvokeReq)
getInvokeResp, err = aliCloudTableStore.Invoke(t.Context(), getInvokeReq)

assert.Nil(t, err)
require.NoError(t, err)
assert.Nil(t, getInvokeResp.Data)
}

@ -30,6 +30,7 @@ import (
"github.com/dapr/components-contrib/bindings"
contribMetadata "github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
kitmd "github.com/dapr/kit/metadata"
)

const (

@ -89,7 +90,7 @@ func NewAPNS(logger logger.Logger) bindings.OutputBinding {
// in the binding's configuration.
func (a *APNS) Init(ctx context.Context, metadata bindings.Metadata) error {
m := APNSmetadata{}
err := contribMetadata.DecodeMetadata(metadata.Properties, &m)
err := kitmd.DecodeMetadata(metadata.Properties, &m)
if err != nil {
return err
}

@ -261,9 +262,12 @@ func makeErrorResponse(httpResponse *http.Response) (*bindings.InvokeResponse, e
}

// GetComponentMetadata returns the metadata of the component.
func (a *APNS) GetComponentMetadata() map[string]string {
func (a *APNS) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
metadataStruct := APNSmetadata{}
metadataInfo := map[string]string{}
contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
return metadataInfo
return
}

func (a *APNS) Close() error {
return nil
}
|
|
|
@@ -15,7 +15,6 @@ package apns

 import (
 	"bytes"
-	"context"
 	"io"
 	"net/http"
 	"strings"

@@ -23,6 +22,7 @@ import (

 	jsoniter "github.com/json-iterator/go"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"

 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/components-contrib/metadata"

@@ -51,8 +51,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Nil(t, err)
+	err := binding.Init(t.Context(), metadata)
+	require.NoError(t, err)
 	assert.Equal(t, developmentPrefix, binding.urlPrefix)
 })

@@ -66,8 +66,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Nil(t, err)
+	err := binding.Init(t.Context(), metadata)
+	require.NoError(t, err)
 	assert.Equal(t, productionPrefix, binding.urlPrefix)
 })

@@ -80,8 +80,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Nil(t, err)
+	err := binding.Init(t.Context(), metadata)
+	require.NoError(t, err)
 	assert.Equal(t, productionPrefix, binding.urlPrefix)
 })

@@ -93,8 +93,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Error(t, err, "the key-id parameter is required")
+	err := binding.Init(t.Context(), metadata)
+	require.Error(t, err, "the key-id parameter is required")
 })

 t.Run("valid key ID", func(t *testing.T) {

@@ -106,8 +106,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Nil(t, err)
+	err := binding.Init(t.Context(), metadata)
+	require.NoError(t, err)
 	assert.Equal(t, testKeyID, binding.authorizationBuilder.keyID)
 })

@@ -119,8 +119,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Error(t, err, "the team-id parameter is required")
+	err := binding.Init(t.Context(), metadata)
+	require.Error(t, err, "the team-id parameter is required")
 })

 t.Run("valid team ID", func(t *testing.T) {

@@ -132,8 +132,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Nil(t, err)
+	err := binding.Init(t.Context(), metadata)
+	require.NoError(t, err)
 	assert.Equal(t, testTeamID, binding.authorizationBuilder.teamID)
 })

@@ -145,8 +145,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Error(t, err, "the private-key parameter is required")
+	err := binding.Init(t.Context(), metadata)
+	require.Error(t, err, "the private-key parameter is required")
 })

 t.Run("valid private key", func(t *testing.T) {

@@ -158,8 +158,8 @@ func TestInit(t *testing.T) {
 		},
 	}}
 	binding := NewAPNS(testLogger).(*APNS)
-	err := binding.Init(context.Background(), metadata)
-	assert.Nil(t, err)
+	err := binding.Init(t.Context(), metadata)
+	require.NoError(t, err)
 	assert.NotNil(t, binding.authorizationBuilder.privateKey)
 })
 }

@@ -168,7 +168,7 @@ func TestOperations(t *testing.T) {
 	testLogger := logger.NewLogger("test")
 	testBinding := NewAPNS(testLogger).(*APNS)
 	operations := testBinding.Operations()
-	assert.Equal(t, 1, len(operations))
+	assert.Len(t, operations, 1)
 	assert.Equal(t, bindings.CreateOperation, operations[0])
 }

@@ -191,8 +191,8 @@ func TestInvoke(t *testing.T) {
 t.Run("operation must be create", func(t *testing.T) {
 	testBinding := makeTestBinding(t, testLogger)
 	req := &bindings.InvokeRequest{Operation: bindings.DeleteOperation}
-	_, err := testBinding.Invoke(context.TODO(), req)
-	assert.Error(t, err, "operation not supported: delete")
+	_, err := testBinding.Invoke(t.Context(), req)
+	require.Error(t, err, "operation not supported: delete")
 })

 t.Run("the device token is required", func(t *testing.T) {

@@ -201,8 +201,8 @@ func TestInvoke(t *testing.T) {
 		Operation: bindings.CreateOperation,
 		Metadata:  map[string]string{},
 	}
-	_, err := testBinding.Invoke(context.TODO(), req)
-	assert.Error(t, err, "the device-token parameter is required")
+	_, err := testBinding.Invoke(t.Context(), req)
+	require.Error(t, err, "the device-token parameter is required")
 })

 t.Run("the authorization header is sent", func(t *testing.T) {

@@ -212,7 +212,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the push type header is sent", func(t *testing.T) {

@@ -223,7 +223,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the message ID is sent", func(t *testing.T) {

@@ -234,7 +234,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the expiration is sent", func(t *testing.T) {

@@ -245,7 +245,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the priority is sent", func(t *testing.T) {

@@ -256,7 +256,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the topic is sent", func(t *testing.T) {

@@ -267,7 +267,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the collapse ID is sent", func(t *testing.T) {

@@ -278,7 +278,7 @@ func TestInvoke(t *testing.T) {
 		return successResponse()
 	})
-	_, _ = testBinding.Invoke(context.TODO(), successRequest)
+	_, _ = testBinding.Invoke(t.Context(), successRequest)
 })

 t.Run("the message ID is returned", func(t *testing.T) {

@@ -286,13 +286,13 @@ func TestInvoke(t *testing.T) {
 	testBinding.client = newTestClient(func(req *http.Request) *http.Response {
 		return successResponse()
 	})
-	response, err := testBinding.Invoke(context.TODO(), successRequest)
-	assert.Nil(t, err)
+	response, err := testBinding.Invoke(t.Context(), successRequest)
+	require.NoError(t, err)
 	assert.NotNil(t, response.Data)
 	var body notificationResponse
 	decoder := jsoniter.NewDecoder(bytes.NewReader(response.Data))
 	err = decoder.Decode(&body)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "12345", body.MessageID)
 })

@@ -306,8 +306,8 @@ func TestInvoke(t *testing.T) {
 			Body: io.NopCloser(strings.NewReader(body)),
 		}
 	})
-	_, err := testBinding.Invoke(context.TODO(), successRequest)
-	assert.Error(t, err, "BadDeviceToken")
+	_, err := testBinding.Invoke(t.Context(), successRequest)
+	require.Error(t, err, "BadDeviceToken")
 })
 }

@@ -321,8 +321,8 @@ func makeTestBinding(t *testing.T, log logger.Logger) *APNS {
 			privateKeyKey: testPrivateKey,
 		},
 	}}
-	err := testBinding.Init(context.Background(), bindingMetadata)
-	assert.Nil(t, err)
+	err := testBinding.Init(t.Context(), bindingMetadata)
+	require.NoError(t, err)

 	return testBinding
 }
@@ -23,16 +23,17 @@ import (
 	"github.com/aws/aws-sdk-go/service/dynamodb/dynamodbattribute"

 	"github.com/dapr/components-contrib/bindings"
-	awsAuth "github.com/dapr/components-contrib/internal/authentication/aws"
+	awsAuth "github.com/dapr/components-contrib/common/authentication/aws"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )

 // DynamoDB allows performing stateful operations on AWS DynamoDB.
 type DynamoDB struct {
-	client *dynamodb.DynamoDB
-	table  string
-	logger logger.Logger
+	authProvider awsAuth.Provider
+	table        string
+	logger       logger.Logger
 }

 type dynamoDBMetadata struct {

@@ -50,18 +51,27 @@ func NewDynamoDB(logger logger.Logger) bindings.OutputBinding {
 }

 // Init performs connection parsing for DynamoDB.
-func (d *DynamoDB) Init(_ context.Context, metadata bindings.Metadata) error {
+func (d *DynamoDB) Init(ctx context.Context, metadata bindings.Metadata) error {
 	meta, err := d.getDynamoDBMetadata(metadata)
 	if err != nil {
 		return err
 	}

-	client, err := d.getClient(meta)
+	opts := awsAuth.Options{
+		Logger:       d.logger,
+		Properties:   metadata.Properties,
+		Region:       meta.Region,
+		Endpoint:     meta.Endpoint,
+		AccessKey:    meta.AccessKey,
+		SecretKey:    meta.SecretKey,
+		SessionToken: meta.SessionToken,
+	}
+
+	provider, err := awsAuth.NewProvider(ctx, opts, awsAuth.GetConfig(opts))
 	if err != nil {
 		return err
 	}

-	d.client = client
+	d.authProvider = provider
 	d.table = meta.Table

 	return nil

@@ -83,7 +93,7 @@ func (d *DynamoDB) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bi
 		return nil, err
 	}

-	_, err = d.client.PutItemWithContext(ctx, &dynamodb.PutItemInput{
+	_, err = d.authProvider.DynamoDB().DynamoDB.PutItemWithContext(ctx, &dynamodb.PutItemInput{
 		Item:      item,
 		TableName: aws.String(d.table),
 	})

@@ -96,7 +106,7 @@ func (d *DynamoDB) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bi

 func (d *DynamoDB) getDynamoDBMetadata(spec bindings.Metadata) (*dynamoDBMetadata, error) {
 	var meta dynamoDBMetadata
-	err := metadata.DecodeMetadata(spec.Properties, &meta)
+	err := kitmd.DecodeMetadata(spec.Properties, &meta)
 	if err != nil {
 		return nil, err
 	}

@@ -104,20 +114,16 @@ func (d *DynamoDB) getDynamoDBMetadata(spec bindings.Metadata) (*dynamoDBMetadat
 	return &meta, nil
 }

-func (d *DynamoDB) getClient(metadata *dynamoDBMetadata) (*dynamodb.DynamoDB, error) {
-	sess, err := awsAuth.GetClient(metadata.AccessKey, metadata.SecretKey, metadata.SessionToken, metadata.Region, metadata.Endpoint)
-	if err != nil {
-		return nil, err
-	}
-	c := dynamodb.New(sess)
-
-	return c, nil
-}
-
 // GetComponentMetadata returns the metadata of the component.
-func (d *DynamoDB) GetComponentMetadata() map[string]string {
+func (d *DynamoDB) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := dynamoDBMetadata{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }

 func (d *DynamoDB) Close() error {
+	if d.authProvider != nil {
+		return d.authProvider.Close()
+	}
 	return nil
 }
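The DynamoDB hunks above swap the per-binding `getClient` session for a shared `awsAuth.Provider` created in `Init` and released in `Close`, with service clients obtained per call via `d.authProvider.DynamoDB()`. A hedged caller-side sketch of the refactored binding, assuming the import path `bindings/aws/dynamodb` and the metadata keys shown in the diff:

```go
package main

import (
	"context"

	"github.com/dapr/components-contrib/bindings"
	dynamodb "github.com/dapr/components-contrib/bindings/aws/dynamodb" // path assumed
	"github.com/dapr/kit/logger"
)

func main() {
	b := dynamodb.NewDynamoDB(logger.NewLogger("example"))

	m := bindings.Metadata{}
	m.Properties = map[string]string{ // keys decoded by getDynamoDBMetadata
		"region":    "us-east-1",
		"accessKey": "key",
		"secretKey": "secret",
		"table":     "items",
	}

	// Init now builds the shared auth provider instead of a raw session.
	if err := b.Init(context.Background(), m); err != nil {
		panic(err)
	}
	// Close (part of this refactor) releases the provider.
	defer b.(interface{ Close() error }).Close()

	_, err := b.Invoke(context.Background(), &bindings.InvokeRequest{
		Operation: bindings.CreateOperation,
		Data:      []byte(`{"id":"1","value":"hello"}`),
	})
	if err != nil {
		panic(err)
	}
}
```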
@@ -17,6 +17,7 @@ import (
 	"testing"

 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"

 	"github.com/dapr/components-contrib/bindings"
 )

@@ -28,7 +29,7 @@ func TestParseMetadata(t *testing.T) {
 	}
 	dy := DynamoDB{}
 	meta, err := dy.getDynamoDBMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "a", meta.AccessKey)
 	assert.Equal(t, "a", meta.Region)
 	assert.Equal(t, "a", meta.SecretKey)
@@ -23,32 +23,32 @@ import (
 	"time"

 	"github.com/aws/aws-sdk-go/aws"
-	"github.com/aws/aws-sdk-go/aws/credentials"
 	"github.com/aws/aws-sdk-go/aws/request"
 	"github.com/aws/aws-sdk-go/service/kinesis"
 	"github.com/cenkalti/backoff/v4"
 	"github.com/google/uuid"
-	"github.com/vmware/vmware-go-kcl/clientlibrary/config"
 	"github.com/vmware/vmware-go-kcl/clientlibrary/interfaces"
 	"github.com/vmware/vmware-go-kcl/clientlibrary/worker"

 	"github.com/dapr/components-contrib/bindings"
-	awsAuth "github.com/dapr/components-contrib/internal/authentication/aws"
+	awsAuth "github.com/dapr/components-contrib/common/authentication/aws"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )

 // AWSKinesis allows receiving and sending data to/from AWS Kinesis stream.
 type AWSKinesis struct {
-	client   *kinesis.Kinesis
-	metadata *kinesisMetadata
+	authProvider awsAuth.Provider
+	metadata     *kinesisMetadata

-	worker       *worker.Worker
-	workerConfig *config.KinesisClientLibConfiguration
+	worker *worker.Worker

-	streamARN   *string
-	consumerARN *string
-	logger      logger.Logger
+	streamName   string
+	consumerName string
+	consumerARN  *string
+	logger       logger.Logger
+	consumerMode string

 	closed  atomic.Bool
 	closeCh chan struct{}

@@ -112,30 +112,25 @@ func (a *AWSKinesis) Init(ctx context.Context, metadata bindings.Metadata) error
 		return fmt.Errorf("%s invalid \"mode\" field %s", "aws.kinesis", m.KinesisConsumerMode)
 	}

-	client, err := a.getClient(m)
-	if err != nil {
-		return err
-	}
-
-	streamName := aws.String(m.StreamName)
-	stream, err := client.DescribeStreamWithContext(ctx, &kinesis.DescribeStreamInput{
-		StreamName: streamName,
-	})
-	if err != nil {
-		return err
-	}
-
-	if m.KinesisConsumerMode == SharedThroughput {
-		kclConfig := config.NewKinesisClientLibConfigWithCredential(m.ConsumerName,
-			m.StreamName, m.Region, m.ConsumerName,
-			credentials.NewStaticCredentials(m.AccessKey, m.SecretKey, ""))
-		a.workerConfig = kclConfig
-	}
-
-	a.streamARN = stream.StreamDescription.StreamARN
+	a.consumerMode = m.KinesisConsumerMode
+	a.streamName = m.StreamName
+	a.consumerName = m.ConsumerName
 	a.metadata = m
-	a.client = client
+
+	opts := awsAuth.Options{
+		Logger:       a.logger,
+		Properties:   metadata.Properties,
+		Region:       m.Region,
+		AccessKey:    m.AccessKey,
+		SecretKey:    m.SecretKey,
+		SessionToken: "",
+	}
+	// extra configs needed per component type
+	provider, err := awsAuth.NewProvider(ctx, opts, awsAuth.GetConfig(opts))
+	if err != nil {
+		return err
+	}
+	a.authProvider = provider
 	return nil
 }

@@ -148,7 +143,7 @@ func (a *AWSKinesis) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*
 	if partitionKey == "" {
 		partitionKey = uuid.New().String()
 	}
-	_, err := a.client.PutRecordWithContext(ctx, &kinesis.PutRecordInput{
+	_, err := a.authProvider.Kinesis().Kinesis.PutRecordWithContext(ctx, &kinesis.PutRecordInput{
 		StreamName:   &a.metadata.StreamName,
 		Data:         req.Data,
 		PartitionKey: &partitionKey,

@@ -161,16 +156,15 @@ func (a *AWSKinesis) Read(ctx context.Context, handler bindings.Handler) (err er
 	if a.closed.Load() {
 		return errors.New("binding is closed")
 	}

 	if a.metadata.KinesisConsumerMode == SharedThroughput {
-		a.worker = worker.NewWorker(a.recordProcessorFactory(ctx, handler), a.workerConfig)
+		a.worker = worker.NewWorker(a.recordProcessorFactory(ctx, handler), a.authProvider.Kinesis().WorkerCfg(ctx, a.streamName, a.consumerName, a.consumerMode))
 		err = a.worker.Start()
 		if err != nil {
 			return err
 		}
 	} else if a.metadata.KinesisConsumerMode == ExtendedFanout {
 		var stream *kinesis.DescribeStreamOutput
-		stream, err = a.client.DescribeStream(&kinesis.DescribeStreamInput{StreamName: &a.metadata.StreamName})
+		stream, err = a.authProvider.Kinesis().Kinesis.DescribeStream(&kinesis.DescribeStreamInput{StreamName: &a.metadata.StreamName})
 		if err != nil {
 			return err
 		}

@@ -180,6 +174,10 @@ func (a *AWSKinesis) Read(ctx context.Context, handler bindings.Handler) (err er
 		}
 	}

+	stream, err := a.authProvider.Kinesis().Stream(ctx, a.streamName)
+	if err != nil {
+		return fmt.Errorf("failed to get kinesis stream arn: %v", err)
+	}
 	// Wait for context cancelation then stop
 	a.wg.Add(1)
 	go func() {

@@ -191,7 +189,7 @@ func (a *AWSKinesis) Read(ctx context.Context, handler bindings.Handler) (err er
 		if a.metadata.KinesisConsumerMode == SharedThroughput {
 			a.worker.Shutdown()
 		} else if a.metadata.KinesisConsumerMode == ExtendedFanout {
-			a.deregisterConsumer(a.streamARN, a.consumerARN)
+			a.deregisterConsumer(ctx, stream, a.consumerARN)
 		}
 	}()

@@ -226,8 +224,7 @@ func (a *AWSKinesis) Subscribe(ctx context.Context, streamDesc kinesis.StreamDes
 			return
 		default:
 		}
-
-		sub, err := a.client.SubscribeToShardWithContext(ctx, &kinesis.SubscribeToShardInput{
+		sub, err := a.authProvider.Kinesis().Kinesis.SubscribeToShardWithContext(ctx, &kinesis.SubscribeToShardInput{
 			ConsumerARN:      consumerARN,
 			ShardId:          s.ShardId,
 			StartingPosition: &kinesis.StartingPosition{Type: aws.String(kinesis.ShardIteratorTypeLatest)},

@@ -269,6 +266,9 @@ func (a *AWSKinesis) Close() error {
 		close(a.closeCh)
 	}
 	a.wg.Wait()
+	if a.authProvider != nil {
+		return a.authProvider.Close()
+	}
 	return nil
 }

@@ -276,7 +276,7 @@ func (a *AWSKinesis) ensureConsumer(ctx context.Context, streamARN *string) (*st
 	// Only set timeout on consumer call.
 	conCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
 	defer cancel()
-	consumer, err := a.client.DescribeStreamConsumerWithContext(conCtx, &kinesis.DescribeStreamConsumerInput{
+	consumer, err := a.authProvider.Kinesis().Kinesis.DescribeStreamConsumerWithContext(conCtx, &kinesis.DescribeStreamConsumerInput{
 		ConsumerName: &a.metadata.ConsumerName,
 		StreamARN:    streamARN,
 	})

@@ -288,7 +288,7 @@ func (a *AWSKinesis) ensureConsumer(ctx context.Context, streamARN *string) (*st
 }

 func (a *AWSKinesis) registerConsumer(ctx context.Context, streamARN *string) (*string, error) {
-	consumer, err := a.client.RegisterStreamConsumerWithContext(ctx, &kinesis.RegisterStreamConsumerInput{
+	consumer, err := a.authProvider.Kinesis().Kinesis.RegisterStreamConsumerWithContext(ctx, &kinesis.RegisterStreamConsumerInput{
 		ConsumerName: &a.metadata.ConsumerName,
 		StreamARN:    streamARN,
 	})

@@ -300,7 +300,6 @@ func (a *AWSKinesis) registerConsumer(ctx context.Context, streamARN *string) (*
 		ConsumerName: &a.metadata.ConsumerName,
 		StreamARN:    streamARN,
 	})
-
 	if err != nil {
 		return nil, err
 	}

@@ -308,11 +307,11 @@ func (a *AWSKinesis) registerConsumer(ctx context.Context, streamARN *string) (*
 	return consumer.Consumer.ConsumerARN, nil
 }

-func (a *AWSKinesis) deregisterConsumer(streamARN *string, consumerARN *string) error {
+func (a *AWSKinesis) deregisterConsumer(ctx context.Context, streamARN *string, consumerARN *string) error {
 	if a.consumerARN != nil {
 		// Use a background context because the running context may have been canceled already
 		ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
-		_, err := a.client.DeregisterStreamConsumerWithContext(ctx, &kinesis.DeregisterStreamConsumerInput{
+		_, err := a.authProvider.Kinesis().Kinesis.DeregisterStreamConsumerWithContext(ctx, &kinesis.DeregisterStreamConsumerInput{
 			ConsumerARN:  consumerARN,
 			StreamARN:    streamARN,
 			ConsumerName: &a.metadata.ConsumerName,

@@ -343,7 +342,7 @@ func (a *AWSKinesis) waitUntilConsumerExists(ctx aws.Context, input *kinesis.Des
 			tmp := *input
 			inCpy = &tmp
 		}
-		req, _ := a.client.DescribeStreamConsumerRequest(inCpy)
+		req, _ := a.authProvider.Kinesis().Kinesis.DescribeStreamConsumerRequest(inCpy)
 		req.SetContext(ctx)
 		req.ApplyOptions(opts...)

@@ -355,19 +354,9 @@ func (a *AWSKinesis) waitUntilConsumerExists(ctx aws.Context, input *kinesis.Des
 	return w.WaitWithContext(ctx)
 }

-func (a *AWSKinesis) getClient(metadata *kinesisMetadata) (*kinesis.Kinesis, error) {
-	sess, err := awsAuth.GetClient(metadata.AccessKey, metadata.SecretKey, metadata.SessionToken, metadata.Region, metadata.Endpoint)
-	if err != nil {
-		return nil, err
-	}
-	k := kinesis.New(sess)
-
-	return k, nil
-}
-
 func (a *AWSKinesis) parseMetadata(meta bindings.Metadata) (*kinesisMetadata, error) {
 	var m kinesisMetadata
-	err := metadata.DecodeMetadata(meta.Properties, &m)
+	err := kitmd.DecodeMetadata(meta.Properties, &m)
 	if err != nil {
 		return nil, err
 	}

@@ -418,9 +407,8 @@ func (p *recordProcessor) Shutdown(input *interfaces.ShutdownInput) {
 }

 // GetComponentMetadata returns the metadata of the component.
-func (a *AWSKinesis) GetComponentMetadata() map[string]string {
+func (a *AWSKinesis) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := &kinesisMetadata{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }
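Condensed from the `Read` hunks above, the two consumer modes now route through the same provider: the KCL worker configuration comes from `WorkerCfg` instead of a locally built `config.KinesisClientLibConfiguration`, and extended fan-out talks to the provider's Kinesis client. A simplified sketch, not the verbatim function (the `Subscribe` call is assumed from its signature in the diff):

```go
// Inside Read, after this change (simplified sketch):
if a.metadata.KinesisConsumerMode == SharedThroughput {
	// KCL worker configuration now comes from the shared auth provider.
	a.worker = worker.NewWorker(
		a.recordProcessorFactory(ctx, handler),
		a.authProvider.Kinesis().WorkerCfg(ctx, a.streamName, a.consumerName, a.consumerMode),
	)
	err = a.worker.Start()
} else if a.metadata.KinesisConsumerMode == ExtendedFanout {
	// Extended fan-out describes the stream, then subscribes per shard.
	var stream *kinesis.DescribeStreamOutput
	stream, err = a.authProvider.Kinesis().Kinesis.DescribeStream(
		&kinesis.DescribeStreamInput{StreamName: &a.metadata.StreamName})
	if err == nil {
		err = a.Subscribe(ctx, *stream.StreamDescription, handler) // signature per the diff
	}
}
```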
@@ -17,6 +17,7 @@ import (
 	"testing"

 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"

 	"github.com/dapr/components-contrib/bindings"
 )

@@ -35,7 +36,7 @@ func TestParseMetadata(t *testing.T) {
 	}
 	kinesis := AWSKinesis{}
 	meta, err := kinesis.parseMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "key", meta.AccessKey)
 	assert.Equal(t, "region", meta.Region)
 	assert.Equal(t, "secret", meta.SecretKey)
@@ -0,0 +1,72 @@
+# yaml-language-server: $schema=../../../component-metadata-schema.json
+schemaVersion: v1
+type: bindings
+name: aws.s3
+version: v1
+status: stable
+title: "AWS S3"
+urls:
+  - title: Reference
+    url: https://docs.dapr.io/reference/components-reference/supported-bindings/s3/
+binding:
+  output: true
+  operations:
+    - name: create
+      description: "Create blob"
+    - name: get
+      description: "Get blob"
+    - name: delete
+      description: "Delete blob"
+    - name: list
+      description: "List blob"
+capabilities: []
+builtinAuthenticationProfiles:
+  - name: "aws"
+metadata:
+  - name: bucket
+    required: true
+    description: |
+      The name of the S3 bucket to write to.
+    example: '"bucket"'
+    type: string
+  - name: endpoint
+    required: false
+    description: |
+      AWS endpoint for the component to use, to connect to S3-compatible services or emulators.
+      Do not use this when running against production AWS.
+    example: '"http://localhost:4566"'
+    type: string
+  - name: forcePathStyle
+    description: |
+      Currently Amazon S3 SDK supports virtual-hosted-style and path-style access.
+      When false (the default), uses virtual-hosted-style format, i.e.: `https://<your bucket>.<endpoint>/<key>`.
+      When true, uses path-style format, i.e.: `https://<endpoint>/<your bucket>/<key>`.
+    type: bool
+    default: 'false'
+    example: '"true", "false"'
+  - name: decodeBase64
+    description: |
+      Configuration to decode base64 file content before saving to bucket storage.
+      (In case of saving a file with binary content).
+    type: bool
+    default: 'false'
+    example: '"true", "false"'
+  - name: encodeBase64
+    description: |
+      Configuration to encode base64 file content before returning the content.
+      (In case of opening a file with binary content).
+    type: bool
+    default: 'false'
+    example: '"true", "false"'
+  - name: disableSSL
+    description: |
+      Allows to connect to non-`https://` endpoints.
+    type: bool
+    default: 'false'
+    example: '"true", "false"'
+  - name: insecureSSL
+    description: |
+      When connecting to `https://` endpoints, accepts self-signed or invalid certificates.
+    type: bool
+    default: 'false'
+    example: '"true", "false"'
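For reference, a hypothetical Dapr component manifest exercising the fields documented in the new metadata file above, pointed at a LocalStack-style emulator. This manifest is illustrative and not part of this change:

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: mys3
spec:
  type: bindings.aws.s3
  version: v1
  metadata:
  - name: bucket
    value: "mybucket"
  - name: region
    value: "us-east-1"
  - name: endpoint          # emulator only; omit when targeting real AWS
    value: "http://localhost:4566"
  - name: forcePathStyle    # emulators typically require path-style URLs
    value: "true"
  - name: disableSSL
    value: "true"
```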
@@ -18,6 +18,7 @@ import (
 	"crypto/tls"
 	b64 "encoding/base64"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"net/http"

@@ -27,17 +28,19 @@ import (
 	"time"

 	"github.com/aws/aws-sdk-go/aws"
-	"github.com/aws/aws-sdk-go/aws/session"
+	"github.com/aws/aws-sdk-go/aws/awserr"
 	"github.com/aws/aws-sdk-go/service/s3"
 	"github.com/aws/aws-sdk-go/service/s3/s3manager"
 	"github.com/google/uuid"

 	"github.com/dapr/components-contrib/bindings"
-	awsAuth "github.com/dapr/components-contrib/internal/authentication/aws"
-	"github.com/dapr/components-contrib/internal/utils"
+	awsAuth "github.com/dapr/components-contrib/common/authentication/aws"
+	commonutils "github.com/dapr/components-contrib/common/utils"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 	"github.com/dapr/kit/ptr"
+	kitstrings "github.com/dapr/kit/strings"
 )

 const (

@@ -45,8 +48,11 @@ const (
 	metadataEncodeBase64 = "encodeBase64"
 	metadataFilePath     = "filePath"
 	metadataPresignTTL   = "presignTTL"
+	metadataStorageClass = "storageClass"
+	metadataTags         = "tags"

-	metadataKey = "key"
+	metatadataContentType = "Content-Type"
+	metadataKey           = "key"

 	defaultMaxResults = 1000
 	presignOperation  = "presign"

@@ -54,27 +60,28 @@ const (

 // AWSS3 is a binding for an AWS S3 storage bucket.
 type AWSS3 struct {
-	metadata   *s3Metadata
-	s3Client   *s3.S3
-	uploader   *s3manager.Uploader
-	downloader *s3manager.Downloader
-	logger     logger.Logger
+	metadata     *s3Metadata
+	authProvider awsAuth.Provider
+	logger       logger.Logger
 }

 type s3Metadata struct {
-	Region       string `json:"region" mapstructure:"region"`
+	// Ignored by metadata parser because included in built-in authentication profile
+	AccessKey    string `json:"accessKey" mapstructure:"accessKey" mdignore:"true"`
+	SecretKey    string `json:"secretKey" mapstructure:"secretKey" mdignore:"true"`
+	SessionToken string `json:"sessionToken" mapstructure:"sessionToken" mdignore:"true"`
+
+	Region       string `json:"region" mapstructure:"region" mapstructurealiases:"awsRegion" mdignore:"true"`
 	Endpoint     string `json:"endpoint" mapstructure:"endpoint"`
-	AccessKey    string `json:"accessKey" mapstructure:"accessKey"`
-	SecretKey    string `json:"secretKey" mapstructure:"secretKey"`
-	SessionToken string `json:"sessionToken" mapstructure:"sessionToken"`
 	Bucket         string `json:"bucket" mapstructure:"bucket"`
 	DecodeBase64   bool   `json:"decodeBase64,string" mapstructure:"decodeBase64"`
 	EncodeBase64   bool   `json:"encodeBase64,string" mapstructure:"encodeBase64"`
 	ForcePathStyle bool   `json:"forcePathStyle,string" mapstructure:"forcePathStyle"`
 	DisableSSL     bool   `json:"disableSSL,string" mapstructure:"disableSSL"`
 	InsecureSSL    bool   `json:"insecureSSL,string" mapstructure:"insecureSSL"`
-	FilePath     string `mapstructure:"filePath"`
-	PresignTTL   string `mapstructure:"presignTTL"`
+	FilePath     string `json:"filePath" mapstructure:"filePath" mdignore:"true"`
+	PresignTTL   string `json:"presignTTL" mapstructure:"presignTTL" mdignore:"true"`
+	StorageClass string `json:"storageClass" mapstructure:"storageClass" mdignore:"true"`
 }

 type createResponse struct {

@@ -99,23 +106,11 @@ func NewAWSS3(logger logger.Logger) bindings.OutputBinding {
 	return &AWSS3{logger: logger}
 }

-// Init does metadata parsing and connection creation.
-func (s *AWSS3) Init(_ context.Context, metadata bindings.Metadata) error {
-	m, err := s.parseMetadata(metadata)
-	if err != nil {
-		return err
-	}
-	session, err := s.getSession(m)
-	if err != nil {
-		return err
-	}
-
-	cfg := aws.NewConfig().
-		WithS3ForcePathStyle(m.ForcePathStyle).
-		WithDisableSSL(m.DisableSSL)
+func (s *AWSS3) getAWSConfig(opts awsAuth.Options) *aws.Config {
+	cfg := awsAuth.GetConfig(opts).WithS3ForcePathStyle(s.metadata.ForcePathStyle).WithDisableSSL(s.metadata.DisableSSL)

 	// Use a custom HTTP client to allow self-signed certs
-	if m.InsecureSSL {
+	if s.metadata.InsecureSSL {
 		customTransport := http.DefaultTransport.(*http.Transport).Clone()
 		customTransport.TLSClientConfig = &tls.Config{
 			//nolint:gosec

@@ -125,17 +120,43 @@ func (s *AWSS3) Init(_ context.Context, metadata bindings.Metadata) error {
 			Transport: customTransport,
 		}
 		cfg = cfg.WithHTTPClient(client)
+		s.logger.Infof("aws s3: you are using 'insecureSSL' to skip server config verify which is unsafe!")
 	}
+	return cfg
+}

+// Init does metadata parsing and connection creation.
+func (s *AWSS3) Init(ctx context.Context, metadata bindings.Metadata) error {
+	m, err := s.parseMetadata(metadata)
+	if err != nil {
+		return err
+	}
 	s.metadata = m
-	s.s3Client = s3.New(session, cfg)
-	s.downloader = s3manager.NewDownloaderWithClient(s.s3Client)
-	s.uploader = s3manager.NewUploaderWithClient(s.s3Client)
+
+	opts := awsAuth.Options{
+		Logger:       s.logger,
+		Properties:   metadata.Properties,
+		Region:       m.Region,
+		Endpoint:     m.Endpoint,
+		AccessKey:    m.AccessKey,
+		SecretKey:    m.SecretKey,
+		SessionToken: m.SessionToken,
+	}
+	// extra configs needed per component type
+	provider, err := awsAuth.NewProvider(ctx, opts, s.getAWSConfig(opts))
+	if err != nil {
+		return err
+	}
+	s.authProvider = provider

 	return nil
 }
+
+func (s *AWSS3) Close() error {
+	if s.authProvider != nil {
+		return s.authProvider.Close()
+	}
+	return nil
+}

@@ -166,6 +187,20 @@ func (s *AWSS3) create(ctx context.Context, req *bindings.InvokeRequest) (*bindi
 		s.logger.Debugf("s3 binding error: key not found. generating key %s", key)
 	}

+	var contentType *string
+	contentTypeStr := strings.TrimSpace(req.Metadata[metatadataContentType])
+	if contentTypeStr != "" {
+		contentType = &contentTypeStr
+	}
+
+	var tagging *string
+	if rawTags, ok := req.Metadata[metadataTags]; ok {
+		tagging, err = s.parseS3Tags(rawTags)
+		if err != nil {
+			return nil, fmt.Errorf("s3 binding error: parsing tags falied error: %w", err)
+		}
+	}
+
 	var r io.Reader
 	if metadata.FilePath != "" {
 		r, err = os.Open(metadata.FilePath)

@@ -173,17 +208,25 @@ func (s *AWSS3) create(ctx context.Context, req *bindings.InvokeRequest) (*bindi
 			return nil, fmt.Errorf("s3 binding error: file read error: %w", err)
 		}
 	} else {
-		r = strings.NewReader(utils.Unquote(req.Data))
+		r = strings.NewReader(commonutils.Unquote(req.Data))
 	}

 	if metadata.DecodeBase64 {
 		r = b64.NewDecoder(b64.StdEncoding, r)
 	}

-	resultUpload, err := s.uploader.UploadWithContext(ctx, &s3manager.UploadInput{
-		Bucket: ptr.Of(metadata.Bucket),
-		Key:    ptr.Of(key),
-		Body:   r,
+	var storageClass *string
+	if metadata.StorageClass != "" {
+		storageClass = aws.String(metadata.StorageClass)
+	}
+
+	resultUpload, err := s.authProvider.S3().Uploader.UploadWithContext(ctx, &s3manager.UploadInput{
+		Bucket:       ptr.Of(metadata.Bucket),
+		Key:          ptr.Of(key),
+		Body:         r,
+		ContentType:  contentType,
+		StorageClass: storageClass,
+		Tagging:      tagging,
 	})
 	if err != nil {
 		return nil, fmt.Errorf("s3 binding error: uploading failed: %w", err)

@@ -191,7 +234,7 @@ func (s *AWSS3) create(ctx context.Context, req *bindings.InvokeRequest) (*bindi

 	var presignURL string
 	if metadata.PresignTTL != "" {
-		url, presignErr := s.presignObject(metadata.Bucket, key, metadata.PresignTTL)
+		url, presignErr := s.presignObject(ctx, metadata.Bucket, key, metadata.PresignTTL)
 		if presignErr != nil {
 			return nil, fmt.Errorf("s3 binding error: %s", presignErr)
 		}

@@ -231,7 +274,7 @@ func (s *AWSS3) presign(ctx context.Context, req *bindings.InvokeRequest) (*bind
 		return nil, fmt.Errorf("s3 binding error: required metadata '%s' missing", metadataPresignTTL)
 	}

-	url, err := s.presignObject(metadata.Bucket, key, metadata.PresignTTL)
+	url, err := s.presignObject(ctx, metadata.Bucket, key, metadata.PresignTTL)
 	if err != nil {
 		return nil, fmt.Errorf("s3 binding error: %w", err)
 	}

@@ -248,13 +291,12 @@ func (s *AWSS3) presign(ctx context.Context, req *bindings.InvokeRequest) (*bind
 	}, nil
 }

-func (s *AWSS3) presignObject(bucket, key, ttl string) (string, error) {
+func (s *AWSS3) presignObject(ctx context.Context, bucket, key, ttl string) (string, error) {
 	d, err := time.ParseDuration(ttl)
 	if err != nil {
 		return "", fmt.Errorf("s3 binding error: cannot parse duration %s: %w", ttl, err)
 	}
-
-	objReq, _ := s.s3Client.GetObjectRequest(&s3.GetObjectInput{
+	objReq, _ := s.authProvider.S3().S3.GetObjectRequest(&s3.GetObjectInput{
 		Bucket: ptr.Of(bucket),
 		Key:    ptr.Of(key),
 	})

@@ -278,8 +320,7 @@ func (s *AWSS3) get(ctx context.Context, req *bindings.InvokeRequest) (*bindings
 	}

 	buff := &aws.WriteAtBuffer{}
-
-	_, err = s.downloader.DownloadWithContext(ctx,
+	_, err = s.authProvider.S3().Downloader.DownloadWithContext(ctx,
 		buff,
 		&s3.GetObjectInput{
 			Bucket: ptr.Of(s.metadata.Bucket),

@@ -287,6 +328,10 @@ func (s *AWSS3) get(ctx context.Context, req *bindings.InvokeRequest) (*bindings
 		},
 	)
 	if err != nil {
+		var awsErr awserr.Error
+		if errors.As(err, &awsErr) && awsErr.Code() == s3.ErrCodeNoSuchKey {
+			return nil, errors.New("object not found")
+		}
 		return nil, fmt.Errorf("s3 binding error: error downloading S3 object: %w", err)
 	}

@@ -309,8 +354,7 @@ func (s *AWSS3) delete(ctx context.Context, req *bindings.InvokeRequest) (*bindi
 	if key == "" {
 		return nil, fmt.Errorf("s3 binding error: required metadata '%s' missing", metadataKey)
 	}
-
-	_, err := s.s3Client.DeleteObjectWithContext(
+	_, err := s.authProvider.S3().S3.DeleteObjectWithContext(
 		ctx,
 		&s3.DeleteObjectInput{
 			Bucket: ptr.Of(s.metadata.Bucket),

@@ -318,6 +362,10 @@ func (s *AWSS3) delete(ctx context.Context, req *bindings.InvokeRequest) (*bindi
 		},
 	)
 	if err != nil {
+		var awsErr awserr.Error
+		if errors.As(err, &awsErr) && awsErr.Code() == s3.ErrCodeNoSuchKey {
+			return nil, errors.New("object not found")
+		}
 		return nil, fmt.Errorf("s3 binding error: delete operation failed: %w", err)
 	}

@@ -335,8 +383,7 @@ func (s *AWSS3) list(ctx context.Context, req *bindings.InvokeRequest) (*binding
 	if payload.MaxResults < 1 {
 		payload.MaxResults = defaultMaxResults
 	}
-
-	result, err := s.s3Client.ListObjectsWithContext(ctx, &s3.ListObjectsInput{
+	result, err := s.authProvider.S3().S3.ListObjectsWithContext(ctx, &s3.ListObjectsInput{
 		Bucket:  ptr.Of(s.metadata.Bucket),
 		MaxKeys: ptr.Of(int64(payload.MaxResults)),
 		Marker:  ptr.Of(payload.Marker),

@@ -376,20 +423,31 @@ func (s *AWSS3) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bindi

 func (s *AWSS3) parseMetadata(md bindings.Metadata) (*s3Metadata, error) {
 	var m s3Metadata
-	err := metadata.DecodeMetadata(md.Properties, &m)
+	err := kitmd.DecodeMetadata(md.Properties, &m)
 	if err != nil {
 		return nil, err
 	}
 	return &m, nil
 }

-func (s *AWSS3) getSession(metadata *s3Metadata) (*session.Session, error) {
-	sess, err := awsAuth.GetClient(metadata.AccessKey, metadata.SecretKey, metadata.SessionToken, metadata.Region, metadata.Endpoint)
-	if err != nil {
-		return nil, err
-	}
-
-	return sess, nil
-}
+// Helper for parsing s3 tags metadata
+func (s *AWSS3) parseS3Tags(raw string) (*string, error) {
+	tagEntries := strings.Split(raw, ",")
+	pairs := make([]string, 0, len(tagEntries))
+	for _, tagEntry := range tagEntries {
+		kv := strings.SplitN(strings.TrimSpace(tagEntry), "=", 2)
+		isInvalidTag := len(kv) != 2 || strings.TrimSpace(kv[0]) == "" || strings.TrimSpace(kv[1]) == ""
+		if isInvalidTag {
+			return nil, fmt.Errorf("invalid tag format: '%s' (expected key=value)", tagEntry)
+		}
+		pairs = append(pairs, fmt.Sprintf("%s=%s", strings.TrimSpace(kv[0]), strings.TrimSpace(kv[1])))
+	}
+
+	if len(pairs) == 0 {
+		return nil, nil
+	}
+
+	return aws.String(strings.Join(pairs, "&")), nil
+}

 // Helper to merge config and request metadata.

@@ -397,11 +455,11 @@ func (metadata s3Metadata) mergeWithRequestMetadata(req *bindings.InvokeRequest)
 	merged := metadata

 	if val, ok := req.Metadata[metadataDecodeBase64]; ok && val != "" {
-		merged.DecodeBase64 = utils.IsTruthy(val)
+		merged.DecodeBase64 = kitstrings.IsTruthy(val)
 	}

 	if val, ok := req.Metadata[metadataEncodeBase64]; ok && val != "" {
-		merged.EncodeBase64 = utils.IsTruthy(val)
+		merged.EncodeBase64 = kitstrings.IsTruthy(val)
 	}

 	if val, ok := req.Metadata[metadataFilePath]; ok && val != "" {

@@ -412,13 +470,16 @@ func (metadata s3Metadata) mergeWithRequestMetadata(req *bindings.InvokeRequest)
 		merged.PresignTTL = val
 	}

+	if val, ok := req.Metadata[metadataStorageClass]; ok && val != "" {
+		merged.StorageClass = val
+	}
+
 	return merged, nil
 }

 // GetComponentMetadata returns the metadata of the component.
-func (s *AWSS3) GetComponentMetadata() map[string]string {
+func (s *AWSS3) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := s3Metadata{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }
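The behavior of the new `parseS3Tags` helper is pinned down by the test in the next file: a comma-separated, whitespace-tolerant `key=value` list becomes the URL-encoded `Tagging` string S3 expects, and malformed entries are rejected. For instance:

```go
s3 := AWSS3{}

tagging, err := s3.parseS3Tags("project=myproject, year=2024")
// err == nil; *tagging == "project=myproject&year=2024"

_, err = s3.parseS3Tags("projectmyproject")
// err: invalid tag format: 'projectmyproject' (expected key=value)
```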
@@ -14,10 +14,10 @@ limitations under the License.
 package s3

 import (
-	"context"
 	"testing"

 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"

 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/kit/logger"

@@ -40,16 +40,34 @@ func TestParseMetadata(t *testing.T) {
 	s3 := AWSS3{}
 	meta, err := s3.parseMetadata(m)

-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "key", meta.AccessKey)
 	assert.Equal(t, "region", meta.Region)
 	assert.Equal(t, "secret", meta.SecretKey)
 	assert.Equal(t, "test", meta.Bucket)
 	assert.Equal(t, "endpoint", meta.Endpoint)
 	assert.Equal(t, "token", meta.SessionToken)
-	assert.Equal(t, true, meta.ForcePathStyle)
-	assert.Equal(t, true, meta.DisableSSL)
-	assert.Equal(t, true, meta.InsecureSSL)
+	assert.True(t, meta.ForcePathStyle)
+	assert.True(t, meta.DisableSSL)
+	assert.True(t, meta.InsecureSSL)
 })
 }

+func TestParseS3Tags(t *testing.T) {
+	t.Run("Has parsed s3 tags", func(t *testing.T) {
+		request := bindings.InvokeRequest{}
+		request.Metadata = map[string]string{
+			"decodeBase64": "yes",
+			"encodeBase64": "false",
+			"filePath":     "/usr/vader.darth",
+			"storageClass": "STANDARD_IA",
+			"tags":         "project=myproject,year=2024",
+		}
+		s3 := AWSS3{}
+		parsedTags, err := s3.parseS3Tags(request.Metadata["tags"])
+
+		require.NoError(t, err)
+		assert.Equal(t, "project=myproject&year=2024", *parsedTags)
+	})
+}

@@ -67,14 +85,14 @@ func TestMergeWithRequestMetadata(t *testing.T) {
 	}
 	s3 := AWSS3{}
 	meta, err := s3.parseMetadata(m)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "key", meta.AccessKey)
 	assert.Equal(t, "region", meta.Region)
 	assert.Equal(t, "secret", meta.SecretKey)
 	assert.Equal(t, "test", meta.Bucket)
 	assert.Equal(t, "endpoint", meta.Endpoint)
 	assert.Equal(t, "token", meta.SessionToken)
-	assert.Equal(t, true, meta.ForcePathStyle)
+	assert.True(t, meta.ForcePathStyle)

 	request := bindings.InvokeRequest{}
 	request.Metadata = map[string]string{

@@ -82,22 +100,24 @@ func TestMergeWithRequestMetadata(t *testing.T) {
 		"encodeBase64": "false",
 		"filePath":     "/usr/vader.darth",
 		"presignTTL":   "15s",
+		"storageClass": "STANDARD_IA",
 	}

 	mergedMeta, err := meta.mergeWithRequestMetadata(&request)

-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "key", mergedMeta.AccessKey)
 	assert.Equal(t, "region", mergedMeta.Region)
 	assert.Equal(t, "secret", mergedMeta.SecretKey)
 	assert.Equal(t, "test", mergedMeta.Bucket)
 	assert.Equal(t, "endpoint", mergedMeta.Endpoint)
 	assert.Equal(t, "token", mergedMeta.SessionToken)
-	assert.Equal(t, true, meta.ForcePathStyle)
-	assert.Equal(t, true, mergedMeta.DecodeBase64)
-	assert.Equal(t, false, mergedMeta.EncodeBase64)
+	assert.True(t, meta.ForcePathStyle)
+	assert.True(t, mergedMeta.DecodeBase64)
+	assert.False(t, mergedMeta.EncodeBase64)
 	assert.Equal(t, "/usr/vader.darth", mergedMeta.FilePath)
 	assert.Equal(t, "15s", mergedMeta.PresignTTL)
+	assert.Equal(t, "STANDARD_IA", mergedMeta.StorageClass)
 })

 t.Run("Has invalid merged metadata decodeBase64", func(t *testing.T) {

@@ -113,14 +133,14 @@ func TestMergeWithRequestMetadata(t *testing.T) {
 	}
 	s3 := AWSS3{}
 	meta, err := s3.parseMetadata(m)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "key", meta.AccessKey)
 	assert.Equal(t, "region", meta.Region)
 	assert.Equal(t, "secret", meta.SecretKey)
 	assert.Equal(t, "test", meta.Bucket)
 	assert.Equal(t, "endpoint", meta.Endpoint)
 	assert.Equal(t, "token", meta.SessionToken)
-	assert.Equal(t, true, meta.ForcePathStyle)
+	assert.True(t, meta.ForcePathStyle)

 	request := bindings.InvokeRequest{}
 	request.Metadata = map[string]string{

@@ -129,7 +149,7 @@ func TestMergeWithRequestMetadata(t *testing.T) {

 	mergedMeta, err := meta.mergeWithRequestMetadata(&request)

-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.False(t, mergedMeta.DecodeBase64)
 })

@@ -146,14 +166,14 @@ func TestMergeWithRequestMetadata(t *testing.T) {
 	}
 	s3 := AWSS3{}
 	meta, err := s3.parseMetadata(m)
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "key", meta.AccessKey)
 	assert.Equal(t, "region", meta.Region)
 	assert.Equal(t, "secret", meta.SecretKey)
 	assert.Equal(t, "test", meta.Bucket)
 	assert.Equal(t, "endpoint", meta.Endpoint)
 	assert.Equal(t, "token", meta.SessionToken)
-	assert.Equal(t, true, meta.ForcePathStyle)
+	assert.True(t, meta.ForcePathStyle)

 	request := bindings.InvokeRequest{}
 	request.Metadata = map[string]string{

@@ -162,7 +182,7 @@ func TestMergeWithRequestMetadata(t *testing.T) {

 	mergedMeta, err := meta.mergeWithRequestMetadata(&request)

-	assert.NoError(t, err)
+	require.NoError(t, err)
 	assert.False(t, mergedMeta.EncodeBase64)
 })
 }

@@ -173,8 +193,8 @@ func TestGetOption(t *testing.T) {

 t.Run("return error if key is missing", func(t *testing.T) {
 	r := bindings.InvokeRequest{}
-	_, err := s3.get(context.Background(), &r)
-	assert.Error(t, err)
+	_, err := s3.get(t.Context(), &r)
+	require.Error(t, err)
 })
 }

@@ -184,7 +204,7 @@ func TestDeleteOption(t *testing.T) {

 t.Run("return error if key is missing", func(t *testing.T) {
 	r := bindings.InvokeRequest{}
-	_, err := s3.delete(context.Background(), &r)
-	assert.Error(t, err)
+	_, err := s3.delete(t.Context(), &r)
+	require.Error(t, err)
 })
 }
@@ -15,20 +15,20 @@ package ses

 import (
 	"context"
+	"errors"
 	"fmt"
 	"reflect"
 	"strconv"
 	"strings"

 	"github.com/aws/aws-sdk-go/aws"
-
-	awsAuth "github.com/dapr/components-contrib/internal/authentication/aws"
-
 	"github.com/aws/aws-sdk-go/service/ses"

 	"github.com/dapr/components-contrib/bindings"
+	awsAuth "github.com/dapr/components-contrib/common/authentication/aws"
 	contribMetadata "github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )

 const (

@@ -38,9 +38,9 @@ const (

 // AWSSES is an AWS SNS binding.
 type AWSSES struct {
-	metadata *sesMetadata
-	logger   logger.Logger
-	svc      *ses.SES
+	authProvider awsAuth.Provider
+	metadata     *sesMetadata
+	logger       logger.Logger
 }

 type sesMetadata struct {

@@ -61,19 +61,29 @@ func NewAWSSES(logger logger.Logger) bindings.OutputBinding {
 }

 // Init does metadata parsing.
-func (a *AWSSES) Init(_ context.Context, metadata bindings.Metadata) error {
+func (a *AWSSES) Init(ctx context.Context, metadata bindings.Metadata) error {
 	// Parse input metadata
-	meta, err := a.parseMetadata(metadata)
+	m, err := a.parseMetadata(metadata)
 	if err != nil {
 		return err
 	}

-	svc, err := a.getClient(meta)
+	a.metadata = m
+
+	opts := awsAuth.Options{
+		Logger:       a.logger,
+		Properties:   metadata.Properties,
+		Region:       m.Region,
+		AccessKey:    m.AccessKey,
+		SecretKey:    m.SecretKey,
+		SessionToken: "",
+	}
+	// extra configs needed per component type
+	provider, err := awsAuth.NewProvider(ctx, opts, awsAuth.GetConfig(opts))
 	if err != nil {
 		return err
 	}
-	a.metadata = meta
-	a.svc = svc
+	a.authProvider = provider

 	return nil
 }

@@ -84,7 +94,7 @@ func (a *AWSSES) Operations() []bindings.OperationKind {

 func (a *AWSSES) parseMetadata(meta bindings.Metadata) (*sesMetadata, error) {
 	m := sesMetadata{}
-	contribMetadata.DecodeMetadata(meta.Properties, &m)
+	kitmd.DecodeMetadata(meta.Properties, &m)

 	return &m, nil
 }

@@ -93,13 +103,13 @@ func (a *AWSSES) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bind
 	metadata := a.metadata.mergeWithRequestMetadata(req)

 	if metadata.EmailFrom == "" {
-		return nil, fmt.Errorf("SES binding error: emailFrom property not supplied in configuration- or request-metadata")
+		return nil, errors.New("SES binding error: emailFrom property not supplied in configuration- or request-metadata")
 	}
 	if metadata.EmailTo == "" {
-		return nil, fmt.Errorf("SES binding error: emailTo property not supplied in configuration- or request-metadata")
+		return nil, errors.New("SES binding error: emailTo property not supplied in configuration- or request-metadata")
 	}
 	if metadata.Subject == "" {
-		return nil, fmt.Errorf("SES binding error: subject property not supplied in configuration- or request-metadata")
+		return nil, errors.New("SES binding error: subject property not supplied in configuration- or request-metadata")
 	}

 	body, err := strconv.Unquote(string(req.Data))

@@ -141,7 +151,7 @@ func (a *AWSSES) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bind
 	}

 	// Attempt to send the email.
-	result, err := a.svc.SendEmail(input)
+	result, err := a.authProvider.Ses().Ses.SendEmail(input)
 	if err != nil {
 		return nil, fmt.Errorf("SES binding error. Sending email failed: %w", err)
 	}

@@ -154,26 +164,20 @@ func (a *AWSSES) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bind
 // Helper to merge config and request metadata.
 func (metadata sesMetadata) mergeWithRequestMetadata(req *bindings.InvokeRequest) sesMetadata {
 	merged := metadata
-	contribMetadata.DecodeMetadata(req.Metadata, &merged)
+	kitmd.DecodeMetadata(req.Metadata, &merged)
 	return merged
 }

-func (a *AWSSES) getClient(metadata *sesMetadata) (*ses.SES, error) {
-	sess, err := awsAuth.GetClient(metadata.AccessKey, metadata.SecretKey, metadata.SessionToken, metadata.Region, "")
-	if err != nil {
-		return nil, fmt.Errorf("SES binding error: error creating AWS session %w", err)
-	}
-
-	// Create an SES instance
-	svc := ses.New(sess)
-
-	return svc, nil
-}
-
 // GetComponentMetadata returns the metadata of the component.
-func (a *AWSSES) GetComponentMetadata() map[string]string {
+func (a *AWSSES) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
 	metadataStruct := sesMetadata{}
-	metadataInfo := map[string]string{}
 	contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
-	return metadataInfo
+	return
 }

 func (a *AWSSES) Close() error {
+	if a.authProvider != nil {
+		return a.authProvider.Close()
+	}
 	return nil
 }
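Every binding touched in this change gains the same `Close` shape: the provider field is only set once `Init` succeeds, so a nil check makes `Close` safe on a component that never initialized. A standalone sketch of the lifecycle, where both types are stand-ins for `awsAuth.Provider` and the real bindings:

```go
package example

import "io"

// provider stands in for awsAuth.Provider: one shared handle to AWS
// credentials and service clients, built in Init and released in Close.
type provider struct{}

func (p *provider) Close() error { return nil }

type binding struct {
	authProvider *provider // nil until Init succeeds
}

func (b *binding) Init() error {
	b.authProvider = &provider{}
	return nil
}

// Close mirrors the pattern added across these bindings: nil-check, then
// delegate, so closing a failed or never-initialized component is a no-op.
func (b *binding) Close() error {
	if b.authProvider != nil {
		return b.authProvider.Close()
	}
	return nil
}

var _ io.Closer = (*binding)(nil)
```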
@@ -17,6 +17,7 @@ import (
 	"testing"

 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"

 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/kit/logger"

@@ -40,7 +41,7 @@ func TestParseMetadata(t *testing.T) {
 	}
 	r := AWSSES{logger: logger}
 	smtpMeta, err := r.parseMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "myRegionForSES", smtpMeta.Region)
 	assert.Equal(t, "myAccessKeyForSES", smtpMeta.AccessKey)
 	assert.Equal(t, "mySecretKeyForSES", smtpMeta.SecretKey)
@@ -0,0 +1,32 @@
+# yaml-language-server: $schema=../../../component-metadata-schema.json
+schemaVersion: v1
+type: bindings
+name: aws.sns
+version: v1
+status: alpha
+title: "AWS SNS"
+urls:
+  - title: Reference
+    url: https://docs.dapr.io/reference/components-reference/supported-bindings/sns/
+binding:
+  output: true
+  operations:
+    - name: create
+      description: "Create a new subscription"
+capabilities: []
+builtinAuthenticationProfiles:
+  - name: "aws"
+metadata:
+  - name: topicArn
+    required: true
+    description: |
+      The SNS topic name.
+    example: '"arn:::topicarn"'
+    type: string
+  - name: endpoint
+    required: false
+    description: |
+      AWS endpoint for the component to use, to connect to SNS-compatible services or emulators.
+      Do not use this when running against production AWS.
+    example: '"http://localhost:4566"'
+    type: string
@@ -22,26 +22,30 @@ import (
 	"github.com/aws/aws-sdk-go/service/sns"
 
 	"github.com/dapr/components-contrib/bindings"
-	awsAuth "github.com/dapr/components-contrib/internal/authentication/aws"
+	awsAuth "github.com/dapr/components-contrib/common/authentication/aws"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )
 
 // AWSSNS is an AWS SNS binding.
 type AWSSNS struct {
-	client   *sns.SNS
-	topicARN string
+	authProvider awsAuth.Provider
+	topicARN     string
 
 	logger logger.Logger
 }
 
 type snsMetadata struct {
-	TopicArn     string `json:"topicArn"`
-	Region       string `json:"region"`
-	Endpoint     string `json:"endpoint"`
-	AccessKey    string `json:"accessKey"`
-	SecretKey    string `json:"secretKey"`
-	SessionToken string `json:"sessionToken"`
+	// Ignored by metadata parser because included in built-in authentication profile
+	AccessKey    string `json:"accessKey" mapstructure:"accessKey" mdignore:"true"`
+	SecretKey    string `json:"secretKey" mapstructure:"secretKey" mdignore:"true"`
+	SessionToken string `json:"sessionToken" mapstructure:"sessionToken" mdignore:"true"`
+
+	TopicArn string `json:"topicArn"`
+	// TODO: in Dapr 1.17 rm the alias on region as we remove the aws prefix on these fields
+	Region   string `json:"region" mapstructure:"region" mapstructurealiases:"awsRegion" mdignore:"true"`
+	Endpoint string `json:"endpoint"`
 }
 
 type dataPayload struct {
@@ -55,16 +59,27 @@ func NewAWSSNS(logger logger.Logger) bindings.OutputBinding {
 }
 
 // Init does metadata parsing.
-func (a *AWSSNS) Init(_ context.Context, metadata bindings.Metadata) error {
+func (a *AWSSNS) Init(ctx context.Context, metadata bindings.Metadata) error {
 	m, err := a.parseMetadata(metadata)
 	if err != nil {
 		return err
 	}
-	client, err := a.getClient(m)
+
+	opts := awsAuth.Options{
+		Logger:       a.logger,
+		Properties:   metadata.Properties,
+		Region:       m.Region,
+		Endpoint:     m.Endpoint,
+		AccessKey:    m.AccessKey,
+		SecretKey:    m.SecretKey,
+		SessionToken: m.SessionToken,
+	}
+	// extra configs needed per component type
+	provider, err := awsAuth.NewProvider(ctx, opts, awsAuth.GetConfig(opts))
 	if err != nil {
 		return err
 	}
-	a.client = client
+	a.authProvider = provider
 	a.topicARN = m.TopicArn
 
 	return nil
@@ -72,7 +87,7 @@ func (a *AWSSNS) Init(_ context.Context, metadata bindings.Metadata) error {
 
 func (a *AWSSNS) parseMetadata(meta bindings.Metadata) (*snsMetadata, error) {
 	m := snsMetadata{}
-	err := metadata.DecodeMetadata(meta.Properties, &m)
+	err := kitmd.DecodeMetadata(meta.Properties, &m)
 	if err != nil {
 		return nil, err
 	}
@@ -80,16 +95,6 @@ func (a *AWSSNS) parseMetadata(meta bindings.Metadata) (*snsMetadata, error) {
 	return &m, nil
 }
 
-func (a *AWSSNS) getClient(metadata *snsMetadata) (*sns.SNS, error) {
-	sess, err := awsAuth.GetClient(metadata.AccessKey, metadata.SecretKey, metadata.SessionToken, metadata.Region, metadata.Endpoint)
-	if err != nil {
-		return nil, err
-	}
-	c := sns.New(sess)
-
-	return c, nil
-}
-
 func (a *AWSSNS) Operations() []bindings.OperationKind {
 	return []bindings.OperationKind{bindings.CreateOperation}
 }
@@ -104,7 +109,7 @@ func (a *AWSSNS) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bind
 	msg := fmt.Sprintf("%v", payload.Message)
 	subject := fmt.Sprintf("%v", payload.Subject)
 
-	_, err = a.client.PublishWithContext(ctx, &sns.PublishInput{
+	_, err = a.authProvider.Sns().Sns.PublishWithContext(ctx, &sns.PublishInput{
 		Message:  &msg,
 		Subject:  &subject,
 		TopicArn: &a.topicARN,
@@ -117,9 +122,15 @@ func (a *AWSSNS) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bind
 }
 
 // GetComponentMetadata returns the metadata of the component.
-func (a *AWSSNS) GetComponentMetadata() map[string]string {
+func (a *AWSSNS) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := snsMetadata{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }
+
+func (a *AWSSNS) Close() error {
+	if a.authProvider != nil {
+		return a.authProvider.Close()
+	}
+	return nil
+}

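For context on how the migrated binding is driven, here is a minimal usage sketch, not part of the diff. It assumes the import path bindings/aws/sns and emulator-style dummy credentials; the payload keys are illustrative, since the dataPayload struct is not shown in this hunk.

package main

import (
	"context"
	"io"

	"github.com/dapr/components-contrib/bindings"
	"github.com/dapr/components-contrib/bindings/aws/sns" // assumed import path
	"github.com/dapr/kit/logger"
)

func main() {
	b := sns.NewAWSSNS(logger.NewLogger("sns-example"))

	// Property names follow the snsMetadata struct above; the endpoint value
	// matches the metadata.yaml example for SNS-compatible emulators.
	var m bindings.Metadata
	m.Properties = map[string]string{
		"topicArn":  "arn:aws:sns:us-east-1:000000000000:mytopic", // hypothetical ARN
		"region":    "us-east-1",
		"endpoint":  "http://localhost:4566",
		"accessKey": "test",
		"secretKey": "test",
	}

	ctx := context.Background()
	// Init now builds the awsAuth.Provider instead of a raw *sns.SNS client.
	if err := b.Init(ctx, m); err != nil {
		panic(err)
	}
	if closer, ok := b.(io.Closer); ok {
		defer closer.Close() // releases the auth provider, per the new Close method
	}

	// The create operation publishes via authProvider.Sns().
	_, err := b.Invoke(ctx, &bindings.InvokeRequest{
		Operation: bindings.CreateOperation,
		Data:      []byte(`{"message": "hello", "subject": "greeting"}`), // keys are illustrative
	})
	if err != nil {
		panic(err)
	}
}
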
@@ -17,6 +17,7 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 
 	"github.com/dapr/components-contrib/bindings"
 )
@@ -28,7 +29,7 @@ func TestParseMetadata(t *testing.T) {
 	}
 	s := AWSSNS{}
 	snsM, err := s.parseMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "a", snsM.TopicArn)
 	assert.Equal(t, "a", snsM.Region)
 	assert.Equal(t, "a", snsM.AccessKey)

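The assert-to-require swap in these tests is behavioral, not cosmetic: testify's assert records a failure and keeps executing, while require calls t.FailNow. A small sketch of the difference, with a hypothetical parse helper standing in for parseMetadata:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

type parsed struct{ TopicArn string }

// parse is a hypothetical stand-in for parseMetadata.
func parse(ok bool) (*parsed, error) {
	if !ok {
		return nil, assert.AnError
	}
	return &parsed{TopicArn: "a"}, nil
}

func TestParse(t *testing.T) {
	p, err := parse(true)
	// With assert.Nil(t, err), a failure here would be recorded but the test
	// would continue and dereference a nil p below. require.NoError stops the
	// test immediately on error, so the follow-up assertions are safe.
	require.NoError(t, err)
	assert.Equal(t, "a", p.TopicArn)
}
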
@@ -25,20 +25,20 @@ import (
 	"github.com/aws/aws-sdk-go/service/sqs"
 
 	"github.com/dapr/components-contrib/bindings"
-	awsAuth "github.com/dapr/components-contrib/internal/authentication/aws"
+	awsAuth "github.com/dapr/components-contrib/common/authentication/aws"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )
 
 // AWSSQS allows receiving and sending data to/from AWS SQS.
 type AWSSQS struct {
-	Client   *sqs.SQS
-	QueueURL *string
-
-	logger  logger.Logger
-	wg      sync.WaitGroup
-	closeCh chan struct{}
-	closed  atomic.Bool
+	authProvider awsAuth.Provider
+	queueName    string
+	logger       logger.Logger
+	wg           sync.WaitGroup
+	closeCh      chan struct{}
+	closed       atomic.Bool
 }
 
 type sqsMetadata struct {
@@ -65,21 +65,22 @@ func (a *AWSSQS) Init(ctx context.Context, metadata bindings.Metadata) error {
 		return err
 	}
 
-	client, err := a.getClient(m)
+	opts := awsAuth.Options{
+		Logger:       a.logger,
+		Properties:   metadata.Properties,
+		Region:       m.Region,
+		Endpoint:     m.Endpoint,
+		AccessKey:    m.AccessKey,
+		SecretKey:    m.SecretKey,
+		SessionToken: m.SessionToken,
+	}
+	// extra configs needed per component type
+	provider, err := awsAuth.NewProvider(ctx, opts, awsAuth.GetConfig(opts))
 	if err != nil {
 		return err
 	}
-
-	queueName := m.QueueName
-	resultURL, err := client.GetQueueUrlWithContext(ctx, &sqs.GetQueueUrlInput{
-		QueueName: aws.String(queueName),
-	})
-	if err != nil {
-		return err
-	}
-
-	a.QueueURL = resultURL.QueueUrl
-	a.Client = client
+	a.authProvider = provider
+	a.queueName = m.QueueName
 
 	return nil
 }
@@ -90,9 +91,14 @@ func (a *AWSSQS) Operations() []bindings.OperationKind {
 
 func (a *AWSSQS) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
 	msgBody := string(req.Data)
-	_, err := a.Client.SendMessageWithContext(ctx, &sqs.SendMessageInput{
+
+	url, err := a.authProvider.Sqs().QueueURL(ctx, a.queueName)
+	if err != nil {
+		a.logger.Errorf("failed to get queue url: %v", err)
+	}
+
+	_, err = a.authProvider.Sqs().Sqs.SendMessageWithContext(ctx, &sqs.SendMessageInput{
 		MessageBody: &msgBody,
-		QueueUrl:    a.QueueURL,
+		QueueUrl:    url,
 	})
 
 	return nil, err
@@ -112,9 +118,13 @@ func (a *AWSSQS) Read(ctx context.Context, handler bindings.Handler) error {
 			if ctx.Err() != nil || a.closed.Load() {
 				return
 			}
+			url, err := a.authProvider.Sqs().QueueURL(ctx, a.queueName)
+			if err != nil {
+				a.logger.Errorf("failed to get queue url: %v", err)
+			}
 
-			result, err := a.Client.ReceiveMessageWithContext(ctx, &sqs.ReceiveMessageInput{
-				QueueUrl: a.QueueURL,
+			result, err := a.authProvider.Sqs().Sqs.ReceiveMessageWithContext(ctx, &sqs.ReceiveMessageInput{
+				QueueUrl: url,
 				AttributeNames: aws.StringSlice([]string{
 					"SentTimestamp",
 				}),
@@ -125,7 +135,7 @@ func (a *AWSSQS) Read(ctx context.Context, handler bindings.Handler) error {
 				WaitTimeSeconds: aws.Int64(20),
 			})
 			if err != nil {
-				a.logger.Errorf("Unable to receive message from queue %q, %v.", *a.QueueURL, err)
+				a.logger.Errorf("Unable to receive message from queue %q, %v.", url, err)
 			}
 
 			if len(result.Messages) > 0 {
@@ -139,8 +149,8 @@ func (a *AWSSQS) Read(ctx context.Context, handler bindings.Handler) error {
 					msgHandle := m.ReceiptHandle
 
 					// Use a background context here because ctx may be canceled already
-					a.Client.DeleteMessageWithContext(context.Background(), &sqs.DeleteMessageInput{
-						QueueUrl:      a.QueueURL,
+					a.authProvider.Sqs().Sqs.DeleteMessageWithContext(context.Background(), &sqs.DeleteMessageInput{
+						QueueUrl:      url,
 						ReceiptHandle: msgHandle,
 					})
 				}
@@ -163,12 +173,15 @@ func (a *AWSSQS) Close() error {
 		close(a.closeCh)
 	}
 	a.wg.Wait()
+	if a.authProvider != nil {
+		return a.authProvider.Close()
+	}
 	return nil
 }
 
 func (a *AWSSQS) parseSQSMetadata(meta bindings.Metadata) (*sqsMetadata, error) {
 	m := sqsMetadata{}
-	err := metadata.DecodeMetadata(meta.Properties, &m)
+	err := kitmd.DecodeMetadata(meta.Properties, &m)
 	if err != nil {
 		return nil, err
 	}
@@ -176,20 +189,9 @@ func (a *AWSSQS) parseSQSMetadata(meta bindings.Metadata) (*sqsMetadata, error)
 	return &m, nil
 }
 
-func (a *AWSSQS) getClient(metadata *sqsMetadata) (*sqs.SQS, error) {
-	sess, err := awsAuth.GetClient(metadata.AccessKey, metadata.SecretKey, metadata.SessionToken, metadata.Region, metadata.Endpoint)
-	if err != nil {
-		return nil, err
-	}
-	c := sqs.New(sess)
-
-	return c, nil
-}
-
 // GetComponentMetadata returns the metadata of the component.
-func (a *AWSSQS) GetComponentMetadata() map[string]string {
+func (a *AWSSQS) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := sqsMetadata{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }

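One consequence of dropping the stored a.QueueURL field: Invoke and every Read iteration now ask the provider for the queue URL. Assuming that lookup can involve a remote GetQueueUrl call and is not cached by the provider (the diff does not say either way), a memoizing wrapper in the spirit of the old field might look like this sketch:

package example

import (
	"context"
	"sync"
)

// resolveFunc matches the shape of authProvider.Sqs().QueueURL as used above.
type resolveFunc func(ctx context.Context, queueName string) (*string, error)

// queueURLCache memoizes queue-name to queue-URL lookups. Illustrative only;
// the provider in the diff may already cache internally.
type queueURLCache struct {
	mu      sync.Mutex
	resolve resolveFunc
	urls    map[string]*string
}

func (c *queueURLCache) get(ctx context.Context, name string) (*string, error) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if u, ok := c.urls[name]; ok {
		return u, nil
	}
	u, err := c.resolve(ctx, name)
	if err != nil {
		return nil, err
	}
	if c.urls == nil {
		c.urls = make(map[string]*string)
	}
	c.urls[name] = u
	return u, nil
}
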
@@ -17,6 +17,7 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 
 	"github.com/dapr/components-contrib/bindings"
 )
@@ -28,7 +29,7 @@ func TestParseMetadata(t *testing.T) {
 	}
 	s := AWSSQS{}
 	sqsM, err := s.parseSQSMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "a", sqsM.QueueName)
 	assert.Equal(t, "a", sqsM.Region)
 	assert.Equal(t, "a", sqsM.AccessKey)

@@ -25,12 +25,13 @@ import (
 
 	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob"
 	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/blob"
+	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/bloberror"
 	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/blockblob"
 	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/container"
 	"github.com/google/uuid"
 
 	"github.com/dapr/components-contrib/bindings"
-	storageinternal "github.com/dapr/components-contrib/internal/component/azure/blobstorage"
+	storagecommon "github.com/dapr/components-contrib/common/component/azure/blobstorage"
 	contribMetadata "github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
 	"github.com/dapr/kit/ptr"
@@ -46,6 +47,8 @@ const (
 	metadataKeyMarker = "marker"
 	// The number of blobs that will be returned in a list operation.
 	metadataKeyNumber = "number"
+	// Defines the response metadata key for the number of pages traversed in a list response.
+	metadataKeyPagesTraversed = "pagesTraversed"
 	// Defines if the user defined metadata should be returned in the get operation.
 	metadataKeyIncludeMetadata = "includeMetadata"
 	// Defines the delete snapshots option for the delete operation.
@@ -62,7 +65,7 @@ var ErrMissingBlobName = errors.New("blobName is a required attribute")
 
 // AzureBlobStorage allows saving blobs to an Azure Blob Storage account.
 type AzureBlobStorage struct {
-	metadata        *storageinternal.BlobStorageMetadata
+	metadata        *storagecommon.BlobStorageMetadata
 	containerClient *container.Client
 
 	logger logger.Logger
@@ -96,7 +99,7 @@ func NewAzureBlobStorage(logger logger.Logger) bindings.OutputBinding {
 // Init performs metadata parsing.
 func (a *AzureBlobStorage) Init(ctx context.Context, metadata bindings.Metadata) error {
 	var err error
-	a.containerClient, a.metadata, err = storageinternal.CreateContainerStorageClient(ctx, a.logger, metadata.Properties)
+	a.containerClient, a.metadata, err = storagecommon.CreateContainerStorageClient(ctx, a.logger, metadata.Properties)
 	if err != nil {
 		return err
 	}
@@ -125,7 +128,7 @@ func (a *AzureBlobStorage) create(ctx context.Context, req *bindings.InvokeReque
 		blobName = id.String()
 	}
 
-	blobHTTPHeaders, err := storageinternal.CreateBlobHTTPHeadersFromRequest(req.Metadata, nil, a.logger)
+	blobHTTPHeaders, err := storagecommon.CreateBlobHTTPHeadersFromRequest(req.Metadata, nil, a.logger)
 	if err != nil {
 		return nil, err
 	}
@@ -144,14 +147,13 @@ func (a *AzureBlobStorage) create(ctx context.Context, req *bindings.InvokeReque
 	}
 
 	uploadOptions := azblob.UploadBufferOptions{
-		Metadata:                storageinternal.SanitizeMetadata(a.logger, req.Metadata),
+		Metadata:                storagecommon.SanitizeMetadata(a.logger, req.Metadata),
 		HTTPHeaders:             &blobHTTPHeaders,
 		TransactionalContentMD5: blobHTTPHeaders.BlobContentMD5,
 	}
 
 	blockBlobClient := a.containerClient.NewBlockBlobClient(blobName)
 	_, err = blockBlobClient.UploadBuffer(ctx, req.Data, &uploadOptions)
-
 	if err != nil {
 		return nil, fmt.Errorf("error uploading az blob: %w", err)
 	}
@@ -188,6 +190,9 @@ func (a *AzureBlobStorage) get(ctx context.Context, req *bindings.InvokeRequest)
 
 	blobDownloadResponse, err := blockBlobClient.DownloadStream(ctx, &downloadOptions)
 	if err != nil {
+		if bloberror.HasCode(err, bloberror.BlobNotFound) {
+			return nil, errors.New("blob not found")
+		}
 		return nil, fmt.Errorf("error downloading az blob: %w", err)
 	}
 	reader := blobDownloadResponse.Body
@@ -254,6 +259,10 @@ func (a *AzureBlobStorage) delete(ctx context.Context, req *bindings.InvokeReque
 		blockBlobClient = a.containerClient.NewBlockBlobClient(val)
 		_, err := blockBlobClient.Delete(ctx, &deleteOptions)
 
+		if bloberror.HasCode(err, bloberror.BlobNotFound) {
+			return nil, errors.New("blob not found")
+		}
+
 		return nil, err
 	}
 
@@ -295,30 +304,34 @@ func (a *AzureBlobStorage) list(ctx context.Context, req *bindings.InvokeRequest
 	}
 	options.Marker = &initialMarker
 
-	metadata := map[string]string{}
+	metadata := make(map[string]string, 3)
 	blobs := []*container.BlobItem{}
 	pager := a.containerClient.NewListBlobsFlatPager(&options)
 
+	metadata[metadataKeyMarker] = ""
+	numBlobs := 0
+	pagesTraversed := 0
 	for pager.More() {
 		resp, err := pager.NextPage(ctx)
 		if err != nil {
 			return nil, fmt.Errorf("error listing blobs: %w", err)
 		}
+		pagesTraversed++
 
 		blobs = append(blobs, resp.Segment.BlobItems...)
-		numBlobs := len(blobs)
-		metadata[metadataKeyNumber] = strconv.FormatInt(int64(numBlobs), 10)
-		metadata[metadataKeyMarker] = ""
+		numBlobs += len(resp.Segment.BlobItems)
 		if resp.Marker != nil {
 			metadata[metadataKeyMarker] = *resp.Marker
-		} else {
-			metadata[metadataKeyMarker] = ""
 		}
 
-		if *options.MaxResults-maxResults > 0 {
-			*options.MaxResults -= maxResults
-		} else {
+		if numBlobs >= int(*options.MaxResults) {
 			break
 		}
 	}
+	metadata[metadataKeyNumber] = strconv.FormatInt(int64(numBlobs), 10)
+	metadata[metadataKeyPagesTraversed] = strconv.FormatInt(int64(pagesTraversed), 10)
 
 	jsonResponse, err := json.Marshal(blobs)
 	if err != nil {
@@ -358,9 +371,12 @@ func (a *AzureBlobStorage) isValidDeleteSnapshotsOptionType(accessType azblob.De
 }
 
 // GetComponentMetadata returns the metadata of the component.
-func (a *AzureBlobStorage) GetComponentMetadata() map[string]string {
-	metadataStruct := storageinternal.BlobStorageMetadata{}
-	metadataInfo := map[string]string{}
+func (a *AzureBlobStorage) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
+	metadataStruct := storagecommon.BlobStorageMetadata{}
 	contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
-	return metadataInfo
+	return
 }
 
+func (a *AzureBlobStorage) Close() error {
+	return nil
+}

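The list rework above changes the accounting: instead of mutating options.MaxResults page by page, it accumulates a running numBlobs count, remembers the continuation marker, counts pages, and writes the response metadata once after the loop. Stripped of the Azure SDK types, the loop shape looks like the following sketch; page and pager are stand-ins, not the azblob API.

package example

import (
	"context"
	"fmt"
)

// page and pager stand in for the SDK's list response and
// ListBlobsFlatPager; only the shape matters here.
type page struct {
	items  []string
	marker *string
}

type pager interface {
	More() bool
	NextPage(ctx context.Context) (page, error)
}

// listAll mirrors the reworked loop: accumulate items, remember the last
// continuation marker, count pages, and stop once maxResults is reached.
func listAll(ctx context.Context, p pager, maxResults int) ([]string, string, int, error) {
	var items []string
	marker := ""
	pages := 0
	for p.More() {
		resp, err := p.NextPage(ctx)
		if err != nil {
			return nil, "", pages, fmt.Errorf("error listing blobs: %w", err)
		}
		pages++
		items = append(items, resp.items...)
		if resp.marker != nil {
			marker = *resp.marker
		}
		if len(items) >= maxResults {
			break
		}
	}
	// In the diff, metadataKeyNumber and metadataKeyPagesTraversed are set
	// here, after the loop, rather than being overwritten on every page.
	return items, marker, pages, nil
}
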
@@ -14,10 +14,9 @@ limitations under the License.
 package blobstorage
 
 import (
-	"context"
 	"testing"
 
-	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 
 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/kit/logger"
@@ -28,10 +27,9 @@ func TestGetOption(t *testing.T) {
 
 	t.Run("return error if blobName is missing", func(t *testing.T) {
 		r := bindings.InvokeRequest{}
-		_, err := blobStorage.get(context.Background(), &r)
-		if assert.Error(t, err) {
-			assert.Equal(t, ErrMissingBlobName, err)
-		}
+		_, err := blobStorage.get(t.Context(), &r)
+		require.Error(t, err)
+		require.ErrorIs(t, err, ErrMissingBlobName)
 	})
 }
 
@@ -40,10 +38,9 @@ func TestDeleteOption(t *testing.T) {
 
 	t.Run("return error if blobName is missing", func(t *testing.T) {
 		r := bindings.InvokeRequest{}
-		_, err := blobStorage.delete(context.Background(), &r)
-		if assert.Error(t, err) {
-			assert.Equal(t, ErrMissingBlobName, err)
-		}
+		_, err := blobStorage.delete(t.Context(), &r)
+		require.Error(t, err)
+		require.ErrorIs(t, err, ErrMissingBlobName)
 	})
 
 	t.Run("return error for invalid deleteSnapshots", func(t *testing.T) {
@@ -52,7 +49,7 @@ func TestDeleteOption(t *testing.T) {
 			"blobName":        "foo",
 			"deleteSnapshots": "invalid",
 		}
-		_, err := blobStorage.delete(context.Background(), &r)
-		assert.Error(t, err)
+		_, err := blobStorage.delete(t.Context(), &r)
+		require.Error(t, err)
 	})
 }

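These tests also swap context.Background() for t.Context(), the testing-package context added in Go 1.24: it is canceled automatically shortly before the test returns, so work started from it is cleaned up without a manual cancel. Minimal shape:

package example

import "testing"

func TestUsesTestContext(t *testing.T) {
	ctx := t.Context() // canceled automatically when the test finishes (Go 1.24+)
	// pass ctx to the code under test instead of context.Background()
	select {
	case <-ctx.Done():
		t.Fatal("context should still be live during the test body")
	default:
	}
}
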
@@ -12,13 +12,13 @@ binding:
   output: true
   operations:
     - name: create
-      description: "Create blob."
+      description: "Create blob"
     - name: get
-      description: "Get blob."
+      description: "Get blob"
     - name: delete
-      description: "Delete blob."
+      description: "Delete blob"
     - name: list
-      description: "List blob."
+      description: "List blob"
 capabilities: []
 builtinAuthenticationProfiles:
   - name: "azuread"
@@ -87,4 +87,9 @@ metadata:
     example: '3'
     description: |
       Specifies the maximum number of HTTP requests that will be made to retry blob operations.
      A value of zero means that no additional attempts will be made after a failure.
+  - name: disableEntityManagement
+    description: "Disable entity management. Skips the attempt to create the specified storage container. This is useful when operating with minimal Azure AD permissions."
+    example: "true"
+    default: '"false"'
+    type: bool

@@ -16,6 +16,7 @@ package cosmosdb
 import (
 	"context"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"reflect"
 	"strings"
@@ -25,9 +26,10 @@ import (
 	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
 
 	"github.com/dapr/components-contrib/bindings"
-	"github.com/dapr/components-contrib/internal/authentication/azure"
+	"github.com/dapr/components-contrib/common/authentication/azure"
 	contribMetadata "github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )
 
 // CosmosDB allows performing state operations on collections.
@@ -113,7 +115,7 @@ func (c *CosmosDB) Init(ctx context.Context, metadata bindings.Metadata) error {
 
 func (c *CosmosDB) parseMetadata(metadata bindings.Metadata) (*cosmosDBCredentials, error) {
 	creds := cosmosDBCredentials{}
-	err := contribMetadata.DecodeMetadata(metadata.Properties, &creds)
+	err := kitmd.DecodeMetadata(metadata.Properties, &creds)
 	if err != nil {
 		return nil, err
 	}
@@ -157,7 +159,7 @@ func (c *CosmosDB) getPartitionKeyValue(key string, obj interface{}) (string, er
 	}
 	val, ok := valI.(string)
 	if !ok {
-		return "", fmt.Errorf("partition key is not a string")
+		return "", errors.New("partition key is not a string")
 	}
 
 	if val == "" {
@@ -171,11 +173,9 @@ func (c *CosmosDB) lookup(m map[string]interface{}, ks []string) (val interface{
 	var ok bool
 
 	if len(ks) == 0 {
-		return nil, fmt.Errorf("needs at least one key")
+		return nil, errors.New("needs at least one key")
 	}
 
-	c.logger.Infof("%s, %s", ks[0], m[ks[0]])
-
 	if val, ok = m[ks[0]]; !ok {
 		return nil, fmt.Errorf("key not found %v", ks[0])
 	}
@@ -194,9 +194,12 @@ func (c *CosmosDB) lookup(m map[string]interface{}, ks []string) (val interface{
 }
 
 // GetComponentMetadata returns the metadata of the component.
-func (c *CosmosDB) GetComponentMetadata() map[string]string {
+func (c *CosmosDB) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
 	metadataStruct := cosmosDBCredentials{}
-	metadataInfo := map[string]string{}
 	contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
-	return metadataInfo
+	return
 }
 
+func (c *CosmosDB) Close() error {
+	return nil
+}

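The partition-key handling exercised by the tests below walks a dot-separated path such as "address.zip" through the decoded document. A self-contained sketch of that walk, written iteratively where the binding's own lookup method is recursive:

package example

import (
	"errors"
	"fmt"
	"strings"
)

// lookup walks a decoded JSON object along a dot-separated key path, e.g.
// "address.planet.name". Illustrative companion to the lookup method in the
// diff, which performs the same descent.
func lookup(m map[string]interface{}, path string) (interface{}, error) {
	ks := strings.Split(path, ".")
	if len(ks) == 0 || ks[0] == "" {
		return nil, errors.New("needs at least one key")
	}
	var val interface{} = m
	for _, k := range ks {
		obj, ok := val.(map[string]interface{})
		if !ok {
			return nil, fmt.Errorf("value before %q is not an object", k)
		}
		if val, ok = obj[k]; !ok {
			return nil, fmt.Errorf("key not found %v", k)
		}
	}
	return val, nil
}
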
@@ -18,6 +18,7 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 
 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/kit/logger"
@@ -28,7 +29,7 @@ func TestParseMetadata(t *testing.T) {
 	m.Properties = map[string]string{"Collection": "a", "Database": "a", "MasterKey": "a", "PartitionKey": "a", "URL": "a"}
 	cosmosDB := CosmosDB{logger: logger.NewLogger("test")}
 	meta, err := cosmosDB.parseMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "a", meta.Collection)
 	assert.Equal(t, "a", meta.Database)
 	assert.Equal(t, "a", meta.MasterKey)
@@ -46,32 +47,32 @@ func TestPartitionKeyValue(t *testing.T) {
 
 	// Valid single partition key
 	val, err := cosmosDB.getPartitionKeyValue("name", obj)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "name", val)
 
 	// Not existing key
 	_, err = cosmosDB.getPartitionKeyValue("notexists", obj)
-	assert.NotNil(t, err)
+	require.Error(t, err)
 
 	// // Empty value for the key
 	_, err = cosmosDB.getPartitionKeyValue("empty", obj)
-	assert.NotNil(t, err)
+	require.Error(t, err)
 
 	// Valid nested partition key
 	val, err = cosmosDB.getPartitionKeyValue("address.zip", obj)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "zipcode", val)
 
 	// Valid nested three level partition key
 	val, err = cosmosDB.getPartitionKeyValue("address.planet.name", obj)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "earth", val)
 
 	// Invalid nested partition key
 	_, err = cosmosDB.getPartitionKeyValue("address.notexists", obj)
-	assert.NotNil(t, err)
+	require.Error(t, err)
 
 	// Empty key is passed
 	_, err = cosmosDB.getPartitionKeyValue("", obj)
-	assert.NotNil(t, err)
+	require.Error(t, err)
 }

@@ -12,7 +12,7 @@ binding:
   output: true
   operations:
     - name: create
-      description: "Create an item."
+      description: "Create an item"
 capabilities: []
 builtinAuthenticationProfiles:
   - name: "azuread"

@@ -26,6 +26,7 @@ import (
 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 )
 
 const (
@@ -45,7 +46,7 @@ const (
 // CosmosDBGremlinAPI allows performing state operations on collections.
 type CosmosDBGremlinAPI struct {
 	metadata *cosmosDBGremlinAPICredentials
-	client   *gremcos.Cosmos
+	client   gremcos.Cosmos
 	logger   logger.Logger
 }
 
@@ -76,14 +77,14 @@ func (c *CosmosDBGremlinAPI) Init(_ context.Context, metadata bindings.Metadata)
 		return errors.New("CosmosDBGremlinAPI Error: failed to create the Cosmos Graph DB connector")
 	}
 
-	c.client = &client
+	c.client = client
 
 	return nil
 }
 
 func (c *CosmosDBGremlinAPI) parseMetadata(meta bindings.Metadata) (*cosmosDBGremlinAPICredentials, error) {
 	creds := cosmosDBGremlinAPICredentials{}
-	err := metadata.DecodeMetadata(meta.Properties, &creds)
+	err := kitmd.DecodeMetadata(meta.Properties, &creds)
 	if err != nil {
 		return nil, err
 	}
@@ -115,7 +116,7 @@ func (c *CosmosDBGremlinAPI) Invoke(_ context.Context, req *bindings.InvokeReque
 			respStartTimeKey: startTime.Format(time.RFC3339Nano),
 		},
 	}
-	d, err := (*c.client).Execute(gq)
+	d, err := c.client.Execute(gq)
 	if err != nil {
 		return nil, errors.New("CosmosDBGremlinAPI Error:error excuting gremlin")
 	}
@@ -130,9 +131,15 @@ func (c *CosmosDBGremlinAPI) Invoke(_ context.Context, req *bindings.InvokeReque
 }
 
 // GetComponentMetadata returns the metadata of the component.
-func (c *CosmosDBGremlinAPI) GetComponentMetadata() map[string]string {
+func (c *CosmosDBGremlinAPI) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := cosmosDBGremlinAPICredentials{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }
 
+func (c *CosmosDBGremlinAPI) Close() error {
+	if c.client != nil {
+		return c.client.Stop()
+	}
+	return nil
+}

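The client field change here is the classic pointer-to-interface fix: gremcos.Cosmos is evidently an interface (the new code assigns the constructor result directly and calls Stop on it), so storing *gremcos.Cosmos forced the (*c.client).Execute(gq) dereference and made nil checks ambiguous. A stand-in sketch of before and after:

package example

import "errors"

// executor stands in for an interface client such as gremcos.Cosmos.
type executor interface {
	Execute(query string) (string, error)
	Stop() error
}

// before: a pointer to an interface forces dereferencing at every call site.
type bindingBefore struct{ client *executor }

func (b *bindingBefore) run(q string) (string, error) {
	return (*b.client).Execute(q) // awkward; b.client can be non-nil while *b.client is nil
}

// after: store the interface value directly; call sites are plain and a nil
// check on the field is meaningful, as in the new Close method.
type bindingAfter struct{ client executor }

func (b *bindingAfter) run(q string) (string, error) {
	if b.client == nil {
		return "", errors.New("not initialized")
	}
	return b.client.Execute(q)
}
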
@@ -17,6 +17,7 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 
 	"github.com/dapr/components-contrib/bindings"
 	"github.com/dapr/kit/logger"
@@ -27,7 +28,7 @@ func TestParseMetadata(t *testing.T) {
 	m.Properties = map[string]string{"Url": "a", "masterKey": "a", "username": "a"}
 	cosmosdbgremlinapi := CosmosDBGremlinAPI{logger: logger.NewLogger("test")}
 	im, err := cosmosdbgremlinapi.parseMetadata(m)
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "a", im.URL)
 	assert.Equal(t, "a", im.MasterKey)
 	assert.Equal(t, "a", im.Username)

@@ -12,7 +12,7 @@ binding:
   output: true
   operations:
     - name: query
-      description: "Perform a query."
+      description: "Perform a query"
 capabilities: []
 authenticationProfiles:
   - title: "Master key"

@@ -37,10 +37,11 @@ import (
 	"github.com/valyala/fasthttp"
 
 	"github.com/dapr/components-contrib/bindings"
+	azauth "github.com/dapr/components-contrib/common/authentication/azure"
 	"github.com/dapr/components-contrib/contenttype"
-	azauth "github.com/dapr/components-contrib/internal/authentication/azure"
 	"github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
+	kitmd "github.com/dapr/kit/metadata"
 	"github.com/dapr/kit/ptr"
 )
 
@@ -355,7 +356,7 @@ func (a *AzureEventGrid) ensureOutputBindingMetadata() error {
 
 func (a *AzureEventGrid) parseMetadata(md bindings.Metadata) (*azureEventGridMetadata, error) {
 	var eventGridMetadata azureEventGridMetadata
-	err := metadata.DecodeMetadata(md.Properties, &eventGridMetadata)
+	err := kitmd.DecodeMetadata(md.Properties, &eventGridMetadata)
 	if err != nil {
 		return nil, fmt.Errorf("error decoding metadata: %w", err)
 	}
@@ -535,9 +536,8 @@ func (a *AzureEventGrid) subscriptionNeedsUpdating(res armeventgrid.EventSubscri
 }
 
 // GetComponentMetadata returns the metadata of the component.
-func (a *AzureEventGrid) GetComponentMetadata() map[string]string {
+func (a *AzureEventGrid) GetComponentMetadata() (metadataInfo metadata.MetadataMap) {
 	metadataStruct := azureEventGridMetadata{}
-	metadataInfo := map[string]string{}
 	metadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, metadata.BindingType)
-	return metadataInfo
+	return
 }

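The GetComponentMetadata rewrite repeated across this diff swaps the map[string]string return for a named metadata.MetadataMap result that the reflection helper fills through a pointer, so the local map literal and the explicit return value disappear. A stand-in sketch of the mechanics; the real helper is metadata.GetMetadataInfoFromStructType, and this one only mimics its fill-through-pointer shape:

package example

import "reflect"

// MetadataMap stands in for metadata.MetadataMap.
type MetadataMap map[string]string

// fillFromStructType mimics the fill-through-pointer contract: it allocates
// the map if needed and records one entry per struct field.
func fillFromStructType(t reflect.Type, into *MetadataMap) {
	if *into == nil {
		*into = MetadataMap{}
	}
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		(*into)[f.Name] = f.Tag.Get("json")
	}
}

type credentials struct {
	MasterKey string `json:"masterKey"`
	URL       string `json:"url"`
}

// componentMetadata shows the named-return pattern from the diff: the helper
// populates metadataInfo in place and the bare return hands it back.
func componentMetadata() (metadataInfo MetadataMap) {
	fillFromStructType(reflect.TypeOf(credentials{}), &metadataInfo)
	return
}
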
@@ -48,7 +48,7 @@ func TestParseMetadata(t *testing.T) {
 	err = eh.ensureInputBindingMetadata()
 	require.NoError(t, err)
 
-	assert.Nil(t, err)
+	require.NoError(t, err)
 	assert.Equal(t, "a", meta.azureTenantID)
 	assert.Equal(t, "b", meta.azureSubscriptionID)
 	assert.Equal(t, "c", meta.azureClientID)

@@ -13,7 +13,7 @@ binding:
   output: true
   operations:
     - name: create
-      description: "Create an event subscription."
+      description: "Create an event subscription"
 capabilities: []
 builtinAuthenticationProfiles:
   - name: "azuread"
@@ -27,7 +27,7 @@ metadata:
     output: false
     description: |
       The HTTPS endpoint of the webhook Event Grid sends events (formatted as
-      Cloud Events) to. If you’re not re-writing URLs on ingress, it should be
+      Cloud Events) to. If you're not re-writing URLs on ingress, it should be
       in the form of: `"https://[YOUR HOSTNAME]/<path>"` If testing on your
       local machine, you can use something like `ngrok` to create a public
       endpoint.

@@ -20,7 +20,7 @@ import (
 	"github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
 
 	"github.com/dapr/components-contrib/bindings"
-	impl "github.com/dapr/components-contrib/internal/component/azure/eventhubs"
+	impl "github.com/dapr/components-contrib/common/component/azure/eventhubs"
 	contribMetadata "github.com/dapr/components-contrib/metadata"
 	"github.com/dapr/kit/logger"
 	"github.com/dapr/kit/ptr"
@@ -81,13 +81,16 @@ func (a *AzureEventHubs) Invoke(ctx context.Context, req *bindings.InvokeRequest
 func (a *AzureEventHubs) Read(ctx context.Context, handler bindings.Handler) error {
 	// Start the subscription
 	// This is non-blocking
-	return a.AzureEventHubs.Subscribe(ctx, a.AzureEventHubs.EventHubName(), false, func(ctx context.Context, data []byte, metadata map[string]string) error {
-		res := bindings.ReadResponse{
-			Data:     data,
-			Metadata: metadata,
-		}
-		_, hErr := handler(ctx, &res)
-		return hErr
+	topic := a.AzureEventHubs.EventHubName()
+	bindingsHandler := a.AzureEventHubs.GetBindingsHandlerFunc(topic, false, handler)
+	// Setting `maxBulkSubCount` to 1 as bindings are not supported for bulk subscriptions
+	// Setting `CheckPointFrequencyPerPartition` to default value of 1
+	return a.AzureEventHubs.Subscribe(ctx, impl.SubscribeConfig{
+		Topic:                           topic,
+		MaxBulkSubCount:                 1,
+		MaxBulkSubAwaitDurationMs:       impl.DefaultMaxBulkSubAwaitDurationMs,
+		CheckPointFrequencyPerPartition: impl.DefaultCheckpointFrequencyPerPartition,
+		Handler:                         bindingsHandler,
 	})
 }
 
@@ -96,9 +99,8 @@ func (a *AzureEventHubs) Close() error {
 }
 
 // GetComponentMetadata returns the metadata of the component.
-func (a *AzureEventHubs) GetComponentMetadata() map[string]string {
+func (a *AzureEventHubs) GetComponentMetadata() (metadataInfo contribMetadata.MetadataMap) {
 	metadataStruct := impl.AzureEventHubsMetadata{}
-	metadataInfo := map[string]string{}
 	contribMetadata.GetMetadataInfoFromStructType(reflect.TypeOf(metadataStruct), &metadataInfo, contribMetadata.BindingType)
-	return metadataInfo
+	return
 }

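For callers, Read is unchanged by the SubscribeConfig migration; the bulk-subscription knobs (MaxBulkSubCount pinned to 1, default checkpoint frequency) are now internal to Subscribe. A sketch of driving the binding, with the handler signature taken from the bindings package as used in the diff:

package example

import (
	"context"
	"fmt"

	"github.com/dapr/components-contrib/bindings"
)

// consume subscribes via the binding's Read method. The handler's returned
// error propagates to the subscription machinery; returning nil signals the
// message was handled.
func consume(ctx context.Context, b bindings.InputBinding) error {
	return b.Read(ctx, func(ctx context.Context, res *bindings.ReadResponse) ([]byte, error) {
		fmt.Printf("received %d bytes, metadata: %v\n", len(res.Data), res.Metadata)
		return nil, nil // no response payload
	})
}
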