AWS S3 Bindings Certification Tests (#2536)

Signed-off-by: Roberto J Rojas <robertojrojas@gmail.com>
Signed-off-by: Bernd Verst <github@bernd.dev>
Co-authored-by: Bernd Verst <github@bernd.dev>
Co-authored-by: Artur Souza <artursouza.ms@outlook.com>
This commit is contained in:
Roberto Rojas 2023-04-27 18:17:01 -04:00 committed by GitHub
parent e099a548fb
commit 33934396a2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
30 changed files with 986 additions and 18 deletions

View File

@ -0,0 +1,15 @@
# Docker Compose file for the AWS DynamoDB conformance-test environment.
# Runs LocalStack (a local AWS emulator) bound to 127.0.0.1:4566 and mounts
# a ready-hook script that creates the test table once LocalStack is up.
version: "3.8"
services:
localstack:
container_name: "conformance-aws-dynamodb"
image: localstack/localstack:1.4.0
ports:
- "127.0.0.1:4566:4566"
environment:
- DEBUG=1
- DOCKER_HOST=unix:///var/run/docker.sock
volumes:
# The init script runs when LocalStack reports ready (ready.d hook).
- "${PWD}/.github/scripts/docker-compose-init/init-conformance-state-aws-dynamodb.sh:/etc/localstack/init/ready.d/init-aws.sh" # ready hook
- "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack"
- "/var/run/docker.sock:/var/run/docker.sock"

View File

@ -0,0 +1,15 @@
# Docker Compose file for the AWS S3 bindings conformance-test environment.
# Runs LocalStack (a local AWS emulator) bound to 127.0.0.1:4566 and mounts
# a ready-hook script that creates the test bucket once LocalStack is up.
version: "3.8"
services:
localstack:
container_name: "conformance-aws-s3"
image: localstack/localstack:1.4.0
ports:
- "127.0.0.1:4566:4566"
environment:
- DEBUG=1
- DOCKER_HOST=unix:///var/run/docker.sock
volumes:
# The init script runs when LocalStack reports ready (ready.d hook).
- "${PWD}/.github/scripts/docker-compose-init/init-conformance-bindings-aws-s3.sh:/etc/localstack/init/ready.d/init-aws.sh" # ready hook
- "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack"
- "/var/run/docker.sock:/var/run/docker.sock"

View File

@ -1,10 +1,10 @@
version: '2'
version: "3.8"
services:
snssqs:
image: localstack/localstack:0.13.3
container_name: snssqs
environment:
SERVICES: "sns,sqs,sts"
DEBUG: 1
localstack:
container_name: "conformance-aws-snssqs"
image: localstack/localstack:1.4.0
ports:
- 4566:4566
- "127.0.0.1:4566:4566"
environment:
- DEBUG=1

View File

@ -0,0 +1,45 @@
terraform {
required_version = ">=0.13"
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 4.0"
}
}
}
# Timestamp of the workflow run, used to tag created resources.
variable "TIMESTAMP" {
  type        = string
  description = "Timestamp of the GitHub workflow run."
}
# Unique run identifier, used to build collision-free resource names.
variable "UNIQUE_ID" {
  type        = string
  description = "Unique ID of the GitHub workflow run."
}
provider "aws" {
region = "us-east-1"
default_tags {
tags = {
Purpose = "AutomatedTesting"
Timestamp = "${var.TIMESTAMP}"
}
}
}
resource "aws_s3_bucket" "dapr_bucket" {
bucket = "dapr-cert-test-${var.UNIQUE_ID}"
force_destroy = true
tags = {
dapr-topic-name = "dapr-cert-test-${var.UNIQUE_ID}"
}
}
resource "aws_s3_bucket_ownership_controls" "dapr_bucket" {
bucket = aws_s3_bucket.dapr_bucket.id
rule {
object_ownership = "BucketOwnerPreferred"
}
}

View File

@ -0,0 +1,45 @@
terraform {
required_version = ">=0.13"
required_providers {
aws = {
source = "hashicorp/aws"
version = "~> 4.0"
}
}
}
# Timestamp of the workflow run, used to tag created resources.
variable "TIMESTAMP" {
  type        = string
  description = "Timestamp of the GitHub workflow run."
}
# Unique run identifier, used to build collision-free resource names.
variable "UNIQUE_ID" {
  type        = string
  description = "Unique ID of the GitHub workflow run."
}
provider "aws" {
region = "us-east-1"
default_tags {
tags = {
Purpose = "AutomatedTesting"
Timestamp = "${var.TIMESTAMP}"
}
}
}
resource "aws_s3_bucket" "dapr_bucket" {
bucket = "dapr-conformance-test-${var.UNIQUE_ID}"
force_destroy = true
tags = {
dapr-topic-name = "dapr-conformance-test-${var.UNIQUE_ID}"
}
}
resource "aws_s3_bucket_ownership_controls" "dapr_bucket" {
bucket = aws_s3_bucket.dapr_bucket.id
rule {
object_ownership = "BucketOwnerPreferred"
}
}

View File

@ -0,0 +1,9 @@
#!/bin/sh
# Destroys the AWS infrastructure created for the S3 binding certification
# tests. set +e: cleanup is best-effort — keep going (and do not fail the
# workflow) even if the destroy encounters an error.
set +e
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/certification/bindings/aws/s3"
# Run Terraform
terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@ -0,0 +1,16 @@
#!/bin/sh
set -e
# Set variables for GitHub Actions.
# $GITHUB_ENV is quoted because the runner-provided file path could contain
# spaces (shellcheck SC2086).
echo "AWS_REGION=us-east-1" >> "$GITHUB_ENV"
echo "BINDINGS_AWS_S3_BUCKET=dapr-cert-test-$UNIQUE_ID" >> "$GITHUB_ENV"
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/certification/bindings/aws/s3"
# Run Terraform
terraform init
terraform validate -no-color
terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@ -0,0 +1,9 @@
#!/bin/sh
# Destroys the AWS infrastructure created for the S3 binding conformance
# tests. set +e: cleanup is best-effort — keep going (and do not fail the
# workflow) even if the destroy encounters an error.
set +e
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/conformance/bindings/aws/s3"
# Run Terraform
terraform destroy -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@ -0,0 +1,16 @@
#!/bin/sh
set -e
# Set variables for GitHub Actions.
# $GITHUB_ENV is quoted because the runner-provided file path could contain
# spaces (shellcheck SC2086).
echo "AWS_REGION=us-east-1" >> "$GITHUB_ENV"
echo "BINDINGS_AWS_S3_BUCKET=dapr-conformance-test-$UNIQUE_ID" >> "$GITHUB_ENV"
# Navigate to the Terraform directory
cd ".github/infrastructure/terraform/conformance/bindings/aws/s3"
# Run Terraform
terraform init
terraform validate -no-color
terraform plan -no-color -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"
terraform apply -auto-approve -var="UNIQUE_ID=$UNIQUE_ID" -var="TIMESTAMP=$CURRENT_TIME"

View File

@ -0,0 +1,3 @@
#!/bin/bash
# LocalStack ready-hook: create the bucket used by the S3 bindings
# conformance tests ("awslocal" is LocalStack's wrapper around the aws CLI).
awslocal s3 mb s3://conformance-test-docker

View File

@ -0,0 +1,7 @@
#!/bin/bash
# LocalStack ready-hook: create the table used by the DynamoDB state-store
# conformance tests ("awslocal" is LocalStack's wrapper around the aws CLI).
# The hash key "key" of type String matches what the Dapr state store writes.
awslocal dynamodb create-table \
--table-name conformance-test-docker \
--attribute-definitions AttributeName=key,AttributeType=S \
--key-schema AttributeName=key,KeyType=HASH \
--provisioned-throughput ReadCapacityUnits=1,WriteCapacityUnits=1

View File

@ -94,6 +94,25 @@ const components = {
'AzureBlobStorageQueue',
],
},
'bindings.aws.s3': {
certification: true,
requireAWSCredentials: true,
requireTerraform: true,
certificationSetup: 'certification-bindings.aws.s3-setup.sh',
certificationDestroy: 'certification-bindings.aws.s3-destroy.sh',
},
// 'bindings.aws.s3.docker': {
// conformance: true,
// requireDocker: true,
// conformanceSetup: 'docker-compose.sh s3',
// },
'bindings.aws.s3.terraform': {
conformance: true,
requireAWSCredentials: true,
requireTerraform: true,
conformanceSetup: 'conformance-bindings.aws.s3.terraform-setup.sh',
conformanceDestroy: 'conformance-bindings.aws.s3.terraform-destroy.sh',
},
'bindings.cron': {
conformance: true,
certification: true,
@ -218,11 +237,12 @@ const components = {
certificationSetup: 'certification-pubsub.aws.snssqs-setup.sh',
certificationDestroy: 'certification-pubsub.aws.snssqs-destroy.sh',
},
'pubsub.aws.snssqs.docker': {
conformance: true,
conformanceSetup: 'docker-compose.sh snssqs',
sourcePkg: 'pubsub/aws/snssqs',
},
// 'pubsub.aws.snssqs.docker': {
// conformance: true,
// requireDocker: true,
// conformanceSetup: 'docker-compose.sh snssqs',
// sourcePkg: 'pubsub/aws/snssqs',
// },
'pubsub.aws.snssqs.terraform': {
conformance: true,
requireAWSCredentials: true,
@ -422,6 +442,11 @@ const components = {
certificationSetup: 'certification-state.aws.dynamodb-setup.sh',
certificationDestroy: 'certification-state.aws.dynamodb-destroy.sh',
},
// 'state.aws.dynamodb.docker': {
// conformance: true,
// requireDocker: true,
// conformanceSetup: 'docker-compose.sh dynamodb',
// },
'state.aws.dynamodb.terraform': {
conformance: true,
requireAWSCredentials: true,
@ -678,6 +703,11 @@ function GenerateMatrix(testKind, enableCloudTests) {
) {
continue
}
} else {
// For conformance tests, avoid running Docker and Cloud Tests together.
if (comp.conformance && comp.requireDocker) {
continue
}
}
if (comp.sourcePkg) {
@ -737,7 +767,9 @@ if (argv.length < 4 || !['true', 'false'].includes(argv[3])) {
exit(1)
}
const matrixObj = GenerateMatrix(argv[2], argv[3] == 'true')
const testKind = argv[2]
const enableCloudTests = argv[3] == 'true'
const matrixObj = GenerateMatrix(testKind, enableCloudTests)
console.log('Generated matrix:\n\n' + JSON.stringify(matrixObj, null, ' '))
writeFileSync(env.GITHUB_OUTPUT, 'test-matrix=' + JSON.stringify(matrixObj))

View File

@ -210,6 +210,9 @@ func (s *AWSS3) create(ctx context.Context, req *bindings.InvokeRequest) (*bindi
return &bindings.InvokeResponse{
Data: jsonResponse,
Metadata: map[string]string{
metadataKey: key,
},
}, nil
}
@ -322,10 +325,11 @@ func (s *AWSS3) delete(ctx context.Context, req *bindings.InvokeRequest) (*bindi
}
func (s *AWSS3) list(ctx context.Context, req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
var payload listPayload
err := json.Unmarshal(req.Data, &payload)
if err != nil {
return nil, err
payload := listPayload{}
if req.Data != nil {
if err := json.Unmarshal(req.Data, &payload); err != nil {
return nil, fmt.Errorf("s3 binding (List Operation) - unable to parse Data property - %v", err)
}
}
if payload.MaxResults < 1 {

View File

@ -0,0 +1,25 @@
# AWS S3 Binding certification testing
This project aims to test the AWS S3 binding component under various conditions.
## Test plan
### Functional tests
- Create request:
- key/Filename specified and missing
- With ForcePathStyle True/False
- Verifies automatic base64 decode option
- Get request:
- Successful Get Request
- Delete request:
- Successful deletion
- List request:
- Successful List Request
### Running the tests
This must be run in the GitHub Actions Workflow configured for test infrastructure setup.

View File

@ -0,0 +1,9 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: envvar-secret-store
namespace: default
spec:
type: secretstores.local.env
version: v1
metadata:

View File

@ -0,0 +1,37 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: s3-cert-tests
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
secretKeyRef:
name: BINDINGS_AWS_S3_BUCKET
key: BINDINGS_AWS_S3_BUCKET
- name: accessKey
secretKeyRef:
name: AWS_ACCESS_KEY_ID
key: AWS_ACCESS_KEY_ID
- name: secretKey
secretKeyRef:
name: AWS_SECRET_ACCESS_KEY
key: AWS_SECRET_ACCESS_KEY
- name: region
secretKeyRef:
name: AWS_REGION
key: AWS_REGION
- name: decodeBase64
value: "false"
- name: encodeBase64
value: "false"
- name: forcePathStyle
value: "false"
- name: disableSSL
value: "false"
- name: insecureSSL
value: "false"
auth:
secretStore: envvar-secret-store

View File

@ -0,0 +1,9 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: envvar-secret-store
namespace: default
spec:
type: secretstores.local.env
version: v1
metadata:

View File

@ -0,0 +1,37 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: s3-cert-tests
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
secretKeyRef:
name: BINDINGS_AWS_S3_BUCKET
key: BINDINGS_AWS_S3_BUCKET
- name: accessKey
secretKeyRef:
name: AWS_ACCESS_KEY_ID
key: AWS_ACCESS_KEY_ID
- name: secretKey
secretKeyRef:
name: AWS_SECRET_ACCESS_KEY
key: AWS_SECRET_ACCESS_KEY
- name: region
secretKeyRef:
name: AWS_REGION
key: AWS_REGION
- name: decodeBase64
  # Quoted so the metadata value stays a YAML string, consistent with the
  # other boolean-like values in this component (e.g. "false").
  value: "true"
- name: encodeBase64
value: "false"
- name: forcePathStyle
value: "false"
- name: disableSSL
value: "false"
- name: insecureSSL
value: "false"
auth:
secretStore: envvar-secret-store

View File

@ -0,0 +1,9 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: envvar-secret-store
namespace: default
spec:
type: secretstores.local.env
version: v1
metadata:

View File

@ -0,0 +1,37 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: s3-cert-tests
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
secretKeyRef:
name: BINDINGS_AWS_S3_BUCKET
key: BINDINGS_AWS_S3_BUCKET
- name: accessKey
secretKeyRef:
name: AWS_ACCESS_KEY_ID
key: AWS_ACCESS_KEY_ID
- name: secretKey
secretKeyRef:
name: AWS_SECRET_ACCESS_KEY
key: AWS_SECRET_ACCESS_KEY
- name: region
secretKeyRef:
name: AWS_REGION
key: AWS_REGION
- name: decodeBase64
value: "false"
- name: encodeBase64
  # Quoted so the metadata value stays a YAML string, consistent with the
  # other boolean-like values in this component (e.g. "false").
  value: "true"
- name: forcePathStyle
value: "false"
- name: disableSSL
value: "false"
- name: insecureSSL
value: "false"
auth:
secretStore: envvar-secret-store

View File

@ -0,0 +1,9 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: envvar-secret-store
namespace: default
spec:
type: secretstores.local.env
version: v1
metadata:

View File

@ -0,0 +1,33 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: s3-cert-tests
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
secretKeyRef:
name: BINDINGS_AWS_S3_BUCKET
key: BINDINGS_AWS_S3_BUCKET
- name: accessKey
secretKeyRef:
name: AWS_ACCESS_KEY_ID
key: AWS_ACCESS_KEY_ID
- name: secretKey
secretKeyRef:
name: AWS_SECRET_ACCESS_KEY
key: AWS_SECRET_ACCESS_KEY
- name: region
secretKeyRef:
name: AWS_REGION
key: AWS_REGION
- name: decodeBase64
value: "false"
- name: encodeBase64
value: "false"
- name: forcePathStyle
value: "false"
auth:
secretStore: envvar-secret-store

View File

@ -0,0 +1,9 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: envvar-secret-store
namespace: default
spec:
type: secretstores.local.env
version: v1
metadata:

View File

@ -0,0 +1,33 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: s3-cert-tests
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
secretKeyRef:
name: BINDINGS_AWS_S3_BUCKET
key: BINDINGS_AWS_S3_BUCKET
- name: accessKey
secretKeyRef:
name: AWS_ACCESS_KEY_ID
key: AWS_ACCESS_KEY_ID
- name: secretKey
secretKeyRef:
name: AWS_SECRET_ACCESS_KEY
key: AWS_SECRET_ACCESS_KEY
- name: region
secretKeyRef:
name: AWS_REGION
key: AWS_REGION
- name: decodeBase64
value: "false"
- name: encodeBase64
value: "false"
- name: forcePathStyle
value: "true"
auth:
secretStore: envvar-secret-store

View File

@ -0,0 +1,6 @@
apiVersion: dapr.io/v1alpha1
kind: Configuration
metadata:
name: awss3bindingconfig
spec:
features:

View File

@ -0,0 +1,426 @@
/*
Copyright 2023 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package awss3binding_test
import (
"encoding/base64"
"encoding/json"
"fmt"
"github.com/stretchr/testify/assert"
"os"
"testing"
bindings_s3 "github.com/dapr/components-contrib/bindings/aws/s3"
secretstore_env "github.com/dapr/components-contrib/secretstores/local/env"
bindings_loader "github.com/dapr/dapr/pkg/components/bindings"
secretstores_loader "github.com/dapr/dapr/pkg/components/secretstores"
"github.com/dapr/dapr/pkg/runtime"
dapr_testing "github.com/dapr/dapr/pkg/testing"
daprsdk "github.com/dapr/go-sdk/client"
"github.com/dapr/kit/logger"
"github.com/dapr/components-contrib/tests/certification/embedded"
"github.com/dapr/components-contrib/tests/certification/flow"
"github.com/dapr/components-contrib/tests/certification/flow/sidecar"
"github.com/aws/aws-sdk-go/service/s3"
)
const (
sidecarName = "bindings-s3-sidecar"
bindingsMetadataName = "s3-cert-tests"
)
// bucketName is the S3 bucket targeted by the certification tests. The
// default is a placeholder; in CI it is overridden via the environment.
var bucketName = "bucketName"
// init overrides the default bucket name with the BINDINGS_AWS_S3_BUCKET
// environment variable (exported by the Terraform setup script) when it is
// set and non-empty.
func init() {
if envVal := os.Getenv("BINDINGS_AWS_S3_BUCKET"); envVal != "" {
bucketName = envVal
}
}
// TestAWSS3CertificationTests is the single test entry point. It runs the
// three certification scenarios as sub-tests and defers teardown until all
// of them have finished.
func TestAWSS3CertificationTests(t *testing.T) {
defer teardown(t)
t.Run("S3SBasic", func(t *testing.T) {
S3SBasic(t)
})
t.Run("S3SForcePathStyle", func(t *testing.T) {
S3SForcePathStyle(t)
})
t.Run("S3SBase64", func(t *testing.T) {
S3SBase64(t)
})
}
// createObjectRequest issues a "create" operation against the S3 output
// binding with the given payload bytes and request metadata, returning the
// raw binding response.
func createObjectRequest(ctx flow.Context, client daprsdk.Client, dataBytes []byte, invokeCreateMetadata map[string]string) (*daprsdk.BindingEvent, error) {
	req := &daprsdk.InvokeBindingRequest{
		Name:      bindingsMetadataName,
		Operation: "create",
		Data:      dataBytes,
		Metadata:  invokeCreateMetadata,
	}
	return client.InvokeBinding(ctx, req)
}
// listObjectRequest issues a "list" operation against the S3 output binding
// (no payload, no metadata) and returns the raw binding response.
func listObjectRequest(ctx flow.Context, client daprsdk.Client) (out *daprsdk.BindingEvent, err error) {
	invokeRequest := &daprsdk.InvokeBindingRequest{
		Name:      bindingsMetadataName,
		Operation: "list",
		Data:      nil,
		Metadata:  nil,
	}
	out, invokeErr := client.InvokeBinding(ctx, invokeRequest)
	if invokeErr != nil {
		// Name the operation in the wrap so failures are attributable; the
		// previous bare fmt.Errorf("%w", err) added no context at all.
		return nil, fmt.Errorf("list operation: %w", invokeErr)
	}
	return out, nil
}
// getObjectRequest issues a "get" operation for the named object, optionally
// asking the component to base64-encode the returned content.
func getObjectRequest(ctx flow.Context, client daprsdk.Client, name string, isBase64 bool) (out *daprsdk.BindingEvent, err error) {
	md := map[string]string{
		"key":          name,
		"encodeBase64": fmt.Sprintf("%t", isBase64),
	}
	return getObjectRequestWithMetadata(ctx, client, md)
}
// getObjectRequestWithMetadata issues a "get" operation, passing the given
// metadata map through unchanged (e.g. "key", "encodeBase64").
// (The previous doc comment misnamed this function as getObjectRequest.)
func getObjectRequestWithMetadata(ctx flow.Context, client daprsdk.Client, invokeGetMetadata map[string]string) (out *daprsdk.BindingEvent, err error) {
	invokeGetRequest := &daprsdk.InvokeBindingRequest{
		Name:      bindingsMetadataName,
		Operation: "get",
		Data:      nil,
		Metadata:  invokeGetMetadata,
	}
	out, invokeGetErr := client.InvokeBinding(ctx, invokeGetRequest)
	if invokeGetErr != nil {
		// Name the operation in the wrap so failures are attributable; the
		// previous bare fmt.Errorf("%w", err) added no context. %w preserves
		// the underlying message, so assertions using Contains still match.
		return nil, fmt.Errorf("get operation: %w", invokeGetErr)
	}
	return out, nil
}
// deleteObjectRequest issues a "delete" operation for the named object and
// returns the raw binding response (empty data on success).
func deleteObjectRequest(ctx flow.Context, client daprsdk.Client, name string) (out *daprsdk.BindingEvent, err error) {
	invokeDeleteMetadata := map[string]string{
		"key": name,
	}
	// Renamed from invokeGetRequest: this is a delete request.
	invokeDeleteRequest := &daprsdk.InvokeBindingRequest{
		Name:      bindingsMetadataName,
		Operation: "delete",
		Data:      nil,
		Metadata:  invokeDeleteMetadata,
	}
	out, invokeDeleteErr := client.InvokeBinding(ctx, invokeDeleteRequest)
	if invokeDeleteErr != nil {
		// Name the operation in the wrap so failures are attributable; the
		// previous bare fmt.Errorf("%w", err) added no context.
		return nil, fmt.Errorf("delete operation: %w", invokeDeleteErr)
	}
	return out, nil
}
// Verify S3 Basic Binding Support (Create, Get, List, Delete)
//
// S3SBasic starts a Dapr sidecar with the "basic" component configuration
// and exercises the full object lifecycle against the bucket:
// create -> get -> list -> delete -> get (which must now fail).
func S3SBasic(t *testing.T) {
// Reserve free ports for the embedded sidecar's gRPC and HTTP endpoints.
ports, err := dapr_testing.GetFreePorts(2)
assert.NoError(t, err)
currentGRPCPort := ports[0]
currentHTTPPort := ports[1]
objectName := "filename.txt"
testCreateGetListDelete := func(ctx flow.Context) error {
client, clientErr := daprsdk.NewClientWithPort(fmt.Sprint(currentGRPCPort))
if clientErr != nil {
panic(clientErr)
}
defer client.Close()
// Upload a small text object under a fixed key.
input := "some example content"
dataBytes := []byte(input)
invokeCreateMetadata := map[string]string{
"key": objectName,
}
_, invokeCreateErr := createObjectRequest(ctx, client, dataBytes, invokeCreateMetadata)
assert.NoError(t, invokeCreateErr)
// Fetch it back and verify the content round-trips unchanged.
invokeGetMetadata := map[string]string{
"key": objectName,
}
invokeGetRequest := &daprsdk.InvokeBindingRequest{
Name: bindingsMetadataName,
Operation: "get",
Data: nil,
Metadata: invokeGetMetadata,
}
out, invokeGetErr := client.InvokeBinding(ctx, invokeGetRequest)
assert.NoError(t, invokeGetErr)
assert.Equal(t, input, string(out.Data))
// List the bucket and make sure the new key shows up; the response body
// is an AWS SDK ListObjectsOutput serialized as JSON.
out, invokeErr := listObjectRequest(ctx, client)
assert.NoError(t, invokeErr)
var output s3.ListObjectsOutput
unmarshalErr := json.Unmarshal(out.Data, &output)
assert.NoError(t, unmarshalErr)
found := false
for _, item := range output.Contents {
if *item.Key == objectName {
found = true
break
}
}
assert.True(t, found)
// Delete the object; a successful delete returns an empty payload.
out, invokeDeleteErr := deleteObjectRequest(ctx, client, objectName)
assert.NoError(t, invokeDeleteErr)
assert.Empty(t, out.Data)
// confirm the deletion.
_, invokeSecondGetErr := getObjectRequest(ctx, client, objectName, false)
assert.Error(t, invokeSecondGetErr)
assert.Contains(t, invokeSecondGetErr.Error(), "error downloading S3 object")
return nil
}
flow.New(t, "AWS S3 binding basic").
Step(sidecar.Run(sidecarName,
embedded.WithoutApp(),
embedded.WithComponentsPath("./components/basic"),
embedded.WithDaprGRPCPort(currentGRPCPort),
embedded.WithDaprHTTPPort(currentHTTPPort),
componentRuntimeOptions(),
)).
Step("Create/Get/List/Delete S3 Object", testCreateGetListDelete).
Run()
}
// Verify forcePathStyle
//
// S3SForcePathStyle creates an object with forcePathStyle set to true and
// then to false (via two component configurations), asserting the returned
// object Location matches the expected URL shape for each addressing style:
//   path-style:            https://s3.amazonaws.com/<bucket>/<key>
//   virtual-hosted style:  https://<bucket>.s3.amazonaws.com/<key>
func S3SForcePathStyle(t *testing.T) {
ports, err := dapr_testing.GetFreePorts(2)
assert.NoError(t, err)
currentGRPCPort := ports[0]
currentHTTPPort := ports[1]
objectName := "filename.txt"
locationForcePathStyleFalse := fmt.Sprintf("https://%s.s3.amazonaws.com/%s", bucketName, objectName)
locationForcePathStyleTrue := fmt.Sprintf("https://s3.amazonaws.com/%s/%s", bucketName, objectName)
// testForcePathStyle returns a flow step that uploads an object, compares
// the Location in the create response with the expected URL, then deletes
// the object and confirms it is gone.
testForcePathStyle := func(forcePathStyle string) func(ctx flow.Context) error {
return func(ctx flow.Context) error {
client, clientErr := daprsdk.NewClientWithPort(fmt.Sprint(currentGRPCPort))
if clientErr != nil {
panic(clientErr)
}
defer client.Close()
input := "some example content"
dataBytes := []byte(input)
invokeCreateMetadata := map[string]string{
"key": objectName,
}
cout, invokeCreateErr := createObjectRequest(ctx, client, dataBytes, invokeCreateMetadata)
assert.NoError(t, invokeCreateErr)
// Minimal projection of the create response; only Location is checked.
var createResponse struct {
Location string `json:"location"`
VersionID *string `json:"versionID"`
PresignURL string `json:"presignURL,omitempty"`
}
unmarshalErr := json.Unmarshal(cout.Data, &createResponse)
assert.NoError(t, unmarshalErr)
// NOTE(review): testify's assert.Equal signature is (t, expected,
// actual); the arguments here are swapped, which only affects the
// failure message, not the comparison itself.
assert.Equal(t, createResponse.Location, forcePathStyle)
out, invokeDeleteErr := deleteObjectRequest(ctx, client, objectName)
assert.NoError(t, invokeDeleteErr)
assert.Empty(t, out.Data)
// confirm the deletion.
_, invokeSecondGetErr := getObjectRequest(ctx, client, objectName, false)
assert.Error(t, invokeSecondGetErr)
assert.Contains(t, invokeSecondGetErr.Error(), "error downloading S3 object")
return nil
}
}
flow.New(t, "AWS S3 binding with forcePathStyle True").
Step(sidecar.Run(sidecarName,
embedded.WithoutApp(),
embedded.WithComponentsPath("./components/forcePathStyleTrue"),
embedded.WithDaprGRPCPort(currentGRPCPort),
embedded.WithDaprHTTPPort(currentHTTPPort),
componentRuntimeOptions(),
)).
Step("Create/Delete S3 Object forcePathStyle True", testForcePathStyle(locationForcePathStyleTrue)).
Run()
flow.New(t, "AWS S3 binding with forcePathStyleFalse").
Step(sidecar.Run(sidecarName,
embedded.WithoutApp(),
embedded.WithComponentsPath("./components/forcePathStyleFalse"),
embedded.WithDaprGRPCPort(currentGRPCPort),
embedded.WithDaprHTTPPort(currentHTTPPort),
componentRuntimeOptions(),
)).
Step("Create/Delete S3 Object forcePathStyle False", testForcePathStyle(locationForcePathStyleFalse)).
Run()
}
// Verify Base64 (Encode/Decode)
//
// S3SBase64 covers the two base64 options of the binding:
//   - decodeBase64: upload base64 text and have the component decode it
//     before writing to S3;
//   - encodeBase64: upload raw bytes and have the component base64-encode
//     the content on get.
func S3SBase64(t *testing.T) {
ports, err := dapr_testing.GetFreePorts(2)
assert.NoError(t, err)
currentGRPCPort := ports[0]
currentHTTPPort := ports[1]
testCreateBase64FromFile := func() func(ctx flow.Context) error {
return func(ctx flow.Context) error {
client, clientErr := daprsdk.NewClientWithPort(fmt.Sprint(currentGRPCPort))
if clientErr != nil {
panic(clientErr)
}
defer client.Close()
// Upload base64 text with decodeBase64=true. No "key" metadata is
// supplied, so the component generates one and returns it in the
// response metadata (read back below as genKey).
dataBytes := []byte(base64.StdEncoding.EncodeToString([]byte("somecontent")))
invokeCreateMetadata := map[string]string{
"decodeBase64": "true",
}
out, invokeCreateErr := createObjectRequest(ctx, client, dataBytes, invokeCreateMetadata)
assert.NoError(t, invokeCreateErr)
genKey := out.Metadata["key"]
// Getting with encodeBase64=true must return the original base64 text.
isBase64 := true
out, invokeGetErr := getObjectRequest(ctx, client, genKey, isBase64)
assert.NoError(t, invokeGetErr)
assert.Equal(t, out.Data, dataBytes)
assert.Empty(t, out.Metadata)
out, invokeDeleteErr := deleteObjectRequest(ctx, client, genKey)
assert.NoError(t, invokeDeleteErr)
assert.Empty(t, out.Data)
// confirm the deletion.
_, invokeSecondGetErr := getObjectRequest(ctx, client, genKey, false)
assert.Error(t, invokeSecondGetErr)
return nil
}
}
testCreateFromFileGetEncodeBase64 := func() func(ctx flow.Context) error {
return func(ctx flow.Context) error {
client, clientErr := daprsdk.NewClientWithPort(fmt.Sprint(currentGRPCPort))
if clientErr != nil {
panic(clientErr)
}
defer client.Close()
// Upload raw (non-base64) bytes; with the encodeBase64 component
// configuration the get below should return base64 text.
dataBytes := []byte("somecontent not base64 encoded")
b64EncodedDataBytes := []byte(base64.StdEncoding.EncodeToString(dataBytes))
invokeCreateMetadata := map[string]string{}
out, invokeCreateErr := createObjectRequest(ctx, client, dataBytes, invokeCreateMetadata)
assert.NoError(t, invokeCreateErr)
genKey := out.Metadata["key"]
invokeGetMetadata := map[string]string{
"key": genKey,
}
out, invokeGetErr := getObjectRequestWithMetadata(ctx, client, invokeGetMetadata)
assert.NoError(t, invokeGetErr)
assert.Equal(t, out.Data, b64EncodedDataBytes)
assert.Empty(t, out.Metadata)
out, invokeDeleteErr := deleteObjectRequest(ctx, client, genKey)
assert.NoError(t, invokeDeleteErr)
assert.Empty(t, out.Data)
// confirm the deletion.
_, invokeSecondGetErr := getObjectRequest(ctx, client, genKey, false)
assert.Error(t, invokeSecondGetErr)
return nil
}
}
flow.New(t, "decode base64 option for binary").
Step(sidecar.Run(sidecarName,
embedded.WithoutApp(),
embedded.WithComponentsPath("./components/decodeBase64"),
embedded.WithDaprGRPCPort(currentGRPCPort),
embedded.WithDaprHTTPPort(currentHTTPPort),
componentRuntimeOptions(),
)).
Step("Create blob from file", testCreateBase64FromFile()).
Run()
flow.New(t, "upload regular file get as encode base64").
Step(sidecar.Run(sidecarName,
embedded.WithoutApp(),
embedded.WithComponentsPath("./components/encodeBase64"),
embedded.WithDaprGRPCPort(currentGRPCPort),
embedded.WithDaprHTTPPort(currentHTTPPort),
componentRuntimeOptions(),
)).
Step("Create blob from file get encode base64", testCreateFromFileGetEncodeBase64()).
Run()
}
// componentRuntimeOptions builds the options for the embedded Dapr runtime:
// it registers the AWS S3 output binding and the local-environment secret
// store that the test component definitions reference.
func componentRuntimeOptions() []runtime.Option {
	componentLogger := logger.NewLogger("dapr.components")

	bindingReg := bindings_loader.NewRegistry()
	bindingReg.Logger = componentLogger
	bindingReg.RegisterOutputBinding(bindings_s3.NewAWSS3, "aws.s3")

	secretReg := secretstores_loader.NewRegistry()
	secretReg.Logger = componentLogger
	secretReg.RegisterComponent(secretstore_env.NewEnvSecretStore, "local.env")

	return []runtime.Option{
		runtime.WithBindings(bindingReg),
		runtime.WithSecretStores(secretReg),
	}
}
// teardown is deferred by TestAWSS3CertificationTests and runs after all
// sub-tests complete. No cloud resources are removed here: the bucket is
// created and destroyed by the Terraform setup/destroy scripts.
func teardown(t *testing.T) {
t.Logf("AWS S3 Binding CertificationTests teardown...")
// No queues or topics are created by this binding, so there is nothing
// to delete here (the earlier comment was copied from the SNS/SQS tests).
t.Logf("AWS S3 Binding CertificationTests teardown...done!")
}

View File

@ -0,0 +1,29 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: aws-s3
namespace: default
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
value: conformance-test-docker
- name: endpoint
value: "http://localhost:4566"
- name: accessKey
value: "my-access"
- name: secretKey
value: "my-secret"
- name: region
value: "us-east-1"
- name: decodeBase64
value: false
- name: encodeBase64
value: false
- name: forcePathStyle
value: true
- name: disableSSL
value: true
- name: insecureSSL
value: true

View File

@ -0,0 +1,27 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
name: aws-s3
namespace: default
spec:
type: bindings.aws.s3
version: v1
metadata:
- name: bucket
value: ${{BINDINGS_AWS_S3_BUCKET}}
- name: accessKey
value: ${{AWS_ACCESS_KEY_ID}}
- name: secretKey
value: ${{AWS_SECRET_ACCESS_KEY}}
- name: region
value: ${{AWS_REGION}}
- name: decodeBase64
value: false
- name: encodeBase64
value: false
- name: forcePathStyle
value: true
- name: disableSSL
value: true
- name: insecureSSL
value: true

View File

@ -76,3 +76,15 @@ components:
- component: postgres
allOperations: false
operations: [ "exec", "query", "close", "operations" ]
- component: aws.s3.docker
operations: ["create", "operations", "get", "list"]
config:
output:
key: $((uuid))
outputData: '{"data": {"maxResults": 10}}'
- component: aws.s3.terraform
operations: ["create", "operations", "get", "list"]
config:
output:
key: $((uuid))
outputData: '{"data": {"maxResults": 10}}'

View File

@ -43,6 +43,7 @@ import (
"github.com/dapr/components-contrib/workflows"
"github.com/dapr/kit/logger"
b_aws_s3 "github.com/dapr/components-contrib/bindings/aws/s3"
b_azure_blobstorage "github.com/dapr/components-contrib/bindings/azure/blobstorage"
b_azure_cosmosdb "github.com/dapr/components-contrib/bindings/azure/cosmosdb"
b_azure_eventgrid "github.com/dapr/components-contrib/bindings/azure/eventgrid"
@ -656,6 +657,10 @@ func loadOutputBindings(tc TestComponent) bindings.OutputBinding {
binding = b_kubemq.NewKubeMQ(testLogger)
case "postgres":
binding = b_postgres.NewPostgres(testLogger)
case "aws.s3.docker":
binding = b_aws_s3.NewAWSS3(testLogger)
case "aws.s3.terraform":
binding = b_aws_s3.NewAWSS3(testLogger)
default:
return nil
}