ci: standard linter config (#1102)

* ci: standard linter config

Signed-off-by: Long <long.dai@intel.com>

* Update utils.go

* fix pulsar issue

Signed-off-by: Long <long.dai@intel.com>

Co-authored-by: Artur Souza <artursouza.ms@outlook.com>
Long Dai 2021-09-22 06:21:59 +08:00 committed by GitHub
parent c425f1319a
commit 988fed05ab
246 changed files with 1273 additions and 1080 deletions

View File

@ -97,7 +97,7 @@ linters-settings:
goimports:
# put imports beginning with prefix after 3rd-party packages;
# it's a comma-separated list of prefixes
local-prefixes:
local-prefixes: github.com/dapr/
gocyclo:
# minimal code complexity to report, 30 by default (but we recommend 10-20)
min-complexity: 10
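
With local-prefixes set to github.com/dapr/, goimports treats dapr's own modules as a separate group placed after third-party packages; the import re-grouping visible in the Go hunks further down this diff follows the same layout. A minimal sketch of the expected grouping (package and function names here are illustrative, not taken from the repo):

package example

import (
	"fmt"
	"time"

	"github.com/google/uuid"

	"github.com/dapr/kit/logger"
)

func demo() {
	log := logger.NewLogger("example") // dapr-local group, placed last
	log.Infof("run %s at %s", uuid.New().String(), time.Now().Format(time.RFC3339)) // third-party group
	fmt.Println("groups: stdlib, third-party, github.com/dapr/*")
}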
@ -114,13 +114,15 @@ linters-settings:
# minimal length of string constant, 3 by default
min-len: 3
# minimal occurrences count to trigger, 3 by default
min-occurrences: 3
min-occurrences: 5
depguard:
list-type: blacklist
include-go-root: false
packages:
- github.com/Sirupsen/logrus
packages-with-error-messages:
# specify an error message to output when a blacklisted package is used
github.com/sirupsen/logrus: "logging is allowed only by logutils.Log"
github.com/Sirupsen/logrus: "must use github.com/dapr/kit/logger"
misspell:
# Correct spellings using locale preferences for US or UK.
# Default is to use a neutral variety of English.
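
The depguard entry blacklists github.com/Sirupsen/logrus and, per the new message, points callers at github.com/dapr/kit/logger instead. A minimal sketch of the pattern the rule steers components toward, assuming the kit logger's NewLogger constructor and leveled methods (package and variable names below are illustrative):

package example

import "github.com/dapr/kit/logger"

// log is the component logger; importing github.com/Sirupsen/logrus here
// would be reported by depguard with the configured error message.
var log = logger.NewLogger("example.component")

func doWork() {
	log.Debug("starting work")
	log.Infof("finished after %d steps", 3)
}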
@ -172,6 +174,7 @@ linters-settings:
- hugeParam
- ifElseChain
- singleCaseSwitch
- exitAfterDefer
# Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` to see all tags and checks.
# Empty list by default. See https://github.com/go-critic/go-critic#usage -> section "Tags".
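
exitAfterDefer joins hugeParam, ifElseChain, and singleCaseSwitch in this gocritic check list. The check itself flags calls such as os.Exit or log.Fatal made after a defer statement, because deferred cleanup never runs when the process exits that way. A minimal sketch of the pattern it reports (file name and helper are illustrative):

package example

import (
	"fmt"
	"os"
)

func run() {
	f, err := os.Create("out.txt")
	if err != nil {
		fmt.Println("create failed:", err)
		return
	}
	defer f.Close() // deferred cleanup

	if _, err := fmt.Fprintln(f, "hello"); err != nil {
		os.Exit(1) // exitAfterDefer reports this: os.Exit skips the deferred Close
	}
}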
@ -188,6 +191,12 @@ linters-settings:
- NOTE
- OPTIMIZE # marks code that should be optimized before merging
- HACK # marks hack-arounds that should be removed before merging
godot:
exclude:
- 'nosec'
- '\}'
capital: false
scope: all
dogsled:
# checks assignments with too many blank identifiers; default is 2
max-blank-identifiers: 2
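
The new godot block drives the bulk of this diff: with scope: all, every comment (not only declaration comments) must end in a period, except comments matching the exclude patterns ('nosec' annotations and lone closing braces). A minimal before/after sketch of the change applied across the Go files below:

package example

// Non-compliant, reported by godot:
//	Operations returns the supported operations
//
// Compliant form, as applied throughout this commit:

// Operations returns the supported operations.
func Operations() []string { return nil }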
@ -218,14 +227,22 @@ linters:
- dupl
- errcheck
- funlen
- gochecknoglobals
- gochecknoinits
- gocyclo
- gocognit
- godox
- interfacer
- lll
- maligned
- scopelint
- unparam
- wsl
- gomnd
- godot
- testpackage
- goerr113
- nestif
- nlreturn
- exhaustive
- noctx
- gci
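
Whether a given name in this list is being switched on or silenced, each entry maps to a concrete check; noctx, for instance, reports outbound HTTP requests built without a context.Context. A minimal sketch of the form that check accepts (URL handling and function name are illustrative):

package example

import (
	"context"
	"io"
	"net/http"
	"time"
)

func fetch(ctx context.Context, url string) ([]byte, error) {
	ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()

	// http.Get(url) would be flagged by noctx; attach the caller's context instead.
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return io.ReadAll(resp.Body)
}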

View File

@ -72,7 +72,7 @@ func (s EnvironmentSettings) GetAzureEnvironment() (*azure.Environment, error) {
// GetAuthorizer creates an Authorizer retrieved from, in order:
// 1. Client credentials
// 2. Client certificate
// 3. MSI
// 3. MSI.
func (s EnvironmentSettings) GetAuthorizer() (autorest.Authorizer, error) {
spt, err := s.GetServicePrincipalToken()
if err != nil {
@ -85,7 +85,7 @@ func (s EnvironmentSettings) GetAuthorizer() (autorest.Authorizer, error) {
// GetServicePrincipalToken returns a Service Principal Token retrieved from, in order:
// 1. Client credentials
// 2. Client certificate
// 3. MSI
// 3. MSI.
func (s EnvironmentSettings) GetServicePrincipalToken() (*adal.ServicePrincipalToken, error) {
// 1. Client credentials
if c, e := s.GetClientCredentials(); e == nil {
@ -154,7 +154,7 @@ func (s EnvironmentSettings) GetMSI() MSIConfig {
return config
}
// CredentialsConfig provides the options to get a bearer authorizer from client credentials
// CredentialsConfig provides the options to get a bearer authorizer from client credentials.
type CredentialsConfig struct {
*auth.ClientCredentialsConfig
}

View File

@ -18,7 +18,7 @@ const (
fakeTenantID = "14bec2db-7f9a-4f3d-97ca-2d384ac83389"
fakeClientID = "04bec2db-7f9a-4f3d-97ca-3d384ac83389"
// Base64 encoded test pfx cert - Expire date: 09/19/2119
// Base64 encoded test pfx cert - Expire date: 09/19/2119.
testCert = "MIIKTAIBAzCCCgwGCSqGSIb3DQEHAaCCCf0Eggn5MIIJ9TCCBhYGCSqGSIb3DQEHAaCCBgcEggYDMIIF/zCCBfsGCyqGSIb3DQEMCgECoIIE/jCCBPowHAYKKoZIhvcNAQwBAzAOBAifAbe5KAL7IwICB9AEggTYZ3dAdDNqi5GoGJ/VfZhh8dxIIERUaC/SO5vKFhDfNu9VCQKF7Azr3eJ4cjzQmicfLd6FxJpB6d+8fbQuCcYPpTAdqf5zmLtZWMDWW8YZE0pV7b6sDZSw/NbT2zFhsx2uife6NnLK//Pj+GeALUDPfhVfqfLCfWZlCHxlbOipVZv9U4+TCVO2vyrGUq2XesT78cT+LhbHYkcrxTCsXNLWAvSJ9zXOIVA5HNS3Qv8pQJSSbqYVBbLk6FEbt5B3pk0xoA1hhM7dlCoGvPJ/ajvN3wAcEB5kmjJ4q59s2HeXloa7aAhXTFEkL2rZH+acgr1AO/DwcGXUqzJ2ooGYBfoqmgaXjydzyVLzYNccBGbzBR4Q0crMW6zDBXDlwvnLxmqZ7p05Ix9ZqISQyTm/DboNwQk1erOJd0fe6Brg1Dw4td6Uh/AXfM8m+XCGJFn79ZMCtd4rP8w9l008m8xe7rczSkMW0aRJVr0j3fFheene83jOHEB0q3KMKsVTkPWehnTGPj4TrsL+WwrmJpqrSloXMyaqvS9hvqAfPal0JI9taz6R5HFONaO6oi/ajpX3tYSX0rafQPKHmJpFLtJHYPopFYgP4akq8wKOCjq1IDg3ZW59G9nh8Vcw3IrAnr+C9iMgzPUvCHCinQK24cmbn5px6S0U0ARhY90KrSMFRyjvxNpZzc+A/AAaQ/wwuLVy1GyuZ2sRFyVSCTRMC6ZfXAUs+OijDO/B++BCdmqm5p5/aZpQYf1cb681AaDc/5XTHtCC3setYfpviMe1grvp4jaPVrjnG85pVenZJ0d+Xo7BnD38Ec5RsKpvtXIieiRIbnGqzTzxj/OU/cdglrKy8MLo6IJigXA6N3x14o4e3akq7cvLPRQZqlWyLqjlGnJdZKJlemFlOnDSluzwGBwwKF+PpXuRVSDhi/ARN3g8L+wVAQQMEylWJfK7sNDun41rimE8wGFjqlfZNVg/pCBKvw3p90pCkxVUEZBRrP1vaGzrIvOsMU/rrJqQU7Imv9y6nUrvHdcoRFUdbgWVWZus6VwTrgwRkfnPiLZo0r5Vh4kComH0+Tc4kgwbnnuQQWzn8J9Ur4Nu0MkknC/1jDwulq2XOIBPclmEPg9CSSwfKonyaRxz+3GoPy0kGdHwsOcXIq5qBIyiYAtM1g1cQLtOT16OCjapus+GIOLnItP2OAhO70dsTMUlsQSNEH+KxUxFb1pFuQGXnStmgZtHYI4LvC/d820tY0m0I6SgfabnoQpIXa6iInIt970awwyUP1P/6m9ie5bCRDWCj4R0bNiNQBjq9tHfO4xeGK+fUTyeU4OEBgiyisNVhijf6GlfPHKWwkInAN0WbS3UHHACjkP0jmRb70b/3VbWon/+K5S6bk2ohIDsbPPVolTvfMehRwKatqQTbTXlnDIHJQzk9SfHHWJzkrQXEIbXgGxHSHm5CmNetR/MYGlivjtGRVxOLr7Y1tK0GGEDMs9nhiSvlwWjAEuwIN+72T6Kx7hPRld1BvaTYLRYXfjnedo7D2AoR+8tGLWjU31rHJVua/JILjGC84ARCjk5LOFHOXUjOP1jJomh8ebjlVijNWP0gLUC14AE8UJsJ1Xi6xiNOTeMpeOIJl2kX81uvnNbQ0j4WajfXlox5eV+0iJ1yNfw5jGB6TATBgkqhkiG9w0BCRUxBgQEAQAAADBXBgkqhkiG9w0BCRQxSh5IADgAZABlADYANgA5AGEAYQAtADUAZgAyAGMALQA0ADIANgBmAC0AYQA3ADAANwAtADIANgBmADkAOAAwADAANAAwAGEAYQAwMHkGCSsGAQQBgjcRATFsHmoATQBpAGMAcgBvAHMAbwBmAHQAIABFAG4AaABhAG4AYwBlAGQAIABSAFMAQQAgAGEAbgBkACAAQQBFAFMAIABDAHIAeQBwAHQAbwBnAHIAYQBwAGgAaQBjACAAUAByAG8AdgBpAGQAZQByMIID1wYJKoZIhvcNAQcGoIIDyDCCA8QCAQAwggO9BgkqhkiG9w0BBwEwHAYKKoZIhvcNAQwBBjAOBAiT1ngppOJy/gICB9CAggOQt9iTz9CmP/3+EBQv3WM80jLHHyrkJM5nIckr+4fmcl3frhbZZajSf1eigjOaqWpz1cAu9KtSAb0Fa35AKr7r9du5SXwBxyYS6XzXsWekSrdvh3Dui0abXo/yh+lIfI/61sJLv5Gc7/DbJrwlHHOD1DR/ohmncAiSjGUYaO9/Y9xUV3cbzjZypqKkkbahaWVMC8+D9zUSkH64RUuLvSi5X5QKFsICNouBL1j/C2s3VZoyR9F0ajRCEMFnQsMfJ/1fP2iW/wwFIARBjphj1SaEaP3XkxQadslR0cwhf6Ujj/tXyd1zV5oI8rJ54r8eN5Vu8NxEX3kl+A7gCc9ACEC0klZ18mQUjb6eDpUSFM63/wx7ISDKaD7gyWCul1JwlUmYzvrRw8sAwjVEyXzc+n0oIOlk0lE6vk3mybkfcOxafRkdr0zVnd5L+XtV/V38sd3ExNojQgUDNy905PNTHdeVnvHt6E8XGNgGX7a/tB1r7Un3soL5Vjcuf/HMdyR57CF2lxFSrdZ1bNnw7Z1GJbQZHago2AovNw+BbBJfey0iuIRP+dgkIfle0nzl3E7T9jU0r2+GEQfN7YYjRL19XFX4n8kNpiTDDRxdNj/yKQDfC7f8prZY/yP8bJLaFBd+uoH+D4QKmWk7plwXTOLiNno9cOTrLYT48HCEghtBbnTgZglOg8eDZd35MR5KcCNWxVy/enEj3/BEtkH7qnJsxlFMu1WwAQzaVYK1u1sGCD8NGH2wtiJi0O5q+YsQItv7ia2x9lSL1JPagtRhxnIZbC5HaIx87bSrVY9XTrWlj9X0H+YSdbUrszRse+LLJkw6h8wXqBvrBKsxnPrfJyQWs3zqehk0FPF1pi+spoJzp7//nmZ5a7knRXYkxV++TiuX+RQSNR/cFxezEwR+2WUAJaJfPpSf06dp5M/gJNVJQGMNiLHCMc9w6CPLUFQA1FG5YdK8nFrSo0iclX7wAHWpCjkqHj7PgOT+Ia5qiOb2dN2GBWPh5N94PO15BLlS/9UUvGxvmWqmG3lpr3hP5B6OZdQl8lxBGc8KTq4GdoJrQ+Jmfej3LQa33mV5VZwJqdbH9iEHvUH2VYC8ru7r5drXBqP5IlZrkdIL5uzzaoHsnWtu0OKgjwRwXaAF24zM0GVXbueGXLXH3vwBwoO4GnDfJ0wN0qFEJBRexRdPP9JKjPfVmwbi89sx1zJMId3nCmetq5yGMDcwHzAHBgUrDgMCGgQUmQChLB4WJjopytxl4LNQ9NuCbPkEFO+tI0n+7a6hwK9hqzq7tghkXp08"
)

View File

@ -5,7 +5,7 @@
package azure
// MetadataKeys : Keys for all metadata properties
// MetadataKeys : Keys for all metadata properties.
var MetadataKeys = map[string][]string{ // nolint: gochecknoglobals
// clientId, clientSecret, tenantId are supported for backwards-compatibility as they're used by some components, but should be considered deprecated
@ -29,5 +29,5 @@ var MetadataKeys = map[string][]string{ // nolint: gochecknoglobals
"AzureEnvironment": {"azureEnvironment"},
}
// Default Azure environment
// Default Azure environment.
const DefaultAzureEnvironment = "AZUREPUBLICCLOUD"

View File

@ -27,7 +27,7 @@ func init() {
}
}
// GetKubeClient returns a kubernetes client
// GetKubeClient returns a kubernetes client.
func GetKubeClient() (*kubernetes.Clientset, error) {
flag.Parse()
conf, err := rest.InClusterConfig()

View File

@ -72,7 +72,7 @@ func NewDingTalkWebhook(l logger.Logger) *DingTalkWebhook {
}
}
// Init performs metadata parsing
// Init performs metadata parsing.
func (t *DingTalkWebhook) Init(metadata bindings.Metadata) error {
var err error
if err = t.settings.Decode(metadata.Properties); err != nil {
@ -85,7 +85,7 @@ func (t *DingTalkWebhook) Init(metadata bindings.Metadata) error {
return nil
}
// Read triggers the outgoing webhook, not yet production ready
// Read triggers the outgoing webhook, not yet production ready.
func (t *DingTalkWebhook) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
t.logger.Debugf("dingtalk webhook: start read input binding")
@ -100,7 +100,7 @@ func (t *DingTalkWebhook) Read(handler func(*bindings.ReadResponse) ([]byte, err
return nil
}
// Operations returns list of operations supported by dingtalk webhook binding
// Operations returns list of operations supported by dingtalk webhook binding.
func (t *DingTalkWebhook) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation, bindings.GetOperation}
}

View File

@ -13,12 +13,13 @@ import (
"strings"
"time"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/nacos-group/nacos-sdk-go/clients"
"github.com/nacos-group/nacos-sdk-go/clients/config_client"
"github.com/nacos-group/nacos-sdk-go/common/constant"
"github.com/nacos-group/nacos-sdk-go/vo"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
const (
@ -29,13 +30,13 @@ const (
metadataConfigOnchange = "config-onchange"
)
// Config type
// Config type.
type configParam struct {
dataID string
group string
}
// Nacos allows reading/writing to a Nacos server
// Nacos allows reading/writing to a Nacos server.
type Nacos struct {
settings Settings
config configParam
@ -46,12 +47,12 @@ type Nacos struct {
readHandler func(response *bindings.ReadResponse) ([]byte, error)
}
// NewNacos returns a new Nacos instance
// NewNacos returns a new Nacos instance.
func NewNacos(logger logger.Logger) *Nacos {
return &Nacos{logger: logger} //nolint:exhaustivestruct
}
// Init implements InputBinding/OutputBinding's Init method
// Init implements InputBinding/OutputBinding's Init method.
func (n *Nacos) Init(metadata bindings.Metadata) error {
n.settings = Settings{
Timeout: defaultTimeout,
@ -125,7 +126,7 @@ func (n *Nacos) createConfigClient() error {
return nil
}
// Read implements InputBinding's Read method
// Read implements InputBinding's Read method.
func (n *Nacos) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
n.readHandler = handler
@ -143,7 +144,7 @@ func (n *Nacos) Close() error {
return nil
}
// Invoke implements OutputBinding's Invoke method
// Invoke implements OutputBinding's Invoke method.
func (n *Nacos) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
switch req.Operation {
case bindings.CreateOperation:
@ -157,7 +158,7 @@ func (n *Nacos) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, e
}
}
// Operations implements OutputBinding's Operations method
// Operations implements OutputBinding's Operations method.
func (n *Nacos) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation, bindings.GetOperation}
}

View File

@ -13,9 +13,10 @@ import (
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/require"
)
func TestInputBindingRead(t *testing.T) { //nolint:paralleltest

View File

@ -10,12 +10,13 @@ import (
"encoding/json"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
"github.com/google/uuid"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/google/uuid"
)
// AliCloudOSS is a binding for an AliCloud OSS storage bucket
// AliCloudOSS is a binding for an AliCloud OSS storage bucket.
type AliCloudOSS struct {
metadata *ossMetadata
client *oss.Client
@ -29,12 +30,12 @@ type ossMetadata struct {
Bucket string `json:"bucket"`
}
// NewAliCloudOSS returns a new instance
// NewAliCloudOSS returns a new instance.
func NewAliCloudOSS(logger logger.Logger) *AliCloudOSS {
return &AliCloudOSS{logger: logger}
}
// Init does metadata parsing and connection creation
// Init does metadata parsing and connection creation.
func (s *AliCloudOSS) Init(metadata bindings.Metadata) error {
m, err := s.parseMetadata(metadata)
if err != nil {

View File

@ -8,8 +8,9 @@ package oss
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -18,6 +18,7 @@ import (
mqc "github.com/apache/rocketmq-client-go/v2/consumer"
"github.com/apache/rocketmq-client-go/v2/primitive"
mqw "github.com/cinience/go_rocketmq"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/dapr/kit/retry"
@ -42,7 +43,7 @@ func NewAliCloudRocketMQ(l logger.Logger) *AliCloudRocketMQ {
}
}
// Init performs metadata parsing
// Init performs metadata parsing.
func (a *AliCloudRocketMQ) Init(metadata bindings.Metadata) error {
var err error
if err = a.settings.Decode(metadata.Properties); err != nil {
@ -68,7 +69,7 @@ func (a *AliCloudRocketMQ) Init(metadata bindings.Metadata) error {
return nil
}
// Read triggers the rocketmq subscription
// Read triggers the rocketmq subscription.
func (a *AliCloudRocketMQ) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
a.logger.Debugf("binding rocketmq: start read input binding")
@ -183,7 +184,7 @@ func (a *AliCloudRocketMQ) setupPublisher() (mqw.Producer, error) {
return nil, errors.New("binding-rocketmq error: cannot found rocketmq producer")
}
// Operations returns list of operations supported by rocketmq binding
// Operations returns list of operations supported by rocketmq binding.
func (a *AliCloudRocketMQ) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation}
}

View File

@ -10,10 +10,11 @@ import (
"strings"
rocketmq "github.com/cinience/go_rocketmq"
"github.com/dapr/kit/config"
)
// rocketmq
// rocketmq.
const (
metadataRocketmqTopic = "rocketmq-topic"
metadataRocketmqTag = "rocketmq-tag"

View File

@ -10,9 +10,10 @@ import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestTableStoreMetadata(t *testing.T) {

View File

@ -15,9 +15,10 @@ import (
"net/http"
"sync"
jsoniter "github.com/json-iterator/go"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
jsoniter "github.com/json-iterator/go"
)
const (

View File

@ -12,10 +12,11 @@ import (
"strings"
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
jsoniter "github.com/json-iterator/go"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
const (

View File

@ -9,8 +9,9 @@ import (
"sync"
"time"
"github.com/dapr/kit/logger"
"github.com/golang-jwt/jwt"
"github.com/dapr/kit/logger"
)
// The "issued at" timestamp in the JWT must be within one hour from the

View File

@ -11,12 +11,13 @@ import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/dynamodb"
"github.com/aws/aws-sdk-go/service/dynamodb/dynamodbattribute"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// DynamoDB allows performing stateful operations on AWS DynamoDB
// DynamoDB allows performing stateful operations on AWS DynamoDB.
type DynamoDB struct {
client *dynamodb.DynamoDB
table string
@ -32,12 +33,12 @@ type dynamoDBMetadata struct {
Table string `json:"table"`
}
// NewDynamoDB returns a new DynamoDB instance
// NewDynamoDB returns a new DynamoDB instance.
func NewDynamoDB(logger logger.Logger) *DynamoDB {
return &DynamoDB{logger: logger}
}
// Init performs connection parsing for DynamoDB
// Init performs connection parsing for DynamoDB.
func (d *DynamoDB) Init(metadata bindings.Metadata) error {
meta, err := d.getDynamoDBMetadata(metadata)
if err != nil {

View File

@ -8,8 +8,9 @@ package dynamodb
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -19,16 +19,17 @@ import (
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/service/kinesis"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/google/uuid"
"github.com/vmware/vmware-go-kcl/clientlibrary/config"
"github.com/vmware/vmware-go-kcl/clientlibrary/interfaces"
"github.com/vmware/vmware-go-kcl/clientlibrary/worker"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// AWSKinesis allows receiving and sending data to/from AWS Kinesis stream
// AWSKinesis allows receiving and sending data to/from AWS Kinesis stream.
type AWSKinesis struct {
client *kinesis.Kinesis
metadata *kinesisMetadata
@ -55,16 +56,16 @@ type kinesisMetadata struct {
type kinesisConsumerMode string
const (
// ExtendedFanout - dedicated throughput through data stream api
// ExtendedFanout - dedicated throughput through data stream api.
ExtendedFanout kinesisConsumerMode = "extended"
// SharedThroughput - shared throughput using checkpoint and monitoring
// SharedThroughput - shared throughput using checkpoint and monitoring.
SharedThroughput kinesisConsumerMode = "shared"
partitionKeyName = "partitionKey"
)
// recordProcessorFactory
// recordProcessorFactory.
type recordProcessorFactory struct {
logger logger.Logger
handler func(*bindings.ReadResponse) ([]byte, error)
@ -75,12 +76,12 @@ type recordProcessor struct {
handler func(*bindings.ReadResponse) ([]byte, error)
}
// NewAWSKinesis returns a new AWS Kinesis instance
// NewAWSKinesis returns a new AWS Kinesis instance.
func NewAWSKinesis(logger logger.Logger) *AWSKinesis {
return &AWSKinesis{logger: logger}
}
// Init does metadata parsing and connection creation
// Init does metadata parsing and connection creation.
func (a *AWSKinesis) Init(metadata bindings.Metadata) error {
m, err := a.parseMetadata(metadata)
if err != nil {
@ -169,7 +170,7 @@ func (a *AWSKinesis) Read(handler func(*bindings.ReadResponse) ([]byte, error))
return nil
}
// Subscribe to all shards
// Subscribe to all shards.
func (a *AWSKinesis) Subscribe(ctx context.Context, streamDesc kinesis.StreamDescription, handler func(*bindings.ReadResponse) ([]byte, error)) error {
consumerARN, err := a.ensureConsumer(streamDesc.StreamARN)
if err != nil {

View File

@ -8,8 +8,9 @@ package kinesis
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -16,10 +16,11 @@ import (
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/google/uuid"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/google/uuid"
)
const (
@ -31,7 +32,7 @@ const (
maxResults = 1000
)
// AWSS3 is a binding for an AWS S3 storage bucket
// AWSS3 is a binding for an AWS S3 storage bucket.
type AWSS3 struct {
metadata *s3Metadata
s3Client *s3.S3
@ -63,12 +64,12 @@ type listPayload struct {
Delimiter string `json:"delimiter"`
}
// NewAWSS3 returns a new AWSS3 instance
// NewAWSS3 returns a new AWSS3 instance.
func NewAWSS3(logger logger.Logger) *AWSS3 {
return &AWSS3{logger: logger}
}
// Init does metadata parsing and connection creation
// Init does metadata parsing and connection creation.
func (s *AWSS3) Init(metadata bindings.Metadata) error {
m, err := s.parseMetadata(metadata)
if err != nil {
@ -277,7 +278,7 @@ func (s *AWSS3) getSession(metadata *s3Metadata) (*session.Session, error) {
return sess, nil
}
// Helper to merge config and request metadata
// Helper to merge config and request metadata.
func (metadata s3Metadata) mergeWithRequestMetadata(req *bindings.InvokeRequest) (s3Metadata, error) {
merged := metadata

View File

@ -8,9 +8,10 @@ package s3
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -13,9 +13,11 @@ import (
"strings"
"github.com/aws/aws-sdk-go/aws"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/aws/aws-sdk-go/service/ses"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
@ -25,7 +27,7 @@ const (
CharSet = "UTF-8"
)
// AWSSES is an AWS SNS binding
// AWSSES is an AWS SNS binding.
type AWSSES struct {
metadata *sesMetadata
logger logger.Logger
@ -44,12 +46,12 @@ type sesMetadata struct {
EmailBcc string `json:"emailBcc"`
}
// NewAWSSES creates a new AWSSES binding instance
// NewAWSSES creates a new AWSSES binding instance.
func NewAWSSES(logger logger.Logger) *AWSSES {
return &AWSSES{logger: logger}
}
// Init does metadata parsing
// Init does metadata parsing.
func (a *AWSSES) Init(metadata bindings.Metadata) error {
// Parse input metadata
meta, err := a.parseMetadata(metadata)
@ -153,7 +155,7 @@ func (a *AWSSES) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse,
return nil, nil
}
// Helper to merge config and request metadata
// Helper to merge config and request metadata.
func (metadata sesMetadata) mergeWithRequestMetadata(req *bindings.InvokeRequest) sesMetadata {
merged := metadata

View File

@ -8,9 +8,10 @@ package ses
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -10,12 +10,13 @@ import (
"fmt"
"github.com/aws/aws-sdk-go/service/sns"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// AWSSNS is an AWS SNS binding
// AWSSNS is an AWS SNS binding.
type AWSSNS struct {
client *sns.SNS
topicARN string
@ -37,12 +38,12 @@ type dataPayload struct {
Subject interface{} `json:"subject"`
}
// NewAWSSNS creates a new AWSSNS binding instance
// NewAWSSNS creates a new AWSSNS binding instance.
func NewAWSSNS(logger logger.Logger) *AWSSNS {
return &AWSSNS{logger: logger}
}
// Init does metadata parsing
// Init does metadata parsing.
func (a *AWSSNS) Init(metadata bindings.Metadata) error {
m, err := a.parseMetadata(metadata)
if err != nil {

View File

@ -8,8 +8,9 @@ package sns
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -11,12 +11,13 @@ import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/sqs"
aws_auth "github.com/dapr/components-contrib/authentication/aws"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// AWSSQS allows receiving and sending data to/from AWS SQS
// AWSSQS allows receiving and sending data to/from AWS SQS.
type AWSSQS struct {
Client *sqs.SQS
QueueURL *string
@ -33,12 +34,12 @@ type sqsMetadata struct {
SessionToken string `json:"sessionToken"`
}
// NewAWSSQS returns a new AWS SQS instance
// NewAWSSQS returns a new AWS SQS instance.
func NewAWSSQS(logger logger.Logger) *AWSSQS {
return &AWSSQS{logger: logger}
}
// Init does metadata parsing and connection creation
// Init does metadata parsing and connection creation.
func (a *AWSSQS) Init(metadata bindings.Metadata) error {
m, err := a.parseSQSMetadata(metadata)
if err != nil {

View File

@ -8,8 +8,9 @@ package sqs
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -16,22 +16,23 @@ import (
"strconv"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/google/uuid"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/google/uuid"
)
const (
// Used to reference the blob relative to the container
// Used to reference the blob relative to the container.
metadataKeyBlobName = "blobName"
// A string value that identifies the portion of the list to be returned with the next list operation.
// The operation returns a marker value within the response body if the list returned was not complete. The marker
// value may then be used in a subsequent call to request the next set of list items.
// See: https://docs.microsoft.com/en-us/rest/api/storageservices/list-blobs#uri-parameters
metadataKeyMarker = "marker"
// The number of blobs that will be returned in a list operation
// The number of blobs that will be returned in a list operation.
metadataKeyNumber = "number"
// Defines if the user defined metadata should be returned in the get operation
// Defines if the user defined metadata should be returned in the get operation.
metadataKeyIncludeMetadata = "includeMetadata"
// Defines the delete snapshots option for the delete operation.
// See: https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob#request-headers
@ -45,7 +46,7 @@ const (
metadataKeyContentDisposition = "contentDisposition"
meatdataKeyCacheControl = "cacheControl"
// Specifies the maximum number of HTTP GET requests that will be made while reading from a RetryReader. A value
// of zero means that no additional HTTP GET requests will be made
// of zero means that no additional HTTP GET requests will be made.
defaultGetBlobRetryCount = 10
// Specifies the maximum number of blobs to return, including all BlobPrefix elements. If the request does not
// specify maxresults the server will return up to 5,000 items.
@ -65,7 +66,7 @@ const (
var ErrMissingBlobName = errors.New("blobName is a required attribute")
// AzureBlobStorage allows saving blobs to an Azure Blob Storage account
// AzureBlobStorage allows saving blobs to an Azure Blob Storage account.
type AzureBlobStorage struct {
metadata *blobStorageMetadata
containerURL azblob.ContainerURL
@ -101,12 +102,12 @@ type listPayload struct {
Include listInclude `json:"include"`
}
// NewAzureBlobStorage returns a new Azure Blob Storage instance
// NewAzureBlobStorage returns a new Azure Blob Storage instance.
func NewAzureBlobStorage(logger logger.Logger) *AzureBlobStorage {
return &AzureBlobStorage{logger: logger}
}
// Init performs metadata parsing
// Init performs metadata parsing.
func (a *AzureBlobStorage) Init(metadata bindings.Metadata) error {
m, err := a.parseMetadata(metadata)
if err != nil {

View File

@ -9,9 +9,10 @@ import (
"testing"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -11,12 +11,14 @@ import (
"strings"
"github.com/a8m/documentdb"
"github.com/dapr/components-contrib/authentication/azure"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// CosmosDB allows performing state operations on collections
// CosmosDB allows performing state operations on collections.
type CosmosDB struct {
client *documentdb.DocumentDB
collection *documentdb.Collection
@ -34,12 +36,12 @@ type cosmosDBCredentials struct {
PartitionKey string `json:"partitionKey"`
}
// NewCosmosDB returns a new CosmosDB instance
// NewCosmosDB returns a new CosmosDB instance.
func NewCosmosDB(logger logger.Logger) *CosmosDB {
return &CosmosDB{logger: logger}
}
// Init performs CosmosDB connection parsing and connecting
// Init performs CosmosDB connection parsing and connecting.
func (c *CosmosDB) Init(metadata bindings.Metadata) error {
m, err := c.parseMetadata(metadata)
if err != nil {

View File

@ -9,9 +9,10 @@ import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -11,18 +11,19 @@ import (
"fmt"
"time"
gremcos "github.com/supplyon/gremcos"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
gremcos "github.com/supplyon/gremcos"
)
const (
queryOperation bindings.OperationKind = "query"
// keys from request's Data
// keys from request's Data.
commandGremlinKey = "gremlin"
// keys from response's Data
// keys from response's Data.
respGremlinKey = "gremlin"
respOpKey = "operation"
respStartTimeKey = "start-time"
@ -30,7 +31,7 @@ const (
respDurationKey = "duration"
)
// CosmosGraphDB allows performing state operations on collections
// CosmosGraphDB allows performing state operations on collections.
type CosmosGraphDB struct {
metadata *cosmosGraphDBCredentials
client *gremcos.Cosmos
@ -43,12 +44,12 @@ type cosmosGraphDBCredentials struct {
Username string `json:"username"`
}
// NewCosmosGraphDB returns a new CosmosGraphDB instance
// NewCosmosGraphDB returns a new CosmosGraphDB instance.
func NewCosmosGraphDB(logger logger.Logger) *CosmosGraphDB {
return &CosmosGraphDB{logger: logger}
}
// Init performs CosmosDB connection parsing and connecting
// Init performs CosmosDB connection parsing and connecting.
func (c *CosmosGraphDB) Init(metadata bindings.Metadata) error {
c.logger.Debug("Initializing Cosmos Graph DB binding")

View File

@ -8,9 +8,10 @@ package cosmosgraphdb
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -15,12 +15,13 @@ import (
"github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2020-04-01-preview/eventgrid"
"github.com/Azure/go-autorest/autorest/azure/auth"
"github.com/valyala/fasthttp"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/valyala/fasthttp"
)
// AzureEventGrid allows sending/receiving Azure Event Grid events
// AzureEventGrid allows sending/receiving Azure Event Grid events.
type AzureEventGrid struct {
metadata *azureEventGridMetadata
logger logger.Logger
@ -47,12 +48,12 @@ type azureEventGridMetadata struct {
TopicEndpoint string `json:"topicEndpoint"`
}
// NewAzureEventGrid returns a new Azure Event Grid instance
// NewAzureEventGrid returns a new Azure Event Grid instance.
func NewAzureEventGrid(logger logger.Logger) *AzureEventGrid {
return &AzureEventGrid{logger: logger}
}
// Init performs metadata init
// Init performs metadata init.
func (a *AzureEventGrid) Init(metadata bindings.Metadata) error {
m, err := a.parseMetadata(metadata)
if err != nil {

View File

@ -8,8 +8,9 @@ package eventgrid
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -20,32 +20,33 @@ import (
"github.com/Azure/azure-event-hubs-go/v3/storage"
"github.com/Azure/azure-storage-blob-go/azblob"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
const (
// metadata
// metadata.
connectionString = "connectionString"
// required by subscriber
// required by subscriber.
consumerGroup = "consumerGroup"
storageAccountName = "storageAccountName"
storageAccountKey = "storageAccountKey"
storageContainerName = "storageContainerName"
// optional
// optional.
partitionKeyName = "partitionKey"
partitionIDName = "partitionID"
// errors
// errors.
missingConnectionStringErrorMsg = "error: connectionString is a required attribute"
missingStorageAccountNameErrorMsg = "error: storageAccountName is a required attribute"
missingStorageAccountKeyErrorMsg = "error: storageAccountKey is a required attribute"
missingStorageContainerNameErrorMsg = "error: storageContainerName is a required attribute"
missingConsumerGroupErrorMsg = "error: consumerGroup is a required attribute"
// Event Hubs SystemProperties names for metadata passthrough
// Event Hubs SystemProperties names for metadata passthrough.
sysPropSequenceNumber = "x-opt-sequence-number"
sysPropEnqueuedTime = "x-opt-enqueued-time"
sysPropOffset = "x-opt-offset"
@ -98,7 +99,7 @@ func readHandler(e *eventhub.Event, handler func(*bindings.ReadResponse) ([]byte
return err
}
// AzureEventHubs allows sending/receiving Azure Event Hubs events
// AzureEventHubs allows sending/receiving Azure Event Hubs events.
type AzureEventHubs struct {
hub *eventhub.Hub
metadata *azureEventHubsMetadata
@ -120,12 +121,12 @@ func (m azureEventHubsMetadata) partitioned() bool {
return m.partitionID != ""
}
// NewAzureEventHubs returns a new Azure Event hubs instance
// NewAzureEventHubs returns a new Azure Event hubs instance.
func NewAzureEventHubs(logger logger.Logger) *AzureEventHubs {
return &AzureEventHubs{logger: logger}
}
// Init performs metadata init
// Init performs metadata init.
func (a *AzureEventHubs) Init(metadata bindings.Metadata) error {
m, err := parseMetadata(metadata)
if err != nil {
@ -197,7 +198,7 @@ func (a *AzureEventHubs) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation}
}
// Write posts an event hubs message
// Write posts an event hubs message.
func (a *AzureEventHubs) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
event := &eventhub.Event{
Data: req.Data,
@ -221,7 +222,7 @@ func (a *AzureEventHubs) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeRe
return nil, nil
}
// Read gets messages from eventhubs in a non-blocking fashion
// Read gets messages from eventhubs in a non-blocking fashion.
func (a *AzureEventHubs) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
if !a.metadata.partitioned() {
if err := a.RegisterEventProcessor(handler); err != nil {
@ -243,7 +244,7 @@ func (a *AzureEventHubs) Read(handler func(*bindings.ReadResponse) ([]byte, erro
return nil
}
// RegisterPartitionedEventProcessor - receive eventhub messages by partitionID
// RegisterPartitionedEventProcessor - receive eventhub messages by partitionID.
func (a *AzureEventHubs) RegisterPartitionedEventProcessor(handler func(*bindings.ReadResponse) ([]byte, error)) error {
ctx := context.Background()
@ -292,7 +293,7 @@ func contains(arr []string, str string) bool {
}
// RegisterEventProcessor - receive eventhub messages by eventprocessor
// host by balancing partitions
// host by balancing partitions.
func (a *AzureEventHubs) RegisterEventProcessor(handler func(*bindings.ReadResponse) ([]byte, error)) error {
cred, err := azblob.NewSharedKeyCredential(a.metadata.storageAccountName, a.metadata.storageAccountKey)
if err != nil {

View File

@ -8,8 +8,9 @@ package eventhubs
import (
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func TestParseMetadata(t *testing.T) {

View File

@ -11,6 +11,7 @@ import (
"time"
servicebus "github.com/Azure/azure-service-bus-go"
"github.com/dapr/components-contrib/bindings"
contrib_metadata "github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
@ -25,7 +26,7 @@ const (
AzureServiceBusDefaultMessageTimeToLive = time.Hour * 24 * 14
)
// AzureServiceBusQueues is an input/output binding reading from and sending events to Azure Service Bus queues
// AzureServiceBusQueues is an input/output binding reading from and sending events to Azure Service Bus queues.
type AzureServiceBusQueues struct {
metadata *serviceBusQueuesMetadata
client *servicebus.Queue
@ -39,12 +40,12 @@ type serviceBusQueuesMetadata struct {
ttl time.Duration
}
// NewAzureServiceBusQueues returns a new AzureServiceBusQueues instance
// NewAzureServiceBusQueues returns a new AzureServiceBusQueues instance.
func NewAzureServiceBusQueues(logger logger.Logger) *AzureServiceBusQueues {
return &AzureServiceBusQueues{logger: logger}
}
// Init parses connection properties and creates a new Service Bus Queue client
// Init parses connection properties and creates a new Service Bus Queue client.
func (a *AzureServiceBusQueues) Init(metadata bindings.Metadata) error {
meta, err := a.parseMetadata(metadata)
if err != nil {

View File

@ -9,10 +9,11 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -14,10 +14,11 @@ import (
"strings"
"time"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/golang-jwt/jwt"
"github.com/pkg/errors"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
const (
@ -29,7 +30,7 @@ const (
userKey = "user"
)
// NewSignalR creates a new pub/sub based on Azure SignalR
// NewSignalR creates a new pub/sub based on Azure SignalR.
func NewSignalR(logger logger.Logger) *SignalR {
return &SignalR{
tokens: make(map[string]signalrCachedToken),
@ -43,7 +44,7 @@ type signalrCachedToken struct {
expiration time.Time
}
// SignalR is an output binding for Azure SignalR
// SignalR is an output binding for Azure SignalR.
type SignalR struct {
endpoint string
accessKey string
@ -55,7 +56,7 @@ type SignalR struct {
logger logger.Logger
}
// Init is responsible for initializing the SignalR output based on the metadata
// Init is responsible for initializing the SignalR output based on the metadata.
func (s *SignalR) Init(metadata bindings.Metadata) error {
connectionString, ok := metadata.Properties[connectionStringKey]
if !ok || connectionString == "" {

View File

@ -9,9 +9,10 @@ import (
"sync/atomic"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestConfigurationValid(t *testing.T) {

View File

@ -19,6 +19,7 @@ import (
"time"
"github.com/Azure/azure-storage-queue-go/azqueue"
"github.com/dapr/components-contrib/bindings"
contrib_metadata "github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
@ -32,14 +33,14 @@ type consumer struct {
callback func(*bindings.ReadResponse) ([]byte, error)
}
// QueueHelper enables injection for testnig
// QueueHelper enables injection for testnig.
type QueueHelper interface {
Init(accountName string, accountKey string, queueName string, decodeBase64 bool) error
Write(data []byte, ttl *time.Duration) error
Read(ctx context.Context, consumer *consumer) error
}
// AzureQueueHelper concrete impl of queue helper
// AzureQueueHelper concrete impl of queue helper.
type AzureQueueHelper struct {
credential *azqueue.SharedKeyCredential
queueURL azqueue.QueueURL
@ -48,7 +49,7 @@ type AzureQueueHelper struct {
decodeBase64 bool
}
// Init sets up this helper
// Init sets up this helper.
func (d *AzureQueueHelper) Init(accountName string, accountKey string, queueName string, decodeBase64 bool) error {
credential, err := azqueue.NewSharedKeyCredential(accountName, accountKey)
if err != nil {
@ -124,7 +125,7 @@ func (d *AzureQueueHelper) Read(ctx context.Context, consumer *consumer) error {
return nil
}
// NewAzureQueueHelper creates new helper
// NewAzureQueueHelper creates new helper.
func NewAzureQueueHelper(logger logger.Logger) QueueHelper {
return &AzureQueueHelper{
reqURI: "https://%s.queue.core.windows.net/%s",
@ -132,7 +133,7 @@ func NewAzureQueueHelper(logger logger.Logger) QueueHelper {
}
}
// AzureStorageQueues is an input/output binding reading from and sending events to Azure Storage queues
// AzureStorageQueues is an input/output binding reading from and sending events to Azure Storage queues.
type AzureStorageQueues struct {
metadata *storageQueuesMetadata
helper QueueHelper
@ -148,12 +149,12 @@ type storageQueuesMetadata struct {
ttl *time.Duration
}
// NewAzureStorageQueues returns a new AzureStorageQueues instance
// NewAzureStorageQueues returns a new AzureStorageQueues instance.
func NewAzureStorageQueues(logger logger.Logger) *AzureStorageQueues {
return &AzureStorageQueues{helper: NewAzureQueueHelper(logger), logger: logger}
}
// Init parses connection properties and creates a new Storage Queue client
// Init parses connection properties and creates a new Storage Queue client.
func (a *AzureStorageQueues) Init(metadata bindings.Metadata) error {
meta, err := a.parseMetadata(metadata)
if err != nil {

View File

@ -12,11 +12,12 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
type MockHelper struct {
@ -196,6 +197,7 @@ func TestReadQueueDecode(t *testing.T) {
}
// Uncomment this function to test reding from local queue
//nolint:godot
/* func TestReadLocalQueue(t *testing.T) {
a := AzureStorageQueues{helper: &AzureQueueHelper{reqURI: "http://127.0.0.1:10001/%s/%s"}}

View File

@ -9,13 +9,14 @@ import (
"fmt"
"time"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/pkg/errors"
"github.com/robfig/cron/v3"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// Binding represents Cron input binding
// Binding represents Cron input binding.
type Binding struct {
logger logger.Logger
schedule string
@ -25,7 +26,7 @@ type Binding struct {
var _ = bindings.InputBinding(&Binding{})
// NewCron returns a new Cron event input binding
// NewCron returns a new Cron event input binding.
func NewCron(logger logger.Logger) *Binding {
return &Binding{
logger: logger,
@ -54,7 +55,7 @@ func (b *Binding) Init(metadata bindings.Metadata) error {
return nil
}
// Read triggers the Cron scheduler
// Read triggers the Cron scheduler.
func (b *Binding) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
c := cron.New(cron.WithParser(b.parser))
id, err := c.AddFunc(b.schedule, func() {
@ -78,7 +79,7 @@ func (b *Binding) Read(handler func(*bindings.ReadResponse) ([]byte, error)) err
return nil
}
// Invoke exposes way to stop previously started cron
// Invoke exposes way to stop previously started cron.
func (b *Binding) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
b.logger.Debugf("operation: %v", req.Operation)
if req.Operation != bindings.DeleteOperation {
@ -95,7 +96,7 @@ func (b *Binding) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse,
}, nil
}
// Operations method returns the supported operations by this binding
// Operations method returns the supported operations by this binding.
func (b *Binding) Operations() []bindings.OperationKind {
return []bindings.OperationKind{
bindings.DeleteOperation,

View File

@ -9,9 +9,10 @@ import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func getTestMetadata(schedule string) bindings.Metadata {
@ -32,7 +33,7 @@ func getNewCron() *Binding {
return NewCron(l)
}
// go test -v -timeout 15s -count=1 ./bindings/cron/
// go test -v -timeout 15s -count=1 ./bindings/cron/.
func TestCronInitSuccess(t *testing.T) {
c := getNewCron()
err := c.Init(getTestMetadata("@every 1h"))
@ -52,7 +53,7 @@ func TestCronInitFailure(t *testing.T) {
}
// TestLongRead
// go test -v -count=1 -timeout 15s -run TestLongRead ./bindings/cron/
// go test -v -count=1 -timeout 15s -run TestLongRead ./bindings/cron/.
func TestCronReadWithDeleteInvoke(t *testing.T) {
c := getNewCron()
schedule := "@every 1s"

View File

@ -10,13 +10,14 @@ import (
"encoding/json"
"cloud.google.com/go/storage"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/google/uuid"
"google.golang.org/api/option"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// GCPStorage allows saving data to GCP bucket storage
// GCPStorage allows saving data to GCP bucket storage.
type GCPStorage struct {
metadata gcpMetadata
client *storage.Client
@ -37,12 +38,12 @@ type gcpMetadata struct {
ClientCertURL string `json:"client_x509_cert_url"`
}
// NewGCPStorage returns a new GCP storage instance
// NewGCPStorage returns a new GCP storage instance.
func NewGCPStorage(logger logger.Logger) *GCPStorage {
return &GCPStorage{logger: logger}
}
// Init performs connection parsing
// Init performs connection parsing.
func (g *GCPStorage) Init(metadata bindings.Metadata) error {
b, err := g.parseMetadata(metadata)
if err != nil {

View File

@ -9,9 +9,10 @@ import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestInit(t *testing.T) {

View File

@ -11,9 +11,10 @@ import (
"fmt"
"cloud.google.com/go/pubsub"
"google.golang.org/api/option"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"google.golang.org/api/option"
)
const (
@ -22,7 +23,7 @@ const (
topic = "topic"
)
// GCPPubSub is an input/output binding for GCP Pub Sub
// GCPPubSub is an input/output binding for GCP Pub Sub.
type GCPPubSub struct {
client *pubsub.Client
metadata *pubSubMetadata
@ -44,12 +45,12 @@ type pubSubMetadata struct {
ClientCertURL string `json:"client_x509_cert_url"`
}
// NewGCPPubSub returns a new GCPPubSub instance
// NewGCPPubSub returns a new GCPPubSub instance.
func NewGCPPubSub(logger logger.Logger) *GCPPubSub {
return &GCPPubSub{logger: logger}
}
// Init parses metadata and creates a new Pub Sub client
// Init parses metadata and creates a new Pub Sub client.
func (g *GCPPubSub) Init(metadata bindings.Metadata) error {
b, err := g.parseMetadata(metadata)
if err != nil {

View File

@ -9,9 +9,10 @@ import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestInit(t *testing.T) {

View File

@ -13,20 +13,21 @@ import (
"strings"
"time"
graphql "github.com/machinebox/graphql"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
graphql "github.com/machinebox/graphql"
)
const (
// configurations to connect to GraphQL
// configurations to connect to GraphQL.
connectionEndPointKey = "endpoint"
// keys from request's metadata
// keys from request's metadata.
commandQuery = "query"
commandMutation = "mutation"
// keys from response's metadata
// keys from response's metadata.
respOpKey = "operation"
respStartTimeKey = "start-time"
respEndTimeKey = "end-time"
@ -36,7 +37,7 @@ const (
MutationOperation bindings.OperationKind = "mutation"
)
// GraphQL represents GraphQL output bindings
// GraphQL represents GraphQL output bindings.
type GraphQL struct {
client *graphql.Client
header map[string]string
@ -45,12 +46,12 @@ type GraphQL struct {
var _ = bindings.OutputBinding(&GraphQL{})
// NewGraphQL returns a new GraphQL binding instance
// NewGraphQL returns a new GraphQL binding instance.
func NewGraphQL(logger logger.Logger) *GraphQL {
return &GraphQL{logger: logger}
}
// Init initializes the GraphQL binding
// Init initializes the GraphQL binding.
func (gql *GraphQL) Init(metadata bindings.Metadata) error {
gql.logger.Debug("GraphQL Error: Initializing GraphQL binding")
@ -74,7 +75,7 @@ func (gql *GraphQL) Init(metadata bindings.Metadata) error {
return nil
}
// Operations returns list of operations supported by GraphQL binding
// Operations returns list of operations supported by GraphQL binding.
func (gql *GraphQL) Operations() []bindings.OperationKind {
return []bindings.OperationKind{
QueryOperation,
@ -82,7 +83,7 @@ func (gql *GraphQL) Operations() []bindings.OperationKind {
}
}
// Invoke handles all invoke operations
// Invoke handles all invoke operations.
func (gql *GraphQL) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
if req == nil {
return nil, fmt.Errorf("GraphQL Error: Invoke request required")

View File

@ -36,12 +36,12 @@ type httpMetadata struct {
URL string `mapstructure:"url"`
}
// NewHTTP returns a new HTTPSource
// NewHTTP returns a new HTTPSource.
func NewHTTP(logger logger.Logger) *HTTPSource {
return &HTTPSource{logger: logger}
}
// Init performs metadata parsing
// Init performs metadata parsing.
func (h *HTTPSource) Init(metadata bindings.Metadata) error {
if err := mapstructure.Decode(metadata.Properties, &h.metadata); err != nil {
return err

View File

@ -12,10 +12,11 @@ import (
"strings"
"testing"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/dapr/kit/logger"
"github.com/dapr/components-contrib/bindings"
binding_http "github.com/dapr/components-contrib/bindings/http"
)

View File

@ -11,13 +11,14 @@ import (
"errors"
"fmt"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
influxdb2 "github.com/influxdata/influxdb-client-go"
"github.com/influxdata/influxdb-client-go/api"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// Influx allows writing to InfluxDB
// Influx allows writing to InfluxDB.
type Influx struct {
metadata *influxMetadata
client influxdb2.Client
@ -32,12 +33,12 @@ type influxMetadata struct {
Bucket string `json:"bucket"`
}
// NewInflux returns a new kafka binding instance
// NewInflux returns a new kafka binding instance.
func NewInflux(logger logger.Logger) *Influx {
return &Influx{logger: logger}
}
// Init does metadata parsing and connection establishment
// Init does metadata parsing and connection establishment.
func (i *Influx) Init(metadata bindings.Metadata) error {
influxMeta, err := i.getInfluxMetadata(metadata)
if err != nil {
@ -68,7 +69,7 @@ func (i *Influx) Init(metadata bindings.Metadata) error {
return nil
}
// GetInfluxMetadata returns new Influx metadata
// GetInfluxMetadata returns new Influx metadata.
func (i *Influx) getInfluxMetadata(metadata bindings.Metadata) (*influxMetadata, error) {
b, err := json.Marshal(metadata.Properties)
if err != nil {
@ -84,12 +85,12 @@ func (i *Influx) getInfluxMetadata(metadata bindings.Metadata) (*influxMetadata,
return &iMetadata, nil
}
// Operations returns supported operations
// Operations returns supported operations.
func (i *Influx) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation}
}
// Invoke called on supported operations
// Invoke called on supported operations.
func (i *Influx) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
var jsonPoint map[string]interface{}
err := json.Unmarshal(req.Data, &jsonPoint)

View File

@ -8,9 +8,10 @@ package influx
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -5,7 +5,7 @@
package bindings
// InputBinding is the interface to define a binding that triggers on incoming events
// InputBinding is the interface to define a binding that triggers on incoming events.
type InputBinding interface {
// Init passes connection and properties metadata to the binding implementation
Init(metadata Metadata) error

View File

@ -18,6 +18,7 @@ import (
"syscall"
"github.com/Shopify/sarama"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
@ -26,7 +27,7 @@ const (
key = "partitionKey"
)
// Kafka allows reading/writing to a Kafka consumer group
// Kafka allows reading/writing to a Kafka consumer group.
type Kafka struct {
producer sarama.SyncProducer
topics []string
@ -78,12 +79,12 @@ func (consumer *consumer) Setup(sarama.ConsumerGroupSession) error {
return nil
}
// NewKafka returns a new kafka binding instance
// NewKafka returns a new kafka binding instance.
func NewKafka(logger logger.Logger) *Kafka {
return &Kafka{logger: logger}
}
// Init does metadata parsing and connection establishment
// Init does metadata parsing and connection establishment.
func (k *Kafka) Init(metadata bindings.Metadata) error {
meta, err := k.getKafkaMetadata(metadata)
if err != nil {
@ -133,7 +134,7 @@ func (k *Kafka) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, e
return nil, nil
}
// GetKafkaMetadata returns new Kafka metadata
// GetKafkaMetadata returns new Kafka metadata.
func (k *Kafka) getKafkaMetadata(metadata bindings.Metadata) (*kafkaMetadata, error) {
meta := kafkaMetadata{}
meta.ConsumerGroup = metadata.Properties["consumerGroup"]

View File

@ -9,11 +9,13 @@ import (
"errors"
"testing"
"github.com/Shopify/sarama"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/Shopify/sarama"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
func TestParseMetadata(t *testing.T) {

View File

@ -14,13 +14,14 @@ import (
"syscall"
"time"
kubeclient "github.com/dapr/components-contrib/authentication/kubernetes"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/cache"
kubeclient "github.com/dapr/components-contrib/authentication/kubernetes"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
type kubernetesInput struct {
@ -38,7 +39,7 @@ type EventResponse struct {
var _ = bindings.InputBinding(&kubernetesInput{})
// NewKubernetes returns a new Kubernetes event input binding
// NewKubernetes returns a new Kubernetes event input binding.
func NewKubernetes(logger logger.Logger) bindings.InputBinding {
return &kubernetesInput{logger: logger}
}

View File

@ -9,9 +9,10 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -16,22 +16,23 @@ import (
"strconv"
securejoin "github.com/cyphar/filepath-securejoin"
"github.com/google/uuid"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/google/uuid"
)
const (
fileNameMetadataKey = "fileName"
)
// LocalStorage allows saving files to disk
// LocalStorage allows saving files to disk.
type LocalStorage struct {
metadata *Metadata
logger logger.Logger
}
// Metadata defines the metadata
// Metadata defines the metadata.
type Metadata struct {
RootPath string `json:"rootPath"`
}
@ -40,12 +41,12 @@ type createResponse struct {
FileName string `json:"fileName"`
}
// NewLocalStorage returns a new LocalStorage instance
// NewLocalStorage returns a new LocalStorage instance.
func NewLocalStorage(logger logger.Logger) *LocalStorage {
return &LocalStorage{logger: logger}
}
// Init performs metadata parsing
// Init performs metadata parsing.
func (ls *LocalStorage) Init(metadata bindings.Metadata) error {
m, err := ls.parseMetadata(metadata)
if err != nil {
@ -77,7 +78,7 @@ func (ls *LocalStorage) parseMetadata(metadata bindings.Metadata) (*Metadata, er
return &m, nil
}
// Operations enumerates supported binding operations
// Operations enumerates supported binding operations.
func (ls *LocalStorage) Operations() []bindings.OperationKind {
return []bindings.OperationKind{
bindings.CreateOperation,
@ -231,7 +232,7 @@ func walkPath(root string) ([]string, error) {
return files, err
}
// Invoke is called for output bindings
// Invoke is called for output bindings.
func (ls *LocalStorage) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
filename := ""
if val, ok := req.Metadata[fileNameMetadataKey]; ok && val != "" {

View File

@ -8,9 +8,10 @@ package localstorage
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -5,7 +5,7 @@
package bindings
// Metadata represents a set of binding specific properties
// Metadata represents a set of binding specific properties.
type Metadata struct {
Name string
Properties map[string]string `json:"properties"`

View File

@ -27,7 +27,7 @@ import (
)
const (
// Keys
// Keys.
mqttURL = "url"
mqttTopic = "topic"
mqttQOS = "qos"
@ -39,17 +39,17 @@ const (
mqttClientKey = "clientKey"
mqttBackOffMaxRetries = "backOffMaxRetries"
// errors
// errors.
errorMsgPrefix = "mqtt binding error:"
// Defaults
// Defaults.
defaultQOS = 0
defaultRetain = false
defaultWait = 3 * time.Second
defaultCleanSession = true
)
// MQTT allows sending and receiving data to/from an MQTT broker
// MQTT allows sending and receiving data to/from an MQTT broker.
type MQTT struct {
producer mqtt.Client
consumer mqtt.Client
@ -61,7 +61,7 @@ type MQTT struct {
backOff backoff.BackOff
}
// NewMQTT returns a new MQTT instance
// NewMQTT returns a new MQTT instance.
func NewMQTT(logger logger.Logger) *MQTT {
return &MQTT{logger: logger}
}
@ -153,7 +153,7 @@ func parseMQTTMetaData(md bindings.Metadata) (*metadata, error) {
return &m, nil
}
// Init does MQTT connection parsing
// Init does MQTT connection parsing.
func (m *MQTT) Init(metadata bindings.Metadata) error {
mqttMeta, err := parseMQTTMetaData(metadata)
if err != nil {
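Editor's note: a short sketch of wiring the metadata keys listed above ("url", "topic", "qos") into this binding. The broker address and topic are placeholders, the import path is assumed, and Init will try to reach the broker, so this only succeeds against a running MQTT server; unset keys fall back to the defaults shown above.

package main

import (
	"github.com/dapr/components-contrib/bindings"
	"github.com/dapr/components-contrib/bindings/mqtt"
	"github.com/dapr/kit/logger"
)

func main() {
	m := mqtt.NewMQTT(logger.NewLogger("sample"))
	err := m.Init(bindings.Metadata{Properties: map[string]string{
		"url":   "tcp://localhost:1883", // placeholder broker address
		"topic": "mytopic",              // placeholder topic
		"qos":   "1",
	}})
	if err != nil {
		panic(err)
	}
}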

View File

@ -11,8 +11,9 @@ import (
"errors"
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
)
func getFakeProperties() map[string]string {

View File

@ -17,10 +17,11 @@ import (
"strconv"
"time"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/go-sql-driver/mysql"
"github.com/pkg/errors"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
const (
@ -29,7 +30,7 @@ const (
queryOperation bindings.OperationKind = "query"
closeOperation bindings.OperationKind = "close"
// configurations to connect to Mysql, either a data source name represent by URL
// configurations to connect to Mysql, either a data source name represent by URL.
connectionURLKey = "url"
// To connect to MySQL running in Azure over SSL you have to download a
@ -38,19 +39,19 @@ const (
// When the user provides a pem path their connection string must end with
// &tls=custom
// The connection string should be in the following format
// "%s:%s@tcp(%s:3306)/%s?allowNativePasswords=true&tls=custom",'myadmin@mydemoserver', 'yourpassword', 'mydemoserver.mysql.database.azure.com', 'targetdb'
// "%s:%s@tcp(%s:3306)/%s?allowNativePasswords=true&tls=custom",'myadmin@mydemoserver', 'yourpassword', 'mydemoserver.mysql.database.azure.com', 'targetdb'.
pemPathKey = "pemPath"
// other general settings for DB connections
// other general settings for DB connections.
maxIdleConnsKey = "maxIdleConns"
maxOpenConnsKey = "maxOpenConns"
connMaxLifetimeKey = "connMaxLifetime"
connMaxIdleTimeKey = "connMaxIdleTime"
// keys from request's metadata
// keys from request's metadata.
commandSQLKey = "sql"
// keys from response's metadata
// keys from response's metadata.
respOpKey = "operation"
respSQLKey = "sql"
respStartTimeKey = "start-time"
@ -59,7 +60,7 @@ const (
respDurationKey = "duration"
)
// Mysql represents MySQL output bindings
// Mysql represents MySQL output bindings.
type Mysql struct {
db *sql.DB
logger logger.Logger
@ -67,12 +68,12 @@ type Mysql struct {
var _ = bindings.OutputBinding(&Mysql{})
// NewMysql returns a new MySQL output binding
// NewMysql returns a new MySQL output binding.
func NewMysql(logger logger.Logger) *Mysql {
return &Mysql{logger: logger}
}
// Init initializes the MySQL binding
// Init initializes the MySQL binding.
func (m *Mysql) Init(metadata bindings.Metadata) error {
m.logger.Debug("Initializing MySql binding")
@ -117,7 +118,7 @@ func (m *Mysql) Init(metadata bindings.Metadata) error {
return nil
}
// Invoke handles all invoke operations
// Invoke handles all invoke operations.
func (m *Mysql) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
if req == nil {
return nil, errors.Errorf("invoke request required")
@ -174,7 +175,7 @@ func (m *Mysql) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, e
return resp, nil
}
// Operations returns list of operations supported by Mysql binding
// Operations returns list of operations supported by Mysql binding.
func (m *Mysql) Operations() []bindings.OperationKind {
return []bindings.OperationKind{
execOperation,
@ -183,7 +184,7 @@ func (m *Mysql) Operations() []bindings.OperationKind {
}
}
// Close will close the DB
// Close will close the DB.
func (m *Mysql) Close() error {
if m.db != nil {
return m.db.Close()
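Editor's note: the pem/tls=custom comment above describes the Azure-over-SSL setup; here is a minimal standalone sketch of what that looks like with go-sql-driver/mysql. The pem path, credentials, and server name are the placeholder values already used in the comment, not real configuration.

package main

import (
	"crypto/tls"
	"crypto/x509"
	"database/sql"
	"fmt"
	"os"

	"github.com/go-sql-driver/mysql"
)

func main() {
	// Load the CA certificate downloaded from Azure (placeholder path).
	pem, err := os.ReadFile("/path/to/BaltimoreCyberTrustRoot.crt.pem")
	if err != nil {
		panic(err)
	}
	pool := x509.NewCertPool()
	if ok := pool.AppendCertsFromPEM(pem); !ok {
		panic("failed to append PEM")
	}
	// Register the TLS config under the name referenced by &tls=custom.
	if err := mysql.RegisterTLSConfig("custom", &tls.Config{RootCAs: pool}); err != nil {
		panic(err)
	}
	// Connection string in the format described in the comment above (placeholder values).
	dsn := fmt.Sprintf("%s:%s@tcp(%s:3306)/%s?allowNativePasswords=true&tls=custom",
		"myadmin@mydemoserver", "yourpassword", "mydemoserver.mysql.database.azure.com", "targetdb")
	db, err := sql.Open("mysql", dsn)
	if err != nil {
		panic(err)
	}
	defer db.Close()
}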

View File

@ -12,9 +12,10 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
const (

View File

@ -7,9 +7,10 @@ import (
"time"
"github.com/DATA-DOG/go-sqlmock"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestQuery(t *testing.T) {

View File

@ -5,7 +5,7 @@
package bindings
// OutputBinding is the interface for an output binding, allowing users to invoke remote systems with optional payloads
// OutputBinding is the interface for an output binding, allowing users to invoke remote systems with optional payloads.
type OutputBinding interface {
Init(metadata Metadata) error
Invoke(req *InvokeRequest) (*InvokeResponse, error)
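Editor's note: for orientation, a minimal sketch of a type satisfying this interface. The excerpt is truncated, so the Operations() []OperationKind method is assumed to complete the interface (every binding in this diff implements it); the echo behaviour is purely illustrative.

package echo

import "github.com/dapr/components-contrib/bindings"

// Echo is an illustrative output binding that returns the request payload unchanged.
type Echo struct{}

// Init would normally parse metadata; this sketch has nothing to configure.
func (e *Echo) Init(metadata bindings.Metadata) error { return nil }

// Invoke echoes the request data and metadata back to the caller.
func (e *Echo) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
	return &bindings.InvokeResponse{Data: req.Data, Metadata: req.Metadata}, nil
}

// Operations advertises the single operation this sketch supports.
func (e *Echo) Operations() []bindings.OperationKind {
	return []bindings.OperationKind{bindings.CreateOperation}
}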

View File

@ -11,10 +11,11 @@ import (
"strconv"
"time"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/jackc/pgx/v4/pgxpool"
"github.com/pkg/errors"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// List of operations.
@ -27,7 +28,7 @@ const (
commandSQLKey = "sql"
)
// Postgres represents PostgreSQL output binding
// Postgres represents PostgreSQL output binding.
type Postgres struct {
logger logger.Logger
db *pgxpool.Pool
@ -35,12 +36,12 @@ type Postgres struct {
var _ = bindings.OutputBinding(&Postgres{})
// NewPostgres returns a new PostgreSQL output binding
// NewPostgres returns a new PostgreSQL output binding.
func NewPostgres(logger logger.Logger) *Postgres {
return &Postgres{logger: logger}
}
// Init initializes the PostgreSql binding
// Init initializes the PostgreSql binding.
func (p *Postgres) Init(metadata bindings.Metadata) error {
url, ok := metadata.Properties[connectionURLKey]
if !ok || url == "" {
@ -60,7 +61,7 @@ func (p *Postgres) Init(metadata bindings.Metadata) error {
return nil
}
// Operations returns list of operations supported by PostgreSql binding
// Operations returns list of operations supported by PostgreSql binding.
func (p *Postgres) Operations() []bindings.OperationKind {
return []bindings.OperationKind{
execOperation,
@ -69,7 +70,7 @@ func (p *Postgres) Operations() []bindings.OperationKind {
}
}
// Invoke handles all invoke operations
// Invoke handles all invoke operations.
func (p *Postgres) Invoke(req *bindings.InvokeRequest) (resp *bindings.InvokeResponse, err error) {
if req == nil {
return nil, errors.Errorf("invoke request required")
@ -129,7 +130,7 @@ func (p *Postgres) Invoke(req *bindings.InvokeRequest) (resp *bindings.InvokeRes
return resp, nil
}
// Close close PostgreSql instance
// Close close PostgreSql instance.
func (p *Postgres) Close() error {
if p.db == nil {
return nil

View File

@ -11,9 +11,10 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
const (

View File

@ -10,18 +10,19 @@ import (
"fmt"
"strconv"
"github.com/keighl/postmark"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/keighl/postmark"
)
// Postmark allows sending of emails using the 3rd party Postmark service
// Postmark allows sending of emails using the 3rd party Postmark service.
type Postmark struct {
metadata postmarkMetadata
logger logger.Logger
}
// Our metadata holds standard email properties
// Our metadata holds standard email properties.
type postmarkMetadata struct {
ServerToken string `json:"serverToken"`
AccountToken string `json:"accountToken"`
@ -32,12 +33,12 @@ type postmarkMetadata struct {
EmailBcc string `json:"emailBcc"`
}
// NewPostmark returns a new Postmark bindings instance
// NewPostmark returns a new Postmark bindings instance.
func NewPostmark(logger logger.Logger) *Postmark {
return &Postmark{logger: logger}
}
// Helper to parse metadata
// Helper to parse metadata.
func (p *Postmark) parseMetadata(meta bindings.Metadata) (postmarkMetadata, error) {
pMeta := postmarkMetadata{}
@ -63,7 +64,7 @@ func (p *Postmark) parseMetadata(meta bindings.Metadata) (postmarkMetadata, erro
return pMeta, nil
}
// Init does metadata parsing and not much else :)
// Init does metadata parsing and not much else :).
func (p *Postmark) Init(metadata bindings.Metadata) error {
// Parse input metadata
meta, err := p.parseMetadata(metadata)
@ -77,12 +78,12 @@ func (p *Postmark) Init(metadata bindings.Metadata) error {
return nil
}
// Operations returns list of operations supported by Postmark binding
// Operations returns list of operations supported by Postmark binding.
func (p *Postmark) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation}
}
// Invoke does the work of sending message to Postmark API
// Invoke does the work of sending message to Postmark API.
func (p *Postmark) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
// We allow two possible sources of the properties we need,
// the component metadata or request metadata, request takes priority if present
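Editor's note: the request-over-component precedence described above is a simple overlay. A hedged sketch of that idea as a standalone helper follows; this is not the binding's actual merge code, which is not shown in this diff.

package sample

// mergeProperties overlays request metadata on top of component metadata,
// so a key present (and non-empty) in the request wins. Illustrative only.
func mergeProperties(component, request map[string]string) map[string]string {
	merged := make(map[string]string, len(component)+len(request))
	for k, v := range component {
		merged[k] = v
	}
	for k, v := range request {
		if v != "" {
			merged[k] = v
		}
	}
	return merged
}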

View File

@ -8,9 +8,10 @@ package postmark
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -12,10 +12,11 @@ import (
"strconv"
"time"
"github.com/streadway/amqp"
"github.com/dapr/components-contrib/bindings"
contrib_metadata "github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
"github.com/streadway/amqp"
)
const (
@ -32,7 +33,7 @@ const (
defaultBitSize = 0
)
// RabbitMQ allows sending/receiving data to/from RabbitMQ
// RabbitMQ allows sending/receiving data to/from RabbitMQ.
type RabbitMQ struct {
connection *amqp.Connection
channel *amqp.Channel
@ -41,7 +42,7 @@ type RabbitMQ struct {
queue amqp.Queue
}
// Metadata is the rabbitmq config
// Metadata is the rabbitmq config.
type rabbitMQMetadata struct {
Host string `json:"host"`
QueueName string `json:"queueName"`
@ -53,12 +54,12 @@ type rabbitMQMetadata struct {
defaultQueueTTL *time.Duration
}
// NewRabbitMQ returns a new rabbitmq instance
// NewRabbitMQ returns a new rabbitmq instance.
func NewRabbitMQ(logger logger.Logger) *RabbitMQ {
return &RabbitMQ{logger: logger}
}
// Init does metadata parsing and connection creation
// Init does metadata parsing and connection creation.
func (r *RabbitMQ) Init(metadata bindings.Metadata) error {
err := r.parseMetadata(metadata)
if err != nil {

View File

@ -9,10 +9,11 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -17,7 +17,7 @@ import (
"github.com/dapr/kit/logger"
)
// Redis is a redis output binding
// Redis is a redis output binding.
type Redis struct {
client redis.UniversalClient
clientSettings *rediscomponent.Settings
@ -27,12 +27,12 @@ type Redis struct {
cancel context.CancelFunc
}
// NewRedis returns a new redis bindings instance
// NewRedis returns a new redis bindings instance.
func NewRedis(logger logger.Logger) *Redis {
return &Redis{logger: logger}
}
// Init performs metadata parsing and connection creation
// Init performs metadata parsing and connection creation.
func (r *Redis) Init(meta bindings.Metadata) (err error) {
r.client, r.clientSettings, err = rediscomponent.ParseClientFromProperties(meta.Properties, nil)
if err != nil {

View File

@ -10,14 +10,14 @@ import (
"strconv"
)
// InvokeRequest is the object given to a dapr output binding
// InvokeRequest is the object given to a dapr output binding.
type InvokeRequest struct {
Data []byte `json:"data"`
Metadata map[string]string `json:"metadata"`
Operation OperationKind `json:"operation"`
}
// OperationKind defines an output binding operation
// OperationKind defines an output binding operation.
type OperationKind string
// Non exhaustive list of operations. A binding can add operations that are not in this list.
@ -28,7 +28,7 @@ const (
ListOperation OperationKind = "list"
)
// GetMetadataAsBool parses metadata as bool
// GetMetadataAsBool parses metadata as bool.
func (r *InvokeRequest) GetMetadataAsBool(key string) (bool, error) {
if val, ok := r.Metadata[key]; ok {
boolVal, err := strconv.ParseBool(val)
@ -42,7 +42,7 @@ func (r *InvokeRequest) GetMetadataAsBool(key string) (bool, error) {
return false, nil
}
// GetMetadataAsInt64 parses metadata as int64
// GetMetadataAsInt64 parses metadata as int64.
func (r *InvokeRequest) GetMetadataAsInt64(key string, bitSize int) (int64, error) {
if val, ok := r.Metadata[key]; ok {
intVal, err := strconv.ParseInt(val, 10, bitSize)

View File

@ -9,13 +9,13 @@ import (
"github.com/dapr/components-contrib/state"
)
// ReadResponse is the return object from an dapr input binding
// ReadResponse is the return object from an dapr input binding.
type ReadResponse struct {
Data []byte `json:"data"`
Metadata map[string]string `json:"metadata"`
}
// AppResponse is the object describing the response from user code after a bindings event
// AppResponse is the object describing the response from user code after a bindings event.
type AppResponse struct {
Data interface{} `json:"data"`
To []string `json:"to"`
@ -24,7 +24,7 @@ type AppResponse struct {
Concurrency string `json:"concurrency"`
}
// InvokeResponse is the response object returned from an output binding
// InvokeResponse is the response object returned from an output binding.
type InvokeResponse struct {
Data []byte `json:"data"`
Metadata map[string]string `json:"metadata"`

View File

@ -12,9 +12,10 @@ import (
"time"
r "github.com/dancannon/gorethink"
"github.com/pkg/errors"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/pkg/errors"
)
// Binding represents RethinkDB state change input binding which fires handler with
@ -26,7 +27,7 @@ type Binding struct {
stopCh chan bool
}
// StateConfig is the binding config
// StateConfig is the binding config.
type StateConfig struct {
r.ConnectOpts
Table string `json:"table"`
@ -34,7 +35,7 @@ type StateConfig struct {
var _ = bindings.InputBinding(&Binding{})
// NewRethinkDBStateChangeBinding returns a new RethinkDB actor event input binding
// NewRethinkDBStateChangeBinding returns a new RethinkDB actor event input binding.
func NewRethinkDBStateChangeBinding(logger logger.Logger) *Binding {
return &Binding{
logger: logger,
@ -42,7 +43,7 @@ func NewRethinkDBStateChangeBinding(logger logger.Logger) *Binding {
}
}
// Init initializes the RethinkDB binding
// Init initializes the RethinkDB binding.
func (b *Binding) Init(metadata bindings.Metadata) error {
cfg, err := metadataToConfig(metadata.Properties, b.logger)
if err != nil {
@ -59,7 +60,7 @@ func (b *Binding) Init(metadata bindings.Metadata) error {
return nil
}
// Read triggers the RethinkDB scheduler
// Read triggers the RethinkDB scheduler.
func (b *Binding) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
b.logger.Infof("subscribing to state changes in %s.%s...", b.config.Database, b.config.Table)
cursor, err := r.DB(b.config.Database).Table(b.config.Table).Changes(r.ChangesOpts{

View File

@ -10,9 +10,10 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func getTestMetadata() map[string]string {

View File

@ -12,9 +12,10 @@ import (
"strconv"
"strings"
"gopkg.in/gomail.v2"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"gopkg.in/gomail.v2"
)
const (
@ -24,13 +25,13 @@ const (
mailSeparator = ";"
)
// Mailer allows sending of emails using the Simple Mail Transfer Protocol
// Mailer allows sending of emails using the Simple Mail Transfer Protocol.
type Mailer struct {
metadata Metadata
logger logger.Logger
}
// Metadata holds standard email properties
// Metadata holds standard email properties.
type Metadata struct {
Host string `json:"host"`
Port int `json:"port"`
@ -45,12 +46,12 @@ type Metadata struct {
Priority int `json:"priority"`
}
// NewSMTP returns a new smtp binding instance
// NewSMTP returns a new smtp binding instance.
func NewSMTP(logger logger.Logger) *Mailer {
return &Mailer{logger: logger}
}
// Init smtp component (parse metadata)
// Init smtp component (parse metadata).
func (s *Mailer) Init(metadata bindings.Metadata) error {
// parse metadata
meta, err := s.parseMetadata(metadata)
@ -62,12 +63,12 @@ func (s *Mailer) Init(metadata bindings.Metadata) error {
return nil
}
// Operations returns the allowed binding operations
// Operations returns the allowed binding operations.
func (s *Mailer) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation}
}
// Invoke sends an email message
// Invoke sends an email message.
func (s *Mailer) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
// Merge config metadata with request metadata
metadata, err := s.metadata.mergeWithRequestMetadata(req)
@ -119,7 +120,7 @@ func (s *Mailer) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse,
return nil, nil
}
// Helper to parse metadata
// Helper to parse metadata.
func (s *Mailer) parseMetadata(meta bindings.Metadata) (Metadata, error) {
smtpMeta := Metadata{}
@ -170,7 +171,7 @@ func (s *Mailer) parseMetadata(meta bindings.Metadata) (Metadata, error) {
return smtpMeta, nil
}
// Helper to merge config and request metadata
// Helper to merge config and request metadata.
func (metadata Metadata) mergeWithRequestMetadata(req *bindings.InvokeRequest) (Metadata, error) {
merged := metadata

View File

@ -8,9 +8,10 @@ package smtp
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -12,19 +12,20 @@ import (
"strconv"
"strings"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/sendgrid/sendgrid-go"
"github.com/sendgrid/sendgrid-go/helpers/mail"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// SendGrid allows sending of emails using the 3rd party SendGrid service
// SendGrid allows sending of emails using the 3rd party SendGrid service.
type SendGrid struct {
metadata sendGridMetadata
logger logger.Logger
}
// Our metadata holds standard email properties
// Our metadata holds standard email properties.
type sendGridMetadata struct {
APIKey string `json:"apiKey"`
EmailFrom string `json:"emailFrom"`
@ -34,7 +35,7 @@ type sendGridMetadata struct {
EmailBcc string `json:"emailBcc"`
}
// Wrapper to help decode SendGrid API errors
// Wrapper to help decode SendGrid API errors.
type sendGridRestError struct {
Errors []struct {
Field interface{} `json:"field"`
@ -43,12 +44,12 @@ type sendGridRestError struct {
} `json:"errors"`
}
// NewSendGrid returns a new SendGrid bindings instance
// NewSendGrid returns a new SendGrid bindings instance.
func NewSendGrid(logger logger.Logger) *SendGrid {
return &SendGrid{logger: logger}
}
// Helper to parse metadata
// Helper to parse metadata.
func (sg *SendGrid) parseMetadata(meta bindings.Metadata) (sendGridMetadata, error) {
sgMeta := sendGridMetadata{}
@ -69,7 +70,7 @@ func (sg *SendGrid) parseMetadata(meta bindings.Metadata) (sendGridMetadata, err
return sgMeta, nil
}
// Init does metadata parsing and not much else :)
// Init does metadata parsing and not much else :).
func (sg *SendGrid) Init(metadata bindings.Metadata) error {
// Parse input metadata
meta, err := sg.parseMetadata(metadata)
@ -87,7 +88,7 @@ func (sg *SendGrid) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.CreateOperation}
}
// Write does the work of sending message to SendGrid API
// Write does the work of sending message to SendGrid API.
func (sg *SendGrid) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
// We allow two possible sources of the properties we need,
// the component metadata or request metadata, request takes priority if present

View File

@ -8,9 +8,10 @@ package sendgrid
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -13,9 +13,10 @@ import (
"sync/atomic"
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
type mockTransport struct {

View File

@ -14,14 +14,15 @@ import (
"syscall"
"time"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/dghubble/go-twitter/twitter"
"github.com/dghubble/oauth1"
"github.com/pkg/errors"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
// Binding represents Twitter input/output binding
// Binding represents Twitter input/output binding.
type Binding struct {
client *twitter.Client
query string
@ -30,12 +31,12 @@ type Binding struct {
var _ = bindings.InputBinding(&Binding{})
// NewTwitter returns a new Twitter event input binding
// NewTwitter returns a new Twitter event input binding.
func NewTwitter(logger logger.Logger) *Binding {
return &Binding{logger: logger}
}
// Init initializes the Twitter binding
// Init initializes the Twitter binding.
func (t *Binding) Init(metadata bindings.Metadata) error {
ck, f := metadata.Properties["consumerKey"]
if !f || ck == "" {
@ -70,12 +71,12 @@ func (t *Binding) Init(metadata bindings.Metadata) error {
return nil
}
// Operations returns list of operations supported by twitter binding
// Operations returns list of operations supported by twitter binding.
func (t *Binding) Operations() []bindings.OperationKind {
return []bindings.OperationKind{bindings.GetOperation}
}
// Read triggers the Twitter search and events on each result tweet
// Read triggers the Twitter search and events on each result tweet.
func (t *Binding) Read(handler func(*bindings.ReadResponse) ([]byte, error)) error {
if t.query == "" {
return nil
@ -144,7 +145,7 @@ func (t *Binding) Read(handler func(*bindings.ReadResponse) ([]byte, error)) err
return nil
}
// Invoke handles all operations
// Invoke handles all operations.
func (t *Binding) Invoke(req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
t.logger.Debugf("operation: %v", req.Operation)
if req.Metadata == nil {

View File

@ -10,10 +10,11 @@ import (
"os"
"testing"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/dghubble/go-twitter/twitter"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
)
const (
@ -44,7 +45,7 @@ func getRuntimeMetadata() map[string]string {
}
}
// go test -v -count=1 ./bindings/twitter/
// go test -v -count=1 ./bindings/twitter/.
func TestInit(t *testing.T) {
m := getTestMetadata()
tw := NewTwitter(logger.NewLogger("test"))
@ -53,7 +54,7 @@ func TestInit(t *testing.T) {
}
// TestReadError executes the Read method and fails before the Twitter API call
// go test -v -count=1 -run TestReadError ./bindings/twitter/
// go test -v -count=1 -run TestReadError ./bindings/twitter/.
func TestReadError(t *testing.T) {
tw := NewTwitter(logger.NewLogger("test"))
m := getTestMetadata()
@ -69,7 +70,7 @@ func TestReadError(t *testing.T) {
}
// TestRead executes the Read method which calls Twitter API
// env RUN_LIVE_TW_TEST=true go test -v -count=1 -run TestReed ./bindings/twitter/
// env RUN_LIVE_TW_TEST=true go test -v -count=1 -run TestReed ./bindings/twitter/.
func TestReed(t *testing.T) {
if os.Getenv("RUN_LIVE_TW_TEST") != "true" {
t.SkipNow() // skip this test until able to read credentials in test infra
@ -99,7 +100,7 @@ func TestReed(t *testing.T) {
// TestInvoke executes the Invoke method which calls Twitter API
// test tokens must be set
// env RUN_LIVE_TW_TEST=true go test -v -count=1 -run TestInvoke ./bindings/twitter/
// env RUN_LIVE_TW_TEST=true go test -v -count=1 -run TestInvoke ./bindings/twitter/.
func TestInvoke(t *testing.T) {
if os.Getenv("RUN_LIVE_TW_TEST") != "true" {
t.SkipNow() // skip this test until able to read credentials in test infra

View File

@ -10,6 +10,7 @@ import (
"errors"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
@ -17,7 +18,7 @@ import (
var ErrMissingGatewayAddr = errors.New("gatewayAddr is a required attribute")
// ClientFactory enables injection for testing
// ClientFactory enables injection for testing.
type ClientFactory interface {
Get(metadata bindings.Metadata) (zbc.Client, error)
}
@ -34,7 +35,7 @@ type clientMetadata struct {
UsePlaintextConnection bool `json:"usePlainTextConnection,string"`
}
// NewClientFactoryImpl returns a new ClientFactory instance
// NewClientFactoryImpl returns a new ClientFactory instance.
func NewClientFactoryImpl(logger logger.Logger) *ClientFactoryImpl {
return &ClientFactoryImpl{logger: logger}
}
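Editor's note: since ClientFactory exists to enable injection for testing, here is a minimal sketch of a fake factory as it might be used in tests. The type and field names are illustrative; only the Get signature is taken from the interface above, and the embedded client may be nil or a test double.

package sample

import (
	"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"

	"github.com/dapr/components-contrib/bindings"
)

// fakeClientFactory records the metadata it was handed and returns a canned client.
type fakeClientFactory struct {
	client   zbc.Client // may be nil or a test double
	metadata bindings.Metadata
}

// Get satisfies the ClientFactory interface shown above.
func (f *fakeClientFactory) Get(metadata bindings.Metadata) (zbc.Client, error) {
	f.metadata = metadata
	return f.client, nil
}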

View File

@ -9,9 +9,10 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestParseMetadata(t *testing.T) {

View File

@ -14,10 +14,11 @@ import (
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/camunda-cloud/zeebe/clients/go/pkg/entities"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
contrib_metadata "github.com/dapr/components-contrib/metadata"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
type mockActivateJobsClient struct {

View File

@ -13,9 +13,10 @@ import (
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/camunda-cloud/zeebe/clients/go/pkg/pb"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
type mockCancelInstanceClient struct {

View File

@ -10,13 +10,14 @@ import (
"fmt"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/bindings/zeebe"
"github.com/dapr/kit/logger"
)
const (
// operations
// operations.
TopologyOperation bindings.OperationKind = "topology"
DeployProcessOperation bindings.OperationKind = "deploy-process"
CreateInstanceOperation bindings.OperationKind = "create-instance"
@ -38,19 +39,19 @@ var (
}
)
// ZeebeCommand executes Zeebe commands
// ZeebeCommand executes Zeebe commands.
type ZeebeCommand struct {
clientFactory zeebe.ClientFactory
client zbc.Client
logger logger.Logger
}
// NewZeebeCommand returns a new ZeebeCommand instance
// NewZeebeCommand returns a new ZeebeCommand instance.
func NewZeebeCommand(logger logger.Logger) *ZeebeCommand {
return &ZeebeCommand{clientFactory: zeebe.NewClientFactoryImpl(logger), logger: logger}
}
// Init does metadata parsing and connection creation
// Init does metadata parsing and connection creation.
func (z *ZeebeCommand) Init(metadata bindings.Metadata) error {
client, err := z.clientFactory.Get(metadata)
if err != nil {

View File

@ -10,11 +10,12 @@ import (
"testing"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/components-contrib/bindings/zeebe"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
type mockClientFactory struct {

View File

@ -11,6 +11,7 @@ import (
"fmt"
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/dapr/components-contrib/bindings"
)

View File

@ -13,9 +13,10 @@ import (
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/camunda-cloud/zeebe/clients/go/pkg/pb"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
type mockCompleteJobClient struct {

View File

@ -12,6 +12,7 @@ import (
"fmt"
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/dapr/components-contrib/bindings"
)

View File

@ -13,9 +13,10 @@ import (
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/camunda-cloud/zeebe/clients/go/pkg/pb"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
type mockCreateInstanceClient struct {

View File

@ -15,7 +15,7 @@ import (
)
const (
// metadata
// metadata.
fileName = "fileName"
)

View File

@ -8,9 +8,10 @@ package command
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
func TestDeployProcess(t *testing.T) {

View File

@ -13,9 +13,10 @@ import (
"github.com/camunda-cloud/zeebe/clients/go/pkg/commands"
"github.com/camunda-cloud/zeebe/clients/go/pkg/pb"
"github.com/camunda-cloud/zeebe/clients/go/pkg/zbc"
"github.com/stretchr/testify/assert"
"github.com/dapr/components-contrib/bindings"
"github.com/dapr/kit/logger"
"github.com/stretchr/testify/assert"
)
type mockFailJobClient struct {

Some files were not shown because too many files have changed in this diff.