mirror of https://github.com/dapr/dapr.git

commit cf9e2ecea5
parent ce4600d177

Update code for new linter checks

Signed-off-by: joshvanl <me@joshvanl.dev>
@@ -36,6 +36,7 @@ issues:
     - ^pkg.*client.*clientset.*versioned.*
     - ^pkg.*client.*informers.*externalversions.*
     - ^pkg.*proto.*
+    - pkg/proto
 
 # output configuration options
 output:
Makefile

@@ -402,7 +402,7 @@ test-integration-parallel: test-deps
 # You can download version v1.64.6 at https://github.com/golangci/golangci-lint/releases/tag/v1.64.6
 .PHONY: lint
 lint: check-linter
-	$(GOLANGCI_LINT) run --build-tags=$(GOLANGCI_LINT_TAGS) --timeout=20m
+	$(GOLANGCI_LINT) run --build-tags=$(GOLANGCI_LINT_TAGS) --timeout=20m --max-same-issues 0 --max-issues-per-linter 0
 
 
 ################################################################################
@@ -23,6 +23,8 @@ import (
 )
 
 // ReminderPeriod contains the parsed period for a reminder.
+//
+//nolint:recvcheck
 type ReminderPeriod struct {
 	value string // Raw value as received from the user
 
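The //nolint:recvcheck directives added in this commit silence golangci-lint's recvcheck check, which reports types whose methods mix value and pointer receivers. A minimal sketch of the pattern being suppressed, with illustrative names rather than the dapr types; the mix is deliberate here because UnmarshalJSON must mutate its receiver:

package example

import "encoding/json"

//nolint:recvcheck
type period struct{ raw string }

// A value receiver is enough for read-only marshalling.
func (p period) MarshalJSON() ([]byte, error) { return json.Marshal(p.raw) }

// A pointer receiver is required so unmarshalling can write back into p.
// Mixing the two receiver kinds is exactly what recvcheck reports, hence
// the //nolint:recvcheck directive on the type declaration.
func (p *period) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.raw) }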
@@ -105,7 +105,7 @@ func TestReminderPeriodJSON(t *testing.T) {
 			out := &bytes.Buffer{}
 			err = json.Compact(out, got)
 			require.NoError(t, err)
-			assert.Equal(t, wantJSON, out.String())
+			assert.JSONEq(t, wantJSON, out.String())
 
 			// Unmarshal
 			dec := ReminderPeriod{}
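assert.JSONEq and require.JSONEq, used here and in the reminder test below, compare their two arguments as parsed JSON documents rather than as strings, so whitespace and key order no longer matter. A small illustrative test, not taken from the commit:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestJSONEqComparesDocuments(t *testing.T) {
	want := `{"period":"2s","repeats":-1}`
	// The same document with different formatting and key order still passes.
	got := "{\n  \"repeats\": -1,\n  \"period\": \"2s\"\n}"
	assert.JSONEq(t, want, got)
}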
@@ -28,6 +28,8 @@ import (
 )
 
 // Reminder represents a reminder or timer for a unique actor.
+//
+//nolint:recvcheck
 type Reminder struct {
 	ActorID   string `json:"actorID,omitempty"`
 	ActorType string `json:"actorType,omitempty"`
@@ -118,7 +118,7 @@ func TestReminderProperties(t *testing.T) {
 		require.Equal(t, -1, r.RepeatsLeft())
 		require.Equal(t, -1, r.Period.repeats)
 
-		for i := 0; i <= 3; i++ {
+		for i := range 4 {
 			nextTick, active := r.NextTick()
 			require.Equal(t, time1.Add((2*time.Second)*time.Duration(i)), nextTick)
 
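for i := range 4 is Go 1.22's range-over-integer form: it yields i = 0, 1, 2, 3, exactly what the old for i := 0; i <= 3; i++ loop produced. A standalone sketch:

package main

import "fmt"

func main() {
	// Ranging over an int n (Go 1.22+) iterates 0 .. n-1.
	for i := range 4 {
		fmt.Println(i) // prints 0, 1, 2, 3
	}
}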
@@ -207,7 +207,7 @@ func TestReminderJSON(t *testing.T) {
 			// Marshal
 			enc, err := json.Marshal(dec)
 			require.NoError(t, err)
-			require.Equal(t, payload, string(enc))
+			require.JSONEq(t, payload, string(enc))
 		})
 
 		t.Run("failed to unmarshal", func(t *testing.T) {
@@ -30,6 +30,8 @@ type GetReminderRequest struct {
 }
 
 // CreateReminderRequest is the request object to create a new reminder.
+//
+//nolint:recvcheck
 type CreateReminderRequest struct {
 	Name      string
 	ActorType string
@@ -96,6 +98,8 @@ func (req *CreateReminderRequest) UnmarshalJSON(data []byte) error {
 }
 
 // CreateTimerRequest is the request object to create a new timer.
+//
+//nolint:recvcheck
 type CreateTimerRequest struct {
 	Name      string
 	ActorType string
@@ -75,7 +75,7 @@ func (p *PubSubError) withTopicError(topic string, err error) *PubSubMetadataErr
 	}
 }
 
-func (p PubSubError) PublishMessage(topic string, err error) error {
+func (p *PubSubError) PublishMessage(topic string, err error) error {
 	return p.withTopicError(topic, err).build(
 		codes.Internal,
 		http.StatusInternalServerError,
@@ -96,7 +96,7 @@ func (p *PubSubError) PublishForbidden(topic, appID string, err error) error {
 // TestNotFound is specifically for the error we are expecting for the api_tests. The not found
 // expected error codes are different than the existing ones for PubSubNotFound, hence
 // why this one is needed
-func (p PubSubError) TestNotFound(topic string, err error) error {
+func (p *PubSubError) TestNotFound(topic string, err error) error {
 	return p.withTopicError(topic, err).build(
 		codes.NotFound,
 		http.StatusBadRequest,
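In this file the recvcheck finding is resolved the other way around: instead of a //nolint directive, the remaining value-receiver methods are switched to pointer receivers so the whole method set of PubSubError is uniform. A hedged sketch of the rule, with an illustrative type rather than the dapr one:

package example

// Once one method needs a pointer receiver, giving the others pointer
// receivers too keeps the method set consistent, which is what recvcheck
// enforces.
type topicError struct{ topic string }

func (e *topicError) withTopic(topic string) *topicError {
	e.topic = topic
	return e
}

// Before: `func (e topicError) Error() string` (value receiver) would be
// flagged, because withTopic above already uses a pointer receiver.
func (e *topicError) Error() string {
	return "publish failed for topic " + e.topic
}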
@@ -59,7 +59,7 @@ func TestGetActorState(t *testing.T) {
 	client := runtimev1pb.NewDaprClient(clientConn)
 
 	// act
-	res, err := client.GetActorState(context.Background(), &runtimev1pb.GetActorStateRequest{
+	res, err := client.GetActorState(t.Context(), &runtimev1pb.GetActorStateRequest{
 		ActorId:   "fakeActorID",
 		ActorType: "fakeActorType",
 		Key:       "key1",
@@ -101,7 +101,7 @@ func TestExecuteActorStateTransaction(t *testing.T) {
 
 	// act
 	res, err := client.ExecuteActorStateTransaction(
-		context.Background(),
+		t.Context(),
 		&runtimev1pb.ExecuteActorStateTransactionRequest{
 			ActorId:   "fakeActorID",
 			ActorType: "fakeActorType",
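Most of the remaining hunks apply a single mechanical change: context.Background() in tests is replaced with (*testing.T).Context(), added in Go 1.24, which returns a context that is cancelled just before the test's Cleanup functions run, and which newer golangci-lint checks suggest over the old form. A minimal sketch of the difference, not taken from the commit:

package example

import "testing"

func TestUsesTestContext(t *testing.T) {
	// t.Context (Go 1.24+) is scoped to the test: it is cancelled just
	// before Cleanup-registered functions run, so anything started with it
	// is torn down together with the test.
	ctx := t.Context()

	select {
	case <-ctx.Done():
		t.Fatal("context should still be alive while the test body runs")
	default:
		// Pass ctx to the client or server under test instead of
		// context.Background().
	}
}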
@@ -15,7 +15,6 @@ package grpc
 
 import (
 	"bytes"
-	"context"
 	"crypto/rand"
 	"errors"
 	"fmt"
@@ -58,7 +57,7 @@ func TestCryptoAlpha1(t *testing.T) {
 	t.Run("data and options in single chunk", func(t *testing.T) {
 		var enc []byte
 		t.Run("encrypt", func(t *testing.T) {
-			stream, err := client.EncryptAlpha1(context.Background())
+			stream, err := client.EncryptAlpha1(t.Context())
 			require.NoError(t, err)
 			defer stream.CloseSend()
 			send := []runtimev1pb.CryptoRequests{
@ -80,7 +79,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -103,7 +102,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
t.Run("one data chunk", func(t *testing.T) {
|
||||
var enc []byte
|
||||
t.Run("encrypt", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -127,7 +126,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -152,7 +151,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
t.Run("multiple data chunks", func(t *testing.T) {
|
||||
var enc []byte
|
||||
t.Run("encrypt", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -182,7 +181,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt - whole header in first chunk", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -211,7 +210,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt - header split in multiple chunks", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
defer stream.CloseSend()
|
||||
|
@ -253,7 +252,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
_, err := io.ReadFull(rand.Reader, largeData)
|
||||
require.NoError(t, err)
|
||||
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -276,7 +275,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt without header", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -297,7 +296,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
|
||||
t.Run("invalid sequence number", func(t *testing.T) {
|
||||
t.Run("encrypt", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -319,7 +318,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -341,7 +340,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
|
||||
t.Run("options in non-leading message", func(t *testing.T) {
|
||||
t.Run("encrypt", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -370,7 +369,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("decrypt", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -397,7 +396,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
|
||||
t.Run("encrypt without required options", func(t *testing.T) {
|
||||
t.Run("missing options", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -411,7 +410,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("missing component name", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -429,7 +428,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("missing key name", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -447,7 +446,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("missing algorithm", func(t *testing.T) {
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -467,7 +466,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
|
||||
t.Run("decrypt without required options", func(t *testing.T) {
|
||||
t.Run("missing options", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -481,7 +480,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("missing component name", func(t *testing.T) {
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
send := []runtimev1pb.CryptoRequests{
|
||||
|
@ -500,7 +499,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
t.Run("time out while waiting for first chunk", func(t *testing.T) {
|
||||
t.Run("encrypt", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
stream, err := client.EncryptAlpha1(context.Background())
|
||||
stream, err := client.EncryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
|
||||
|
@ -512,7 +511,7 @@ func TestCryptoAlpha1(t *testing.T) {
|
|||
|
||||
t.Run("decrypt", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
stream, err := client.DecryptAlpha1(context.Background())
|
||||
stream, err := client.DecryptAlpha1(t.Context())
|
||||
require.NoError(t, err)
|
||||
defer stream.CloseSend()
|
||||
|
||||
|
|
|
@@ -14,7 +14,6 @@ limitations under the License.
 package grpc
 
 import (
-	"context"
 	"errors"
 	"io"
 	"testing"
@ -50,7 +49,7 @@ func TestCallLocal(t *testing.T) {
|
|||
request := invokev1.NewInvokeMethodRequest("method")
|
||||
defer request.Close()
|
||||
|
||||
_, err := client.CallLocal(context.Background(), request.Proto())
|
||||
_, err := client.CallLocal(t.Context(), request.Proto())
|
||||
assert.Equal(t, codes.Internal, status.Code(err))
|
||||
})
|
||||
|
||||
|
@ -72,7 +71,7 @@ func TestCallLocal(t *testing.T) {
|
|||
Message: nil,
|
||||
}
|
||||
|
||||
_, err := client.CallLocal(context.Background(), request)
|
||||
_, err := client.CallLocal(t.Context(), request)
|
||||
assert.Equal(t, codes.InvalidArgument, status.Code(err))
|
||||
})
|
||||
|
||||
|
@ -97,7 +96,7 @@ func TestCallLocal(t *testing.T) {
|
|||
request := invokev1.NewInvokeMethodRequest("method")
|
||||
defer request.Close()
|
||||
|
||||
_, err := client.CallLocal(context.Background(), request.Proto())
|
||||
_, err := client.CallLocal(t.Context(), request.Proto())
|
||||
assert.Equal(t, codes.Internal, status.Code(err))
|
||||
})
|
||||
}
|
||||
|
@ -116,7 +115,7 @@ func TestCallLocalStream(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := internalv1pb.NewServiceInvocationClient(clientConn)
|
||||
st, err := client.CallLocalStream(context.Background())
|
||||
st, err := client.CallLocalStream(t.Context())
|
||||
require.NoError(t, err)
|
||||
|
||||
request := invokev1.NewInvokeMethodRequest("method")
|
||||
|
@ -146,7 +145,7 @@ func TestCallLocalStream(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := internalv1pb.NewServiceInvocationClient(clientConn)
|
||||
st, err := client.CallLocalStream(context.Background())
|
||||
st, err := client.CallLocalStream(t.Context())
|
||||
require.NoError(t, err)
|
||||
|
||||
err = st.Send(&internalv1pb.InternalInvokeRequestStream{
|
||||
|
@ -183,7 +182,7 @@ func TestCallLocalStream(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := internalv1pb.NewServiceInvocationClient(clientConn)
|
||||
st, err := client.CallLocalStream(context.Background())
|
||||
st, err := client.CallLocalStream(t.Context())
|
||||
require.NoError(t, err)
|
||||
|
||||
request := invokev1.NewInvokeMethodRequest("method").
|
||||
|
@ -221,7 +220,7 @@ func TestCallRemoteAppWithTracing(t *testing.T) {
|
|||
request := invokev1.NewInvokeMethodRequest("method")
|
||||
defer request.Close()
|
||||
|
||||
resp, err := client.CallLocal(context.Background(), request.Proto())
|
||||
resp, err := client.CallLocal(t.Context(), request.Proto())
|
||||
require.NoError(t, err)
|
||||
assert.NotEmpty(t, resp.GetMessage(), "failed to generate trace context with app call")
|
||||
}
|
||||
|
@ -239,7 +238,7 @@ func TestCallActorWithTracing(t *testing.T) {
|
|||
WithActor("test-actor", "actor-1")
|
||||
defer request.Close()
|
||||
|
||||
resp, err := client.CallActor(context.Background(), request.Proto())
|
||||
resp, err := client.CallActor(t.Context(), request.Proto())
|
||||
require.NoError(t, err)
|
||||
assert.NotEmpty(t, resp.GetMessage(), "failed to generate trace context with actor call")
|
||||
}
|
||||
|
|
|
@ -429,7 +429,7 @@ func TestAPIToken(t *testing.T) {
|
|||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
md := grpcMetadata.Pairs("dapr-api-token", token)
|
||||
ctx := grpcMetadata.NewOutgoingContext(context.Background(), md)
|
||||
ctx := grpcMetadata.NewOutgoingContext(t.Context(), md)
|
||||
|
||||
t.Run("unary", func(t *testing.T) {
|
||||
// act
|
||||
|
@ -496,7 +496,7 @@ func TestAPIToken(t *testing.T) {
|
|||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
md := grpcMetadata.Pairs("dapr-api-token", "bad, bad token")
|
||||
ctx := grpcMetadata.NewOutgoingContext(context.Background(), md)
|
||||
ctx := grpcMetadata.NewOutgoingContext(t.Context(), md)
|
||||
|
||||
t.Run("unary", func(t *testing.T) {
|
||||
// act
|
||||
|
@ -559,7 +559,7 @@ func TestAPIToken(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
t.Run("unary", func(t *testing.T) {
|
||||
// act
|
||||
|
@ -687,7 +687,7 @@ func TestInvokeServiceFromHTTPResponse(t *testing.T) {
|
|||
},
|
||||
}
|
||||
var header grpcMetadata.MD
|
||||
_, err := client.InvokeService(context.Background(), req, grpc.Header(&header))
|
||||
_, err := client.InvokeService(t.Context(), req, grpc.Header(&header))
|
||||
|
||||
// assert
|
||||
mockDirectMessaging.AssertNumberOfCalls(t, "Invoke", 1)
|
||||
|
@ -759,7 +759,7 @@ func TestInvokeServiceFromGRPCResponse(t *testing.T) {
|
|||
Data: &anypb.Any{Value: []byte("testData")},
|
||||
},
|
||||
}
|
||||
_, err := client.InvokeService(context.Background(), req)
|
||||
_, err := client.InvokeService(t.Context(), req)
|
||||
|
||||
// assert
|
||||
mockDirectMessaging.AssertNumberOfCalls(t, "Invoke", 1)
|
||||
|
@ -790,7 +790,7 @@ func TestSecretStoreNotConfigured(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.GetSecret(context.Background(), &runtimev1pb.GetSecretRequest{})
|
||||
_, err := client.GetSecret(t.Context(), &runtimev1pb.GetSecretRequest{})
|
||||
assert.Equal(t, codes.FailedPrecondition, status.Code(err))
|
||||
}
|
||||
|
||||
|
@ -928,7 +928,7 @@ func TestGetSecret(t *testing.T) {
|
|||
StoreName: tt.storeName,
|
||||
Key: tt.key,
|
||||
}
|
||||
resp, err := client.GetSecret(context.Background(), req)
|
||||
resp, err := client.GetSecret(t.Context(), req)
|
||||
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
|
@ -1004,7 +1004,7 @@ func TestGetBulkSecret(t *testing.T) {
|
|||
req := &runtimev1pb.GetBulkSecretRequest{
|
||||
StoreName: tt.storeName,
|
||||
}
|
||||
resp, err := client.GetBulkSecret(context.Background(), req)
|
||||
resp, err := client.GetBulkSecret(t.Context(), req)
|
||||
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
|
@ -1032,7 +1032,7 @@ func TestGetStateWhenStoreNotConfigured(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.GetState(context.Background(), &runtimev1pb.GetStateRequest{})
|
||||
_, err := client.GetState(t.Context(), &runtimev1pb.GetStateRequest{})
|
||||
assert.Equal(t, codes.FailedPrecondition, status.Code(err))
|
||||
}
|
||||
|
||||
|
@ -1227,7 +1227,7 @@ func TestSaveState(t *testing.T) {
|
|||
// test and assert
|
||||
for _, tt := range testCases {
|
||||
t.Run(tt.testName, func(t *testing.T) {
|
||||
_, err := client.SaveState(context.Background(), &runtimev1pb.SaveStateRequest{
|
||||
_, err := client.SaveState(t.Context(), &runtimev1pb.SaveStateRequest{
|
||||
StoreName: tt.storeName,
|
||||
States: tt.states,
|
||||
})
|
||||
|
@ -1321,7 +1321,7 @@ func TestGetState(t *testing.T) {
|
|||
Key: tt.key,
|
||||
}
|
||||
|
||||
resp, err := client.GetState(context.Background(), req)
|
||||
resp, err := client.GetState(t.Context(), req)
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, resp.GetData(), tt.expectedResponse.GetData(), "Expected response Data to be same")
|
||||
|
@ -1459,7 +1459,7 @@ func TestGetConfiguration(t *testing.T) {
|
|||
Keys: tt.keys,
|
||||
}
|
||||
|
||||
resp, err := client.GetConfigurationAlpha1(context.Background(), req)
|
||||
resp, err := client.GetConfigurationAlpha1(t.Context(), req)
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, resp.GetItems(), tt.expectedResponse.GetItems(), "Expected response items to be same")
|
||||
|
@ -1475,7 +1475,7 @@ func TestGetConfiguration(t *testing.T) {
|
|||
Keys: tt.keys,
|
||||
}
|
||||
|
||||
resp, err := client.GetConfiguration(context.Background(), req)
|
||||
resp, err := client.GetConfiguration(t.Context(), req)
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, resp.GetItems(), tt.expectedResponse.GetItems(), "Expected response items to be same")
|
||||
|
@ -1499,7 +1499,7 @@ func TestSubscribeConfiguration(t *testing.T) {
|
|||
}),
|
||||
mock.MatchedBy(func(f configuration.UpdateHandler) bool {
|
||||
if len(tempReq.Keys) == 1 && tempReq.Keys[0] == goodKey {
|
||||
go f(context.Background(), &configuration.UpdateEvent{
|
||||
go f(t.Context(), &configuration.UpdateEvent{
|
||||
Items: map[string]*configuration.Item{
|
||||
goodKey: {
|
||||
Value: "test-data",
|
||||
|
@ -1525,7 +1525,7 @@ func TestSubscribeConfiguration(t *testing.T) {
|
|||
}),
|
||||
mock.MatchedBy(func(f configuration.UpdateHandler) bool {
|
||||
if len(tempReq.Keys) == 2 && tempReq.Keys[0] == goodKey && tempReq.Keys[1] == goodKey2 {
|
||||
go f(context.Background(), &configuration.UpdateEvent{
|
||||
go f(t.Context(), &configuration.UpdateEvent{
|
||||
Items: map[string]*configuration.Item{
|
||||
goodKey: {
|
||||
Value: "test-data",
|
||||
|
@ -1634,7 +1634,7 @@ func TestSubscribeConfiguration(t *testing.T) {
|
|||
Keys: tt.keys,
|
||||
}
|
||||
|
||||
resp, _ := subscribeFn(context.Background(), req)
|
||||
resp, _ := subscribeFn(t.Context(), req)
|
||||
|
||||
if !tt.errorExcepted {
|
||||
// First message should contain the ID only
|
||||
|
@ -1714,7 +1714,7 @@ func TestUnSubscribeConfiguration(t *testing.T) {
|
|||
return
|
||||
default:
|
||||
}
|
||||
if err := f(context.Background(), &configuration.UpdateEvent{
|
||||
if err := f(t.Context(), &configuration.UpdateEvent{
|
||||
Items: map[string]*configuration.Item{
|
||||
goodKey: {
|
||||
Value: "test-data",
|
||||
|
@ -1748,7 +1748,7 @@ func TestUnSubscribeConfiguration(t *testing.T) {
|
|||
return
|
||||
default:
|
||||
}
|
||||
if err := f(context.Background(), &configuration.UpdateEvent{
|
||||
if err := f(t.Context(), &configuration.UpdateEvent{
|
||||
Items: map[string]*configuration.Item{
|
||||
goodKey: {
|
||||
Value: "test-data",
|
||||
|
@ -1830,7 +1830,7 @@ func TestUnSubscribeConfiguration(t *testing.T) {
|
|||
Keys: tt.keys,
|
||||
}
|
||||
|
||||
resp, err := client.SubscribeConfigurationAlpha1(context.Background(), req)
|
||||
resp, err := client.SubscribeConfigurationAlpha1(t.Context(), req)
|
||||
require.NoError(t, err, "Error should be nil")
|
||||
const retry = 3
|
||||
count := 0
|
||||
|
@ -1852,7 +1852,7 @@ func TestUnSubscribeConfiguration(t *testing.T) {
|
|||
subscribeID = rsp.GetId()
|
||||
}
|
||||
require.NoError(t, err, "Error should be nil")
|
||||
_, err = client.UnsubscribeConfigurationAlpha1(context.Background(), &runtimev1pb.UnsubscribeConfigurationRequest{
|
||||
_, err = client.UnsubscribeConfigurationAlpha1(t.Context(), &runtimev1pb.UnsubscribeConfigurationRequest{
|
||||
StoreName: tt.storeName,
|
||||
Id: subscribeID,
|
||||
})
|
||||
|
@ -1879,7 +1879,7 @@ func TestUnSubscribeConfiguration(t *testing.T) {
|
|||
Keys: tt.keys,
|
||||
}
|
||||
|
||||
resp, err := client.SubscribeConfiguration(context.Background(), req)
|
||||
resp, err := client.SubscribeConfiguration(t.Context(), req)
|
||||
require.NoError(t, err, "Error should be nil")
|
||||
const retry = 3
|
||||
count := 0
|
||||
|
@ -1901,7 +1901,7 @@ func TestUnSubscribeConfiguration(t *testing.T) {
|
|||
subscribeID = rsp.GetId()
|
||||
}
|
||||
require.NoError(t, err, "Error should be nil")
|
||||
_, err = client.UnsubscribeConfiguration(context.Background(), &runtimev1pb.UnsubscribeConfigurationRequest{
|
||||
_, err = client.UnsubscribeConfiguration(t.Context(), &runtimev1pb.UnsubscribeConfigurationRequest{
|
||||
StoreName: tt.storeName,
|
||||
Id: subscribeID,
|
||||
})
|
||||
|
@ -1975,7 +1975,7 @@ func TestUnsubscribeConfigurationErrScenario(t *testing.T) {
|
|||
Id: tt.id,
|
||||
}
|
||||
|
||||
resp, err := client.UnsubscribeConfigurationAlpha1(context.Background(), req)
|
||||
resp, err := client.UnsubscribeConfigurationAlpha1(t.Context(), req)
|
||||
assert.Equal(t, tt.expectedResponse, resp != nil)
|
||||
assert.Equal(t, tt.expectedError, err != nil)
|
||||
})
|
||||
|
@ -1985,7 +1985,7 @@ func TestUnsubscribeConfigurationErrScenario(t *testing.T) {
|
|||
Id: tt.id,
|
||||
}
|
||||
|
||||
resp, err := client.UnsubscribeConfiguration(context.Background(), req)
|
||||
resp, err := client.UnsubscribeConfiguration(t.Context(), req)
|
||||
assert.Equal(t, tt.expectedResponse, resp != nil)
|
||||
assert.Equal(t, tt.expectedError, err != nil)
|
||||
})
|
||||
|
@ -2096,7 +2096,7 @@ func TestGetBulkState(t *testing.T) {
|
|||
Keys: tt.keys,
|
||||
}
|
||||
|
||||
resp, err := client.GetBulkState(context.Background(), req)
|
||||
resp, err := client.GetBulkState(t.Context(), req)
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
|
||||
|
@ -2210,7 +2210,7 @@ func TestDeleteState(t *testing.T) {
|
|||
|
||||
for _, tt := range testCases {
|
||||
t.Run(tt.testName, func(t *testing.T) {
|
||||
_, err := client.DeleteState(context.Background(), &runtimev1pb.DeleteStateRequest{
|
||||
_, err := client.DeleteState(t.Context(), &runtimev1pb.DeleteStateRequest{
|
||||
StoreName: tt.storeName,
|
||||
Key: tt.key,
|
||||
})
|
||||
|
@ -2338,7 +2338,7 @@ func TestDeleteBulkState(t *testing.T) {
|
|||
|
||||
for _, tt := range testCases {
|
||||
t.Run(tt.testName, func(t *testing.T) {
|
||||
_, err := client.DeleteBulkState(context.Background(), &runtimev1pb.DeleteBulkStateRequest{
|
||||
_, err := client.DeleteBulkState(t.Context(), &runtimev1pb.DeleteBulkStateRequest{
|
||||
StoreName: tt.storeName,
|
||||
States: tt.states,
|
||||
})
|
||||
|
@ -2403,19 +2403,19 @@ func TestPublishTopic(t *testing.T) {
|
|||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
|
||||
t.Run("err: empty publish event request", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{})
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{})
|
||||
assert.Equal(t, codes.InvalidArgument, status.Code(err))
|
||||
})
|
||||
|
||||
t.Run("err: publish event request with empty topic", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{
|
||||
PubsubName: "pubsub",
|
||||
})
|
||||
assert.Equal(t, codes.InvalidArgument, status.Code(err))
|
||||
})
|
||||
|
||||
t.Run("no err: publish event request with topic and pubsub alone", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
})
|
||||
|
@ -2423,7 +2423,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("no err: publish event request with topic, pubsub and ce metadata override", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
Metadata: map[string]string{
|
||||
|
@ -2436,7 +2436,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: publish event request with error-topic and pubsub", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "error-topic",
|
||||
})
|
||||
|
@ -2444,7 +2444,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: publish event request with err-not-found topic and pubsub", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "err-not-found",
|
||||
})
|
||||
|
@ -2452,7 +2452,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: publish event request with err-not-allowed topic and pubsub", func(t *testing.T) {
|
||||
_, err := client.PublishEvent(context.Background(), &runtimev1pb.PublishEventRequest{
|
||||
_, err := client.PublishEvent(t.Context(), &runtimev1pb.PublishEventRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "err-not-allowed",
|
||||
})
|
||||
|
@ -2460,12 +2460,12 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: empty bulk publish event request", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{})
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{})
|
||||
assert.Equal(t, codes.InvalidArgument, status.Code(err))
|
||||
})
|
||||
|
||||
t.Run("err: bulk publish event request with duplicate entry Ids", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
Entries: []*runtimev1pb.BulkPublishRequestEntry{
|
||||
|
@ -2489,7 +2489,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: bulk publish event request with missing entry Ids", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
Entries: []*runtimev1pb.BulkPublishRequestEntry{
|
||||
|
@ -2511,14 +2511,14 @@ func TestPublishTopic(t *testing.T) {
|
|||
assert.Contains(t, err.Error(), "not present for entry")
|
||||
})
|
||||
t.Run("err: bulk publish event request with pubsub and empty topic", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
})
|
||||
assert.Equal(t, codes.InvalidArgument, status.Code(err))
|
||||
})
|
||||
|
||||
t.Run("no err: bulk publish event request with pubsub, topic and empty entries", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
})
|
||||
|
@ -2526,7 +2526,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: bulk publish event request with error-topic and pubsub", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "error-topic",
|
||||
})
|
||||
|
@ -2534,7 +2534,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: bulk publish event request with err-not-found topic and pubsub", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "err-not-found",
|
||||
})
|
||||
|
@ -2542,7 +2542,7 @@ func TestPublishTopic(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("err: bulk publish event request with err-not-allowed topic and pubsub", func(t *testing.T) {
|
||||
_, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
_, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "err-not-allowed",
|
||||
})
|
||||
|
@ -2605,7 +2605,7 @@ func TestBulkPublish(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("no failures", func(t *testing.T) {
|
||||
res, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
res, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
Entries: sampleEntries,
|
||||
|
@ -2615,7 +2615,7 @@ func TestBulkPublish(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("no failures with ce metadata override", func(t *testing.T) {
|
||||
res, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
res, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "topic",
|
||||
Entries: sampleEntries,
|
||||
|
@ -2630,7 +2630,7 @@ func TestBulkPublish(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("all failures from component", func(t *testing.T) {
|
||||
res, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
res, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "error-topic",
|
||||
Entries: sampleEntries,
|
||||
|
@ -2643,7 +2643,7 @@ func TestBulkPublish(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("partial failures from component", func(t *testing.T) {
|
||||
res, err := client.BulkPublishEventAlpha1(context.Background(), &runtimev1pb.BulkPublishRequest{
|
||||
res, err := client.BulkPublishEventAlpha1(t.Context(), &runtimev1pb.BulkPublishRequest{
|
||||
PubsubName: "pubsub",
|
||||
Topic: "even-error-topic",
|
||||
Entries: sampleEntries,
|
||||
|
@ -2671,12 +2671,12 @@ func TestInvokeBinding(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.InvokeBinding(context.Background(), &runtimev1pb.InvokeBindingRequest{})
|
||||
_, err := client.InvokeBinding(t.Context(), &runtimev1pb.InvokeBindingRequest{})
|
||||
require.NoError(t, err)
|
||||
_, err = client.InvokeBinding(context.Background(), &runtimev1pb.InvokeBindingRequest{Name: "error-binding"})
|
||||
_, err = client.InvokeBinding(t.Context(), &runtimev1pb.InvokeBindingRequest{Name: "error-binding"})
|
||||
assert.Equal(t, codes.Internal, status.Code(err))
|
||||
|
||||
ctx := grpcMetadata.AppendToOutgoingContext(context.Background(), "traceparent", "Test")
|
||||
ctx := grpcMetadata.AppendToOutgoingContext(t.Context(), "traceparent", "Test")
|
||||
resp, err := client.InvokeBinding(ctx, &runtimev1pb.InvokeBindingRequest{Metadata: map[string]string{"userMetadata": "val1"}})
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, resp)
|
||||
|
@ -2699,7 +2699,7 @@ func TestTransactionStateStoreNotConfigured(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.ExecuteStateTransaction(context.Background(), &runtimev1pb.ExecuteStateTransactionRequest{})
|
||||
_, err := client.ExecuteStateTransaction(t.Context(), &runtimev1pb.ExecuteStateTransactionRequest{})
|
||||
assert.Equal(t, codes.FailedPrecondition, status.Code(err))
|
||||
}
|
||||
|
||||
|
@ -2718,7 +2718,7 @@ func TestTransactionStateStoreNotImplemented(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.ExecuteStateTransaction(context.Background(), &runtimev1pb.ExecuteStateTransactionRequest{
|
||||
_, err := client.ExecuteStateTransaction(t.Context(), &runtimev1pb.ExecuteStateTransactionRequest{
|
||||
StoreName: "store1",
|
||||
})
|
||||
assert.Equal(t, codes.Unimplemented, status.Code(err))
|
||||
|
@ -2742,12 +2742,12 @@ func TestExecuteStateTransaction(t *testing.T) {
|
|||
fakeStore.On("Multi",
|
||||
mock.MatchedBy(matchContextInterface),
|
||||
mock.MatchedBy(func(req *state.TransactionalStateRequest) bool {
|
||||
return matchKeyFn(context.Background(), req, goodKey)
|
||||
return matchKeyFn(t.Context(), req, goodKey)
|
||||
})).Return(nil)
|
||||
fakeStore.On("Multi",
|
||||
mock.MatchedBy(matchContextInterface),
|
||||
mock.MatchedBy(func(req *state.TransactionalStateRequest) bool {
|
||||
return matchKeyFn(context.Background(), req, "error-key")
|
||||
return matchKeyFn(t.Context(), req, "error-key")
|
||||
})).Return(errors.New("error to execute with key2"))
|
||||
|
||||
compStore := compstore.New()
|
||||
|
@ -2855,7 +2855,7 @@ func TestExecuteStateTransaction(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
_, err := client.ExecuteStateTransaction(context.Background(), req)
|
||||
_, err := client.ExecuteStateTransaction(t.Context(), req)
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err)
|
||||
} else {
|
||||
|
@ -3037,7 +3037,7 @@ func TestQueryState(t *testing.T) {
|
|||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
|
||||
resp, err := client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
resp, err := client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "store1",
|
||||
Query: queryTestRequestOK,
|
||||
})
|
||||
|
@ -3047,20 +3047,20 @@ func TestQueryState(t *testing.T) {
|
|||
assert.NotNil(t, resp.GetResults()[0].GetData())
|
||||
}
|
||||
|
||||
resp, err = client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
resp, err = client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "store1",
|
||||
Query: queryTestRequestNoRes,
|
||||
})
|
||||
assert.Empty(t, resp.GetResults())
|
||||
assert.Equal(t, codes.OK, status.Code(err))
|
||||
|
||||
_, err = client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
_, err = client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "store1",
|
||||
Query: queryTestRequestErr,
|
||||
})
|
||||
assert.Equal(t, codes.Internal, status.Code(err))
|
||||
|
||||
_, err = client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
_, err = client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "store1",
|
||||
Query: queryTestRequestSyntaxErr,
|
||||
})
|
||||
|
@ -3083,7 +3083,7 @@ func TestStateStoreQuerierNotImplemented(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
_, err := client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "store1",
|
||||
})
|
||||
assert.Equal(t, codes.Internal, status.Code(err))
|
||||
|
@ -3107,7 +3107,7 @@ func TestStateStoreQuerierEncrypted(t *testing.T) {
|
|||
defer clientConn.Close()
|
||||
|
||||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
_, err := client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
_, err := client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: storeName,
|
||||
})
|
||||
assert.Equal(t, codes.Internal, status.Code(err))
|
||||
|
@ -3139,7 +3139,7 @@ func TestGetConfigurationAPI(t *testing.T) {
|
|||
|
||||
testFn := func(getFn getConfigurationFn) func(t *testing.T) {
|
||||
return func(t *testing.T) {
|
||||
r, err := getFn(context.Background(), &runtimev1pb.GetConfigurationRequest{
|
||||
r, err := getFn(t.Context(), &runtimev1pb.GetConfigurationRequest{
|
||||
StoreName: "store1",
|
||||
Keys: []string{
|
||||
"key1",
|
||||
|
@ -3178,7 +3178,7 @@ func TestSubscribeConfigurationAPI(t *testing.T) {
|
|||
|
||||
getConfigurationItemTest := func(subscribeFn subscribeConfigurationFn) func(t *testing.T) {
|
||||
return func(t *testing.T) {
|
||||
s, err := subscribeFn(context.Background(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
s, err := subscribeFn(t.Context(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
StoreName: "store1",
|
||||
Keys: []string{
|
||||
"key1",
|
||||
|
@ -3222,7 +3222,7 @@ func TestSubscribeConfigurationAPI(t *testing.T) {
|
|||
|
||||
getAllConfigurationItemTest := func(subscribeFn subscribeConfigurationFn) func(t *testing.T) {
|
||||
return func(t *testing.T) {
|
||||
s, err := subscribeFn(context.Background(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
s, err := subscribeFn(t.Context(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
StoreName: "store1",
|
||||
Keys: []string{},
|
||||
})
|
||||
|
@ -3317,7 +3317,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
|
||||
t.Run("get state request retries with resiliency", func(t *testing.T) {
|
||||
_, err := client.GetState(context.Background(), &runtimev1pb.GetStateRequest{
|
||||
_, err := client.GetState(t.Context(), &runtimev1pb.GetStateRequest{
|
||||
StoreName: "failStore",
|
||||
Key: "failingGetKey",
|
||||
})
|
||||
|
@ -3327,7 +3327,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("get state request times out with resiliency", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := client.GetState(context.Background(), &runtimev1pb.GetStateRequest{
|
||||
_, err := client.GetState(t.Context(), &runtimev1pb.GetStateRequest{
|
||||
StoreName: "failStore",
|
||||
Key: "timeoutGetKey",
|
||||
})
|
||||
|
@ -3339,7 +3339,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("set state request retries with resiliency", func(t *testing.T) {
|
||||
_, err := client.SaveState(context.Background(), &runtimev1pb.SaveStateRequest{
|
||||
_, err := client.SaveState(t.Context(), &runtimev1pb.SaveStateRequest{
|
||||
StoreName: "failStore",
|
||||
States: []*commonv1pb.StateItem{
|
||||
{
|
||||
|
@ -3354,7 +3354,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("set state request times out with resiliency", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := client.SaveState(context.Background(), &runtimev1pb.SaveStateRequest{
|
||||
_, err := client.SaveState(t.Context(), &runtimev1pb.SaveStateRequest{
|
||||
StoreName: "failStore",
|
||||
States: []*commonv1pb.StateItem{
|
||||
{
|
||||
|
@ -3371,7 +3371,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("delete state request retries with resiliency", func(t *testing.T) {
|
||||
_, err := client.DeleteState(context.Background(), &runtimev1pb.DeleteStateRequest{
|
||||
_, err := client.DeleteState(t.Context(), &runtimev1pb.DeleteStateRequest{
|
||||
StoreName: "failStore",
|
||||
Key: "failingDeleteKey",
|
||||
})
|
||||
|
@ -3381,7 +3381,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("delete state request times out with resiliency", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := client.DeleteState(context.Background(), &runtimev1pb.DeleteStateRequest{
|
||||
_, err := client.DeleteState(t.Context(), &runtimev1pb.DeleteStateRequest{
|
||||
StoreName: "failStore",
|
||||
Key: "timeoutDeleteKey",
|
||||
})
|
||||
|
@ -3399,7 +3399,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
failingStore.BulkFailKey.Store(ptr.Of(""))
|
||||
})
|
||||
|
||||
_, err := client.GetBulkState(context.Background(), &runtimev1pb.GetBulkStateRequest{
|
||||
_, err := client.GetBulkState(t.Context(), &runtimev1pb.GetBulkStateRequest{
|
||||
StoreName: "failStore",
|
||||
Keys: []string{"failingBulkGetKey", "goodBulkGetKey"},
|
||||
})
|
||||
|
@ -3408,7 +3408,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("bulk state set recovers from single key failure with resiliency", func(t *testing.T) {
|
||||
_, err := client.SaveState(context.Background(), &runtimev1pb.SaveStateRequest{
|
||||
_, err := client.SaveState(t.Context(), &runtimev1pb.SaveStateRequest{
|
||||
StoreName: "failStore",
|
||||
States: []*commonv1pb.StateItem{
|
||||
{
|
||||
|
@ -3429,7 +3429,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("bulk state set times out with resiliency", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := client.SaveState(context.Background(), &runtimev1pb.SaveStateRequest{
|
||||
_, err := client.SaveState(t.Context(), &runtimev1pb.SaveStateRequest{
|
||||
StoreName: "failStore",
|
||||
States: []*commonv1pb.StateItem{
|
||||
{
|
||||
|
@ -3451,7 +3451,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("state transaction passes after retries with resiliency", func(t *testing.T) {
|
||||
_, err := client.ExecuteStateTransaction(context.Background(), &runtimev1pb.ExecuteStateTransactionRequest{
|
||||
_, err := client.ExecuteStateTransaction(t.Context(), &runtimev1pb.ExecuteStateTransactionRequest{
|
||||
StoreName: "failStore",
|
||||
Operations: []*runtimev1pb.TransactionalStateOperation{
|
||||
{
|
||||
|
@ -3468,7 +3468,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("state transaction times out with resiliency", func(t *testing.T) {
|
||||
_, err := client.ExecuteStateTransaction(context.Background(), &runtimev1pb.ExecuteStateTransactionRequest{
|
||||
_, err := client.ExecuteStateTransaction(t.Context(), &runtimev1pb.ExecuteStateTransactionRequest{
|
||||
StoreName: "failStore",
|
||||
Operations: []*runtimev1pb.TransactionalStateOperation{
|
||||
{
|
||||
|
@ -3485,7 +3485,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("state query retries with resiliency", func(t *testing.T) {
|
||||
_, err := client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
_, err := client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "failStore",
|
||||
Query: queryTestRequestOK,
|
||||
Metadata: map[string]string{"key": "failingQueryKey"},
|
||||
|
@ -3496,7 +3496,7 @@ func TestStateAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("state query times out with resiliency", func(t *testing.T) {
|
||||
_, err := client.QueryStateAlpha1(context.Background(), &runtimev1pb.QueryStateRequest{
|
||||
_, err := client.QueryStateAlpha1(t.Context(), &runtimev1pb.QueryStateRequest{
|
||||
StoreName: "failStore",
|
||||
Query: queryTestRequestOK,
|
||||
Metadata: map[string]string{"key": "timeoutQueryKey"},
|
||||
|
@ -3544,7 +3544,7 @@ func TestConfigurationAPIWithResiliency(t *testing.T) {
|
|||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
|
||||
t.Run("test get configuration retries with resiliency", func(t *testing.T) {
|
||||
_, err := client.GetConfiguration(context.Background(), &runtimev1pb.GetConfigurationRequest{
|
||||
_, err := client.GetConfiguration(t.Context(), &runtimev1pb.GetConfigurationRequest{
|
||||
StoreName: "failConfig",
|
||||
Keys: []string{},
|
||||
Metadata: map[string]string{"key": "failingGetKey"},
|
||||
|
@ -3555,7 +3555,7 @@ func TestConfigurationAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("test get configuration fails due to timeout with resiliency", func(t *testing.T) {
|
||||
_, err := client.GetConfiguration(context.Background(), &runtimev1pb.GetConfigurationRequest{
|
||||
_, err := client.GetConfiguration(t.Context(), &runtimev1pb.GetConfigurationRequest{
|
||||
StoreName: "failConfig",
|
||||
Keys: []string{},
|
||||
Metadata: map[string]string{"key": "timeoutGetKey"},
|
||||
|
@ -3566,7 +3566,7 @@ func TestConfigurationAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("test subscribe configuration retries with resiliency", func(t *testing.T) {
|
||||
resp, err := client.SubscribeConfiguration(context.Background(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
resp, err := client.SubscribeConfiguration(t.Context(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
StoreName: "failConfig",
|
||||
Keys: []string{},
|
||||
Metadata: map[string]string{"key": "failingSubscribeKey"},
|
||||
|
@ -3580,7 +3580,7 @@ func TestConfigurationAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("test subscribe configuration fails due to timeout with resiliency", func(t *testing.T) {
|
||||
resp, err := client.SubscribeConfiguration(context.Background(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
resp, err := client.SubscribeConfiguration(t.Context(), &runtimev1pb.SubscribeConfigurationRequest{
|
||||
StoreName: "failConfig",
|
||||
Keys: []string{},
|
||||
Metadata: map[string]string{"key": "timeoutSubscribeKey"},
|
||||
|
@ -3626,7 +3626,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
client := runtimev1pb.NewDaprClient(clientConn)
|
||||
|
||||
t.Run("Get secret - retries on initial failure with resiliency", func(t *testing.T) {
|
||||
_, err := client.GetSecret(context.Background(), &runtimev1pb.GetSecretRequest{
|
||||
_, err := client.GetSecret(t.Context(), &runtimev1pb.GetSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Key: "key",
|
||||
})
|
||||
|
@ -3638,7 +3638,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
t.Run("Get secret - timeout before request ends", func(t *testing.T) {
|
||||
// Store sleeps for 30 seconds, let's make sure our timeout takes less time than that.
|
||||
start := time.Now()
|
||||
_, err := client.GetSecret(context.Background(), &runtimev1pb.GetSecretRequest{
|
||||
_, err := client.GetSecret(t.Context(), &runtimev1pb.GetSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Key: "timeout",
|
||||
})
|
||||
|
@ -3650,7 +3650,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("Get bulk secret - retries on initial failure with resiliency", func(t *testing.T) {
|
||||
_, err := client.GetBulkSecret(context.Background(), &runtimev1pb.GetBulkSecretRequest{
|
||||
_, err := client.GetBulkSecret(t.Context(), &runtimev1pb.GetBulkSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Metadata: map[string]string{"key": "bulk"},
|
||||
})
|
||||
|
@ -3661,7 +3661,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("Get bulk secret - timeout before request ends", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := client.GetBulkSecret(context.Background(), &runtimev1pb.GetBulkSecretRequest{
|
||||
_, err := client.GetBulkSecret(t.Context(), &runtimev1pb.GetBulkSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Metadata: map[string]string{"key": "bulkTimeout"},
|
||||
})
|
||||
|
@ -3710,7 +3710,7 @@ func TestServiceInvocationWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("Test invoke direct message retries with resiliency", func(t *testing.T) {
|
||||
val := []byte("failingKey")
|
||||
res, err := client.InvokeService(context.Background(), &runtimev1pb.InvokeServiceRequest{
|
||||
res, err := client.InvokeService(t.Context(), &runtimev1pb.InvokeServiceRequest{
|
||||
Id: "failingApp",
|
||||
Message: &commonv1pb.InvokeRequest{
|
||||
Method: "test",
|
||||
|
@ -3727,7 +3727,7 @@ func TestServiceInvocationWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("Test invoke direct message fails with timeout", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := client.InvokeService(context.Background(), &runtimev1pb.InvokeServiceRequest{
|
||||
_, err := client.InvokeService(t.Context(), &runtimev1pb.InvokeServiceRequest{
|
||||
Id: "failingApp",
|
||||
Message: &commonv1pb.InvokeRequest{
|
||||
Method: "test",
|
||||
|
@ -3742,7 +3742,7 @@ func TestServiceInvocationWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("Test invoke direct messages fails after exhausting retries", func(t *testing.T) {
|
||||
_, err := client.InvokeService(context.Background(), &runtimev1pb.InvokeServiceRequest{
|
||||
_, err := client.InvokeService(t.Context(), &runtimev1pb.InvokeServiceRequest{
|
||||
Id: "failingApp",
|
||||
Message: &commonv1pb.InvokeRequest{
|
||||
Method: "test",
|
||||
|
@ -3756,7 +3756,7 @@ func TestServiceInvocationWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("Test invoke direct messages opens circuit breaker after consecutive failures", func(t *testing.T) {
|
||||
// Circuit breaker trips on the 5th request, ending the retries.
|
||||
_, err := client.InvokeService(context.Background(), &runtimev1pb.InvokeServiceRequest{
|
||||
_, err := client.InvokeService(t.Context(), &runtimev1pb.InvokeServiceRequest{
|
||||
Id: "circuitBreakerApp",
|
||||
Message: &commonv1pb.InvokeRequest{
|
||||
Method: "test",
|
||||
|
@ -3767,7 +3767,7 @@ func TestServiceInvocationWithResiliency(t *testing.T) {
|
|||
assert.Equal(t, 5, failingDirectMessaging.Failure.CallCount("circuitBreakerKey"))
|
||||
|
||||
// Additional requests should fail due to the circuit breaker.
|
||||
_, err = client.InvokeService(context.Background(), &runtimev1pb.InvokeServiceRequest{
|
||||
_, err = client.InvokeService(t.Context(), &runtimev1pb.InvokeServiceRequest{
|
||||
Id: "circuitBreakerApp",
|
||||
Message: &commonv1pb.InvokeRequest{
|
||||
Method: "test",
|
||||
|
@@ -3858,7 +3858,7 @@ func TestTryLock(t *testing.T) {
StoreName: "abc",
ExpiryInSeconds: 10,
}
_, err := api.TryLockAlpha1(context.Background(), req)
_, err := api.TryLockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = FailedPrecondition desc = lock store is not configured", err.Error())
})

@@ -3879,7 +3879,7 @@ func TestTryLock(t *testing.T) {
StoreName: "mock",
ExpiryInSeconds: 10,
}
_, err := api.TryLockAlpha1(context.Background(), req)
_, err := api.TryLockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = ResourceId is empty in lock store mock", err.Error())
})

@@ -3902,7 +3902,7 @@ func TestTryLock(t *testing.T) {
ResourceId: "resource",
ExpiryInSeconds: 10,
}
_, err := api.TryLockAlpha1(context.Background(), req)
_, err := api.TryLockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = LockOwner is empty in lock store mock", err.Error())
})

@@ -3927,7 +3927,7 @@ func TestTryLock(t *testing.T) {
LockOwner: "owner",
ExpiryInSeconds: 0,
}
_, err := api.TryLockAlpha1(context.Background(), req)
_, err := api.TryLockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = ExpiryInSeconds is not positive in lock store mock", err.Error())
})

@@ -3952,7 +3952,7 @@ func TestTryLock(t *testing.T) {
LockOwner: "owner",
ExpiryInSeconds: 1,
}
_, err := api.TryLockAlpha1(context.Background(), req)
_, err := api.TryLockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = lock store abc not found", err.Error())
})

@@ -3962,7 +3962,7 @@ func TestTryLock(t *testing.T) {
mockLockStore := daprt.NewMockStore(ctl)

mockLockStore.EXPECT().TryLock(context.Background(), gomock.Any()).DoAndReturn(func(ctx context.Context, req *lock.TryLockRequest) (*lock.TryLockResponse, error) {
mockLockStore.EXPECT().TryLock(t.Context(), gomock.Any()).DoAndReturn(func(ctx context.Context, req *lock.TryLockRequest) (*lock.TryLockResponse, error) {
assert.Equal(t, "lock||resource", req.ResourceID)
assert.Equal(t, "owner", req.LockOwner)
assert.Equal(t, int32(1), req.ExpiryInSeconds)

@@ -3986,7 +3986,7 @@ func TestTryLock(t *testing.T) {
LockOwner: "owner",
ExpiryInSeconds: 1,
}
resp, err := api.TryLockAlpha1(context.Background(), req)
resp, err := api.TryLockAlpha1(t.Context(), req)
require.NoError(t, err)
assert.True(t, resp.GetSuccess())
})
@@ -4009,7 +4009,7 @@ func TestUnlock(t *testing.T) {
req := &runtimev1pb.UnlockRequest{
StoreName: "abc",
}
_, err := api.UnlockAlpha1(context.Background(), req)
_, err := api.UnlockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = FailedPrecondition desc = lock store is not configured", err.Error())
})

@@ -4030,7 +4030,7 @@ func TestUnlock(t *testing.T) {
req := &runtimev1pb.UnlockRequest{
StoreName: "abc",
}
_, err := api.UnlockAlpha1(context.Background(), req)
_, err := api.UnlockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = ResourceId is empty in lock store abc", err.Error())
})

@@ -4051,7 +4051,7 @@ func TestUnlock(t *testing.T) {
StoreName: "abc",
ResourceId: "resource",
}
_, err := api.UnlockAlpha1(context.Background(), req)
_, err := api.UnlockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = LockOwner is empty in lock store abc", err.Error())
})

@@ -4074,7 +4074,7 @@ func TestUnlock(t *testing.T) {
ResourceId: "resource",
LockOwner: "owner",
}
_, err := api.UnlockAlpha1(context.Background(), req)
_, err := api.UnlockAlpha1(t.Context(), req)
assert.Equal(t, "api error: code = InvalidArgument desc = lock store abc not found", err.Error())
})

@@ -4084,7 +4084,7 @@ func TestUnlock(t *testing.T) {
mockLockStore := daprt.NewMockStore(ctl)

mockLockStore.EXPECT().Unlock(context.Background(), gomock.Any()).DoAndReturn(func(ctx context.Context, req *lock.UnlockRequest) (*lock.UnlockResponse, error) {
mockLockStore.EXPECT().Unlock(t.Context(), gomock.Any()).DoAndReturn(func(ctx context.Context, req *lock.UnlockRequest) (*lock.UnlockResponse, error) {
assert.Equal(t, "lock||resource", req.ResourceID)
assert.Equal(t, "owner", req.LockOwner)
return &lock.UnlockResponse{

@@ -4105,7 +4105,7 @@ func TestUnlock(t *testing.T) {
ResourceId: "resource",
LockOwner: "owner",
}
resp, err := api.UnlockAlpha1(context.Background(), req)
resp, err := api.UnlockAlpha1(t.Context(), req)
require.NoError(t, err)
assert.Equal(t, runtimev1pb.UnlockResponse_SUCCESS, resp.GetStatus()) //nolint:nosnakecase
})
@@ -4228,7 +4228,7 @@ func TestMetadata(t *testing.T) {
client := runtimev1pb.NewDaprClient(clientConn)

t.Run("Set Metadata", func(t *testing.T) {
_, err := client.SetMetadata(context.Background(), &runtimev1pb.SetMetadataRequest{
_, err := client.SetMetadata(t.Context(), &runtimev1pb.SetMetadataRequest{
Key: "foo",
Value: "bar",
})

@@ -4236,7 +4236,7 @@ func TestMetadata(t *testing.T) {
})

t.Run("Get Metadata", func(t *testing.T) {
res, err := client.GetMetadata(context.Background(), &runtimev1pb.GetMetadataRequest{})
res, err := client.GetMetadata(t.Context(), &runtimev1pb.GetMetadataRequest{})
require.NoError(t, err)

assert.Equal(t, "fakeAPI", res.GetId())
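
The change running through these test hunks is mechanical: context.Background() becomes t.Context(). A minimal sketch of the pattern, assuming Go 1.24+ where testing.T gained a Context method returning a per-test context that is canceled just before the test's Cleanup functions run; doSomething is a hypothetical stand-in for the Dapr client calls changed in the diff:

package example

import (
	"context"
	"testing"
	"time"
)

// doSomething is a hypothetical context-aware call standing in for the
// client.InvokeService / api.TryLockAlpha1 calls changed in this commit.
func doSomething(ctx context.Context) error {
	select {
	case <-time.After(10 * time.Millisecond):
		return nil
	case <-ctx.Done():
		return ctx.Err() // the test context was canceled
	}
}

func TestWithTestContext(t *testing.T) {
	// t.Context() is scoped to this test: it is canceled shortly before the
	// test's Cleanup functions run, so work started with it cannot silently
	// outlive the test the way context.Background() can.
	if err := doSomething(t.Context()); err != nil {
		t.Fatal(err)
	}
}
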
@@ -55,12 +55,6 @@ func (f *Frame) ProtoMessage() {
// nop
}

// Raw returns the raw message.
// This is primarily useful for debugging.
func (f Frame) Raw() []byte {
return f.payload
}

// Marshal implements the encoding.Codec interface method.
func (p *Proxy) Marshal(v any) ([]byte, error) {
out, ok := v.(*Frame)
@@ -476,7 +476,7 @@ func (s *proxyTestSuite) TestResiliencyUnary() {
setupMetrics(s)

ctx := metadata.NewOutgoingContext(context.Background(), metadata.Pairs(diag.GRPCProxyAppIDKey, testAppID))
ctx := metadata.NewOutgoingContext(t.Context(), metadata.Pairs(diag.GRPCProxyAppIDKey, testAppID))

_, err := s.testClient.Ping(ctx, &pb.PingRequest{Value: message})
require.Error(t, err, "Ping should fail due to timeouts")

@@ -515,7 +515,7 @@ func (s *proxyTestSuite) TestResiliencyUnary() {
go func(i int) {
for j := range numOperations {
pingMsg := fmt.Sprintf("%d:%d", i, j)
ctx := metadata.NewOutgoingContext(context.Background(), metadata.Pairs(diag.GRPCProxyAppIDKey, testAppID))
ctx := metadata.NewOutgoingContext(t.Context(), metadata.Pairs(diag.GRPCProxyAppIDKey, testAppID))
res, err := s.testClient.Ping(ctx, &pb.PingRequest{Value: pingMsg})
require.NoErrorf(t, err, "Ping should succeed for operation %d:%d", i, j)
require.NotNilf(t, res, "Response should not be nil for operation %d:%d", i, j)

@@ -581,7 +581,7 @@ func (s *proxyTestSuite) TestResiliencyStreaming() {
s.T().Run("retries are not allowed", func(t *testing.T) {
// We're purposely not setting dapr-stream=true in this context because we want to simulate the failure when the RPC is not marked as streaming
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
ctx, cancel := context.WithTimeout(t.Context(), time.Minute)
defer cancel()
ctx = metadata.NewOutgoingContext(ctx, metadata.Pairs(
diag.GRPCProxyAppIDKey, "test",

@@ -616,7 +616,7 @@ func (s *proxyTestSuite) TestResiliencyStreaming() {
})

s.T().Run("timeouts do not apply after initial handshake", func(t *testing.T) {
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
ctx, cancel := context.WithTimeout(t.Context(), time.Minute)
defer cancel()

setupMetrics(s)

@@ -667,7 +667,7 @@ func (s *proxyTestSuite) TestResiliencyStreaming() {
s.service.simulateConnectionFailures.Store(0)
}()

ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
ctx, cancel := context.WithTimeout(t.Context(), time.Minute)
defer cancel()

setupMetrics(s)
@@ -168,7 +168,7 @@ func TestGrpcAPILoggingMiddlewares(t *testing.T) {
if userAgent != "" {
md["user-agent"] = []string{userAgent}
}
ctx := grpcMetadata.NewIncomingContext(context.Background(), md)
ctx := grpcMetadata.NewIncomingContext(t.Context(), md)

info := &grpcGo.UnaryServerInfo{
FullMethod: "/dapr.proto.runtime.v1.Dapr/GetState",
@@ -3907,11 +3907,11 @@ type fakeConfigurationStore struct {
counter int
}

func (c fakeConfigurationStore) Ping() error {
func (c *fakeConfigurationStore) Ping() error {
return nil
}

func (c fakeConfigurationStore) Get(ctx context.Context, req *configuration.GetRequest) (*configuration.GetResponse, error) {
func (c *fakeConfigurationStore) Get(ctx context.Context, req *configuration.GetRequest) (*configuration.GetResponse, error) {
if len(req.Keys) == 0 {
return &configuration.GetResponse{
Items: map[string]*configuration.Item{

@@ -3974,8 +3974,8 @@ func (c fakeConfigurationStore) Get(ctx context.Context, req *configuration.GetR
return nil, errors.New("get key error: value not found")
}

func (c fakeConfigurationStore) Init(ctx context.Context, metadata configuration.Metadata) error {
c.counter = 0 //nolint:staticcheck
func (c *fakeConfigurationStore) Init(ctx context.Context, metadata configuration.Metadata) error {
c.counter = 0
return nil
}

@@ -3992,7 +3992,7 @@ func (c *fakeConfigurationStore) Unsubscribe(ctx context.Context, req *configura

type fakeLockStore struct{}

func (l fakeLockStore) Ping() error {
func (l *fakeLockStore) Ping() error {
return nil
}

@@ -4427,11 +4427,11 @@ func matchContextInterface(v any) bool {
return ok
}

func (c fakeConfigurationStore) Close() error {
func (c *fakeConfigurationStore) Close() error {
return nil
}

func (l fakeLockStore) Close() error {
func (l *fakeLockStore) Close() error {
return nil
}
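
The value-to-pointer receiver changes above are the sort of inconsistency golangci-lint's recvcheck linter reports: a type whose method set mixes value and pointer receivers. A small sketch of the failure mode, with illustrative names that are not from the Dapr codebase:

package example

// mixedStore would be flagged by recvcheck: Init uses a value receiver while
// Bump uses a pointer receiver, and the value-receiver Init only mutates a copy.
type mixedStore struct {
	counter int
}

func (s mixedStore) Init() { s.counter = 0 } // no effect on the caller's value

func (s *mixedStore) Bump() { s.counter++ }

// fixedStore mirrors the fix applied to fakeConfigurationStore and
// fakeLockStore above: every method uses the same (pointer) receiver kind.
type fixedStore struct {
	counter int
}

func (s *fixedStore) Init() { s.counter = 0 }

func (s *fixedStore) Bump() { s.counter++ }
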
@@ -93,7 +93,7 @@ func TestUnescapeRequestParametersHandler(t *testing.T) {
newCtx := func() context.Context {
chiCtx := chi.NewRouteContext()
chiCtx.URLParams.Add("testparam", "foo%20bar")
return context.WithValue(context.Background(), chi.RouteCtxKey, chiCtx)
return context.WithValue(t.Context(), chi.RouteCtxKey, chiCtx)
}

t.Run("unescapeRequestParametersHandler is added as middleware if the endpoint includes Parameters in its path", func(t *testing.T) {

@@ -158,7 +158,7 @@ func TestUniversalHTTPHandler(t *testing.T) {
respBody, err := io.ReadAll(resp.Body)
require.NoError(t, err)

assert.Equal(t, `{"errorCode":"ERR_BAD_REQUEST","message":"invalid request: unexpected message"}`, string(respBody))
assert.JSONEq(t, `{"errorCode":"ERR_BAD_REQUEST","message":"invalid request: unexpected message"}`, string(respBody))
})

t.Run("Handler returns nil", func(t *testing.T) {
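
assert.JSONEq, substituted for assert.Equal above, parses both strings and compares the resulting JSON values, so the assertion no longer depends on key order or whitespace. A small illustrative test, assuming the testify assert package:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestJSONEquality(t *testing.T) {
	want := `{"errorCode":"ERR_BAD_REQUEST","message":"invalid request: unexpected message"}`
	got := `{ "message": "invalid request: unexpected message", "errorCode": "ERR_BAD_REQUEST" }`

	// A byte-for-byte comparison fails on key order and whitespace...
	assert.NotEqual(t, want, got)
	// ...while JSONEq unmarshals both sides and compares the decoded values.
	assert.JSONEq(t, want, got)
}
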
@ -14,7 +14,6 @@ limitations under the License.
|
|||
package universal
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
@ -110,7 +109,7 @@ func TestGetMetadata(t *testing.T) {
|
|||
globalConfig: &config.Configuration{},
|
||||
}
|
||||
|
||||
response, err := fakeAPI.GetMetadata(context.Background(), &runtimev1pb.GetMetadataRequest{})
|
||||
response, err := fakeAPI.GetMetadata(t.Context(), &runtimev1pb.GetMetadataRequest{})
|
||||
require.NoError(t, err, "Expected no error")
|
||||
|
||||
bytes, err := json.Marshal(response)
|
||||
|
@ -141,7 +140,7 @@ func TestSetMetadata(t *testing.T) {
|
|||
appID: "fakeAPI",
|
||||
}
|
||||
|
||||
_, err := fakeAPI.SetMetadata(context.Background(), &runtimev1pb.SetMetadataRequest{
|
||||
_, err := fakeAPI.SetMetadata(t.Context(), &runtimev1pb.SetMetadataRequest{
|
||||
Key: "testKey",
|
||||
Value: "testValue",
|
||||
})
|
||||
|
|
|
@ -14,7 +14,6 @@ limitations under the License.
|
|||
package universal
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
|
@ -41,13 +40,13 @@ func TestSecretStoreNotConfigured(t *testing.T) {
|
|||
|
||||
// act
|
||||
t.Run("GetSecret", func(t *testing.T) {
|
||||
_, err := fakeAPI.GetSecret(context.Background(), &runtimev1pb.GetSecretRequest{})
|
||||
_, err := fakeAPI.GetSecret(t.Context(), &runtimev1pb.GetSecretRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrSecretStoreNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("GetBulkSecret", func(t *testing.T) {
|
||||
_, err := fakeAPI.GetBulkSecret(context.Background(), &runtimev1pb.GetBulkSecretRequest{})
|
||||
_, err := fakeAPI.GetBulkSecret(t.Context(), &runtimev1pb.GetBulkSecretRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrSecretStoreNotConfigured)
|
||||
})
|
||||
|
@ -175,7 +174,7 @@ func TestGetSecret(t *testing.T) {
|
|||
StoreName: tt.storeName,
|
||||
Key: tt.key,
|
||||
}
|
||||
resp, err := fakeAPI.GetSecret(context.Background(), req)
|
||||
resp, err := fakeAPI.GetSecret(t.Context(), req)
|
||||
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err, "Expected no error")
|
||||
|
@ -239,7 +238,7 @@ func TestGetBulkSecret(t *testing.T) {
|
|||
req := &runtimev1pb.GetBulkSecretRequest{
|
||||
StoreName: tt.storeName,
|
||||
}
|
||||
resp, err := fakeAPI.GetBulkSecret(context.Background(), req)
|
||||
resp, err := fakeAPI.GetBulkSecret(t.Context(), req)
|
||||
|
||||
if !tt.errorExcepted {
|
||||
require.NoError(t, err, "Expected no error")
|
||||
|
@ -273,7 +272,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
|
||||
// act
|
||||
t.Run("Get secret - retries on initial failure with resiliency", func(t *testing.T) {
|
||||
_, err := fakeAPI.GetSecret(context.Background(), &runtimev1pb.GetSecretRequest{
|
||||
_, err := fakeAPI.GetSecret(t.Context(), &runtimev1pb.GetSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Key: "key",
|
||||
})
|
||||
|
@ -285,7 +284,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
t.Run("Get secret - timeout before request ends", func(t *testing.T) {
|
||||
// Store sleeps for 30 seconds, let's make sure our timeout takes less time than that.
|
||||
start := time.Now()
|
||||
_, err := fakeAPI.GetSecret(context.Background(), &runtimev1pb.GetSecretRequest{
|
||||
_, err := fakeAPI.GetSecret(t.Context(), &runtimev1pb.GetSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Key: "timeout",
|
||||
})
|
||||
|
@ -297,7 +296,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("Get bulk secret - retries on initial failure with resiliency", func(t *testing.T) {
|
||||
_, err := fakeAPI.GetBulkSecret(context.Background(), &runtimev1pb.GetBulkSecretRequest{
|
||||
_, err := fakeAPI.GetBulkSecret(t.Context(), &runtimev1pb.GetBulkSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Metadata: map[string]string{"key": "bulk"},
|
||||
})
|
||||
|
@ -308,7 +307,7 @@ func TestSecretAPIWithResiliency(t *testing.T) {
|
|||
|
||||
t.Run("Get bulk secret - timeout before request ends", func(t *testing.T) {
|
||||
start := time.Now()
|
||||
_, err := fakeAPI.GetBulkSecret(context.Background(), &runtimev1pb.GetBulkSecretRequest{
|
||||
_, err := fakeAPI.GetBulkSecret(t.Context(), &runtimev1pb.GetBulkSecretRequest{
|
||||
StoreName: "failSecret",
|
||||
Metadata: map[string]string{"key": "bulkTimeout"},
|
||||
})
|
||||
|
|
|
@ -34,7 +34,7 @@ func TestShutdownEndpoint(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("Shutdown successfully", func(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
ctx, cancel := context.WithCancel(t.Context())
|
||||
_, err := fakeAPI.Shutdown(ctx, &runtimev1pb.ShutdownRequest{})
|
||||
cancel()
|
||||
require.NoError(t, err, "Expected no error")
|
||||
|
|
|
@ -18,7 +18,6 @@ limitations under the License.
|
|||
package universal
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"testing"
|
||||
|
@ -46,7 +45,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("return key in PEM format", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Name: "good-key",
|
||||
Format: runtimev1pb.SubtleGetKeyRequest_PEM,
|
||||
|
@ -58,7 +57,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("return key in JSON format", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Name: "good-key",
|
||||
Format: runtimev1pb.SubtleGetKeyRequest_JSON,
|
||||
|
@ -70,7 +69,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("default to PEM format", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Name: "good-key",
|
||||
})
|
||||
|
@ -81,7 +80,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("key not found", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Name: "not-found",
|
||||
})
|
||||
|
@ -92,7 +91,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("key has key ID", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
res, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Name: "with-name",
|
||||
})
|
||||
|
@ -108,13 +107,13 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{})
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -122,7 +121,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("invalid format", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Format: runtimev1pb.SubtleGetKeyRequest_KeyFormat(-9000),
|
||||
})
|
||||
|
@ -132,7 +131,7 @@ func TestSubtleGetKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to get key", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(context.Background(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
_, err := fakeAPI.SubtleGetKeyAlpha1(t.Context(), &runtimev1pb.SubtleGetKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
Name: "error-key",
|
||||
})
|
||||
|
@ -153,7 +152,7 @@ func TestSubtleEncryptAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("encrypt message", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleEncryptAlpha1(context.Background(), &runtimev1pb.SubtleEncryptRequest{
|
||||
res, err := fakeAPI.SubtleEncryptAlpha1(t.Context(), &runtimev1pb.SubtleEncryptRequest{
|
||||
ComponentName: "myvault",
|
||||
Plaintext: []byte("hello world"),
|
||||
KeyName: "good-tag",
|
||||
|
@ -171,13 +170,13 @@ func TestSubtleEncryptAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleEncryptAlpha1(context.Background(), &runtimev1pb.SubtleEncryptRequest{})
|
||||
_, err := fakeAPI.SubtleEncryptAlpha1(t.Context(), &runtimev1pb.SubtleEncryptRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleEncryptAlpha1(context.Background(), &runtimev1pb.SubtleEncryptRequest{
|
||||
_, err := fakeAPI.SubtleEncryptAlpha1(t.Context(), &runtimev1pb.SubtleEncryptRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -185,7 +184,7 @@ func TestSubtleEncryptAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to encrypt", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleEncryptAlpha1(context.Background(), &runtimev1pb.SubtleEncryptRequest{
|
||||
_, err := fakeAPI.SubtleEncryptAlpha1(t.Context(), &runtimev1pb.SubtleEncryptRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
})
|
||||
|
@ -207,7 +206,7 @@ func TestSubtleDecryptAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("decrypt message", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleDecryptAlpha1(context.Background(), &runtimev1pb.SubtleDecryptRequest{
|
||||
res, err := fakeAPI.SubtleDecryptAlpha1(t.Context(), &runtimev1pb.SubtleDecryptRequest{
|
||||
ComponentName: "myvault",
|
||||
Ciphertext: []byte("hello world"),
|
||||
KeyName: "good",
|
||||
|
@ -224,13 +223,13 @@ func TestSubtleDecryptAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleDecryptAlpha1(context.Background(), &runtimev1pb.SubtleDecryptRequest{})
|
||||
_, err := fakeAPI.SubtleDecryptAlpha1(t.Context(), &runtimev1pb.SubtleDecryptRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleDecryptAlpha1(context.Background(), &runtimev1pb.SubtleDecryptRequest{
|
||||
_, err := fakeAPI.SubtleDecryptAlpha1(t.Context(), &runtimev1pb.SubtleDecryptRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -238,7 +237,7 @@ func TestSubtleDecryptAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to decrypt", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleDecryptAlpha1(context.Background(), &runtimev1pb.SubtleDecryptRequest{
|
||||
_, err := fakeAPI.SubtleDecryptAlpha1(t.Context(), &runtimev1pb.SubtleDecryptRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
})
|
||||
|
@ -260,7 +259,7 @@ func TestSubtleWrapKeyAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("wrap key", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleWrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
res, err := fakeAPI.SubtleWrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
PlaintextKey: []byte("hello world"),
|
||||
KeyName: "good-tag",
|
||||
|
@ -277,13 +276,13 @@ func TestSubtleWrapKeyAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleWrapKeyRequest{})
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleWrapKeyRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -291,7 +290,7 @@ func TestSubtleWrapKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("key is empty", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
})
|
||||
|
@ -301,7 +300,7 @@ func TestSubtleWrapKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to wrap key", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
_, err := fakeAPI.SubtleWrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleWrapKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
PlaintextKey: oneHundredTwentyEightBits,
|
||||
|
@ -324,7 +323,7 @@ func TestSubtleUnwrapKeyAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("unwrap key", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleUnwrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleUnwrapKeyRequest{
|
||||
res, err := fakeAPI.SubtleUnwrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleUnwrapKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
WrappedKey: []byte("hello world"),
|
||||
KeyName: "good",
|
||||
|
@ -341,13 +340,13 @@ func TestSubtleUnwrapKeyAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleUnwrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleUnwrapKeyRequest{})
|
||||
_, err := fakeAPI.SubtleUnwrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleUnwrapKeyRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleUnwrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleUnwrapKeyRequest{
|
||||
_, err := fakeAPI.SubtleUnwrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleUnwrapKeyRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -355,7 +354,7 @@ func TestSubtleUnwrapKeyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to unwrap key", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleUnwrapKeyAlpha1(context.Background(), &runtimev1pb.SubtleUnwrapKeyRequest{
|
||||
_, err := fakeAPI.SubtleUnwrapKeyAlpha1(t.Context(), &runtimev1pb.SubtleUnwrapKeyRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
WrappedKey: oneHundredTwentyEightBits,
|
||||
|
@ -378,7 +377,7 @@ func TestSubtleSignAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("sign message", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleSignAlpha1(context.Background(), &runtimev1pb.SubtleSignRequest{
|
||||
res, err := fakeAPI.SubtleSignAlpha1(t.Context(), &runtimev1pb.SubtleSignRequest{
|
||||
ComponentName: "myvault",
|
||||
Digest: []byte("hello world"),
|
||||
KeyName: "good",
|
||||
|
@ -395,13 +394,13 @@ func TestSubtleSignAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleSignAlpha1(context.Background(), &runtimev1pb.SubtleSignRequest{})
|
||||
_, err := fakeAPI.SubtleSignAlpha1(t.Context(), &runtimev1pb.SubtleSignRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleSignAlpha1(context.Background(), &runtimev1pb.SubtleSignRequest{
|
||||
_, err := fakeAPI.SubtleSignAlpha1(t.Context(), &runtimev1pb.SubtleSignRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -409,7 +408,7 @@ func TestSubtleSignAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to sign", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleSignAlpha1(context.Background(), &runtimev1pb.SubtleSignRequest{
|
||||
_, err := fakeAPI.SubtleSignAlpha1(t.Context(), &runtimev1pb.SubtleSignRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
Digest: oneHundredTwentyEightBits,
|
||||
|
@ -432,7 +431,7 @@ func TestSubtleVerifyAlpha1(t *testing.T) {
|
|||
}
|
||||
|
||||
t.Run("signature is valid", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleVerifyAlpha1(context.Background(), &runtimev1pb.SubtleVerifyRequest{
|
||||
res, err := fakeAPI.SubtleVerifyAlpha1(t.Context(), &runtimev1pb.SubtleVerifyRequest{
|
||||
ComponentName: "myvault",
|
||||
Digest: oneHundredTwentyEightBits,
|
||||
Signature: oneHundredTwentyEightBits,
|
||||
|
@ -444,7 +443,7 @@ func TestSubtleVerifyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("signature is invalid", func(t *testing.T) {
|
||||
res, err := fakeAPI.SubtleVerifyAlpha1(context.Background(), &runtimev1pb.SubtleVerifyRequest{
|
||||
res, err := fakeAPI.SubtleVerifyAlpha1(t.Context(), &runtimev1pb.SubtleVerifyRequest{
|
||||
ComponentName: "myvault",
|
||||
Digest: oneHundredTwentyEightBits,
|
||||
Signature: oneHundredTwentyEightBits,
|
||||
|
@ -461,13 +460,13 @@ func TestSubtleVerifyAlpha1(t *testing.T) {
|
|||
compStore.AddCryptoProvider("myvault", fakeCryptoProvider)
|
||||
}()
|
||||
|
||||
_, err := fakeAPI.SubtleVerifyAlpha1(context.Background(), &runtimev1pb.SubtleVerifyRequest{})
|
||||
_, err := fakeAPI.SubtleVerifyAlpha1(t.Context(), &runtimev1pb.SubtleVerifyRequest{})
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, messages.ErrCryptoProvidersNotConfigured)
|
||||
})
|
||||
|
||||
t.Run("provider not found", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleVerifyAlpha1(context.Background(), &runtimev1pb.SubtleVerifyRequest{
|
||||
_, err := fakeAPI.SubtleVerifyAlpha1(t.Context(), &runtimev1pb.SubtleVerifyRequest{
|
||||
ComponentName: "notfound",
|
||||
})
|
||||
require.Error(t, err)
|
||||
|
@ -475,7 +474,7 @@ func TestSubtleVerifyAlpha1(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("failed to verify", func(t *testing.T) {
|
||||
_, err := fakeAPI.SubtleVerifyAlpha1(context.Background(), &runtimev1pb.SubtleVerifyRequest{
|
||||
_, err := fakeAPI.SubtleVerifyAlpha1(t.Context(), &runtimev1pb.SubtleVerifyRequest{
|
||||
ComponentName: "myvault",
|
||||
KeyName: "error",
|
||||
})
|
||||
|
|
|
@ -101,7 +101,7 @@ func TestStartWorkflowAPI(t *testing.T) {
|
|||
InstanceId: tt.instanceID,
|
||||
WorkflowName: tt.workflowName,
|
||||
}
|
||||
_, err := fakeAPI.StartWorkflow(context.Background(), req)
|
||||
_, err := fakeAPI.StartWorkflow(t.Context(), req)
|
||||
|
||||
if tt.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
@ -146,7 +146,7 @@ func TestGetWorkflowAPI(t *testing.T) {
|
|||
WorkflowComponent: tt.workflowComponent,
|
||||
InstanceId: tt.instanceID,
|
||||
}
|
||||
_, err := fakeAPI.GetWorkflow(context.Background(), req)
|
||||
_, err := fakeAPI.GetWorkflow(t.Context(), req)
|
||||
|
||||
if tt.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
@ -199,7 +199,7 @@ func TestTerminateWorkflowAPI(t *testing.T) {
|
|||
WorkflowComponent: tt.workflowComponent,
|
||||
InstanceId: tt.instanceID,
|
||||
}
|
||||
_, err := fakeAPI.TerminateWorkflow(context.Background(), req)
|
||||
_, err := fakeAPI.TerminateWorkflow(t.Context(), req)
|
||||
|
||||
if tt.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
@ -258,7 +258,7 @@ func TestRaiseEventWorkflowApi(t *testing.T) {
|
|||
EventName: tt.eventName,
|
||||
EventData: []byte("fake_input"),
|
||||
}
|
||||
_, err := fakeAPI.RaiseEventWorkflow(context.Background(), req)
|
||||
_, err := fakeAPI.RaiseEventWorkflow(t.Context(), req)
|
||||
|
||||
if tt.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
@ -311,7 +311,7 @@ func TestPauseWorkflowApi(t *testing.T) {
|
|||
WorkflowComponent: tt.workflowComponent,
|
||||
InstanceId: tt.instanceID,
|
||||
}
|
||||
_, err := fakeAPI.PauseWorkflow(context.Background(), req)
|
||||
_, err := fakeAPI.PauseWorkflow(t.Context(), req)
|
||||
|
||||
if tt.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
@ -356,7 +356,7 @@ func TestResumeWorkflowApi(t *testing.T) {
|
|||
WorkflowComponent: tt.workflowComponent,
|
||||
InstanceId: tt.instanceID,
|
||||
}
|
||||
_, err := fakeAPI.ResumeWorkflow(context.Background(), req)
|
||||
_, err := fakeAPI.ResumeWorkflow(t.Context(), req)
|
||||
|
||||
if tt.expectedError == nil {
|
||||
require.NoError(t, err)
|
||||
|
|
|
@@ -37,7 +37,7 @@ type NameValuePair struct {
}

// HasValue returns true if the NameValuePair has a non-empty value.
func (nvp NameValuePair) HasValue() bool {
func (nvp *NameValuePair) HasValue() bool {
return len(nvp.Value.JSON.Raw) > 0
}
@@ -22,7 +22,7 @@ type Scoped struct {
}

// IsAppScoped returns true if the appID is allowed in the scopes for the resource.
func (s Scoped) IsAppScoped(appID string) bool {
func (s *Scoped) IsAppScoped(appID string) bool {
if len(s.Scopes) == 0 {
// If there are no scopes, then every app is allowed
return true
@@ -20,8 +20,6 @@ limitations under the License.
package common

import ()

// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *DynamicValue) DeepCopyInto(out *DynamicValue) {
*out = *in
@@ -32,6 +32,8 @@ const (
//+kubebuilder:object:root=true

// Component describes an Dapr component type.
//
//nolint:recvcheck
type Component struct {
metav1.TypeMeta `json:",inline"`
//+optional
@@ -32,6 +32,8 @@ const (

// HTTPEndpoint describes a Dapr HTTPEndpoint type for external service invocation.
// This endpoint can be external to Dapr, or external to the environment.
//
//nolint:recvcheck
type HTTPEndpoint struct {
metav1.TypeMeta `json:",inline"`
//+optional
@@ -34,7 +34,7 @@ type Resiliency struct {
}

// String implements fmt.Stringer and is used for debugging. It returns the policy object encoded as JSON.
func (r Resiliency) String() string {
func (r *Resiliency) String() string {
b, _ := json.Marshal(r)
return string(b)
}
@@ -31,6 +31,8 @@ const (
// +kubebuilder:object:root=true

// Subscription describes an pub/sub event subscription.
//
//nolint:recvcheck
type Subscription struct {
metav1.TypeMeta `json:",inline"`
// +optional
@@ -32,6 +32,8 @@ const (
// +kubebuilder:storageversion

// Subscription describes an pub/sub event subscription.
//
//nolint:recvcheck
type Subscription struct {
metav1.TypeMeta `json:",inline"`
// +optional
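
For the CRD types above, the commit keeps the existing mix of receivers and instead adds a //nolint:recvcheck directive to the type's doc comment, which is how the linter is silenced when the mix is intentional (for example, read-only value-receiver methods alongside pointer-receiver mutators). A hedged sketch of the placement, using an illustrative type rather than the real Component or Subscription definitions:

package example

// Widget deliberately mixes receiver kinds: the value receiver serves
// read-only callers, while the pointer receiver mutates in place. The
// directive below tells recvcheck the mix is intentional.
//
//nolint:recvcheck
type Widget struct {
	Name string
}

func (w Widget) DisplayName() string { return w.Name }

func (w *Widget) Rename(name string) { w.Name = name }
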
@ -35,7 +35,7 @@ func TestAppHealth_setResult(t *testing.T) {
|
|||
}, nil)
|
||||
|
||||
// Set the initial state to healthy
|
||||
h.setResult(context.Background(), true)
|
||||
h.setResult(t.Context(), true)
|
||||
|
||||
statusChange := make(chan uint8, 1)
|
||||
unexpectedStatusChanges := atomic.Int32{}
|
||||
|
@ -55,7 +55,7 @@ func TestAppHealth_setResult(t *testing.T) {
|
|||
if i == threshold-1 {
|
||||
<-statusChange // Allow the channel to be written into
|
||||
}
|
||||
h.setResult(context.Background(), false)
|
||||
h.setResult(t.Context(), false)
|
||||
if i == threshold-1 {
|
||||
select {
|
||||
case v := <-statusChange:
|
||||
|
@ -77,7 +77,7 @@ func TestAppHealth_setResult(t *testing.T) {
|
|||
|
||||
// First success should bring the app back to healthy
|
||||
<-statusChange // Allow the channel to be written into
|
||||
h.setResult(context.Background(), true)
|
||||
h.setResult(t.Context(), true)
|
||||
select {
|
||||
case v := <-statusChange:
|
||||
assert.Equal(t, AppStatusHealthy, v)
|
||||
|
@ -93,7 +93,7 @@ func TestAppHealth_setResult(t *testing.T) {
|
|||
wg.Add(1)
|
||||
go func() {
|
||||
for range threshold + 5 {
|
||||
h.setResult(context.Background(), false)
|
||||
h.setResult(t.Context(), false)
|
||||
}
|
||||
wg.Done()
|
||||
}()
|
||||
|
@ -114,7 +114,7 @@ func TestAppHealth_setResult(t *testing.T) {
|
|||
h.failureCount.Store(int32(math.MaxInt32 - 2))
|
||||
statusChange <- 255 // Fill the channel again
|
||||
for range 5 {
|
||||
h.setResult(context.Background(), false)
|
||||
h.setResult(t.Context(), false)
|
||||
}
|
||||
assert.Empty(t, unexpectedStatusChanges.Load())
|
||||
assert.Equal(t, threshold+3, h.failureCount.Load())
|
||||
|
@ -172,7 +172,7 @@ func TestAppHealth_ratelimitReports(t *testing.T) {
|
|||
|
||||
func Test_StartProbes(t *testing.T) {
|
||||
t.Run("closing context should return", func(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
ctx, cancel := context.WithCancel(t.Context())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
done := make(chan struct{})
|
||||
|
@ -203,7 +203,7 @@ func Test_StartProbes(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("calling StartProbes after it has already closed should error", func(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
ctx, cancel := context.WithCancel(t.Context())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
h := New(config.AppHealthConfig{
|
||||
|
@ -229,7 +229,7 @@ func Test_StartProbes(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("should return after closed", func(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
ctx, cancel := context.WithCancel(t.Context())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
h := New(config.AppHealthConfig{
|
||||
|
@ -260,7 +260,7 @@ func Test_StartProbes(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("should call app probe function after interval", func(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
ctx, cancel := context.WithCancel(t.Context())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
var probeCalls atomic.Int64
|
||||
|
|
|
@ -74,7 +74,7 @@ func TestMain(m *testing.M) {
|
|||
}
|
||||
|
||||
func createConnection(t *testing.T) *grpc.ClientConn {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
ctx, cancel := context.WithTimeout(t.Context(), 2*time.Second)
|
||||
conn, err := grpc.DialContext(ctx, "localhost:9998", //nolint:staticcheck
|
||||
grpc.WithTransportCredentials(insecure.NewCredentials()),
|
||||
grpc.WithBlock(), //nolint:staticcheck
|
||||
|
@ -99,7 +99,7 @@ func TestInvokeMethod(t *testing.T) {
|
|||
appMetadataToken: "token1",
|
||||
maxRequestBodySize: 4 << 20,
|
||||
}
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
t.Run("successful request", func(t *testing.T) {
|
||||
req := invokev1.NewInvokeMethodRequest("method").
|
||||
|
@ -145,7 +145,7 @@ func TestHealthProbe(t *testing.T) {
|
|||
appMetadataToken: "token1",
|
||||
maxRequestBodySize: 4 << 20,
|
||||
}
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
var (
|
||||
success bool
|
||||
|
|
|
@ -14,7 +14,6 @@ limitations under the License.
|
|||
package http
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
|
@ -143,7 +142,7 @@ func (t *testUppercaseHandler) ServeHTTP(w http.ResponseWriter, r *http.Request)
|
|||
func TestInvokeMethodMiddlewaresPipeline(t *testing.T) {
|
||||
var th http.Handler = &testStatusCodeHandler{Code: http.StatusOK}
|
||||
server := httptest.NewServer(th)
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
t.Run("pipeline should be called when handlers are not empty", func(t *testing.T) {
|
||||
called := 0
|
||||
|
@ -403,7 +402,7 @@ func TestInvokeMethodMiddlewaresPipeline(t *testing.T) {
|
|||
|
||||
func TestInvokeMethodHeaders(t *testing.T) {
|
||||
th := &testHeadersHandler{}
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
server := httptest.NewServer(th)
|
||||
defer server.Close()
|
||||
|
||||
|
@ -467,7 +466,7 @@ func TestInvokeMethodHeaders(t *testing.T) {
|
|||
|
||||
func TestInvokeMethod(t *testing.T) {
|
||||
th := &testQueryStringHandler{t: t, serverURL: ""}
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
server := httptest.NewServer(th)
|
||||
defer server.Close()
|
||||
|
||||
|
@ -523,7 +522,7 @@ func TestInvokeMethod(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestInvokeMethodMaxConcurrency(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
t.Run("single concurrency", func(t *testing.T) {
|
||||
handler := testConcurrencyHandler{
|
||||
maxCalls: 1,
|
||||
|
@ -638,7 +637,7 @@ func TestInvokeMethodMaxConcurrency(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestInvokeWithHeaders(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
testServer := httptest.NewServer(&testHandlerHeaders{})
|
||||
c := Channel{
|
||||
baseAddress: testServer.URL,
|
||||
|
@ -673,7 +672,7 @@ func TestInvokeWithHeaders(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestContentType(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
t.Run("no default content type", func(t *testing.T) {
|
||||
handler := &testContentTypeHandler{}
|
||||
|
@ -754,7 +753,7 @@ func TestContentType(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestContentLength(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
handler := &testHandlerHeaders{}
|
||||
testServer := httptest.NewServer(handler)
|
||||
|
@ -788,7 +787,7 @@ func TestContentLength(t *testing.T) {
|
|||
|
||||
func TestAppToken(t *testing.T) {
|
||||
t.Run("token present", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
testServer := httptest.NewServer(&testHandlerHeaders{})
|
||||
c := Channel{
|
||||
baseAddress: testServer.URL,
|
||||
|
@ -820,7 +819,7 @@ func TestAppToken(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("token not present", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
testServer := httptest.NewServer(&testHandlerHeaders{})
|
||||
c := Channel{
|
||||
baseAddress: testServer.URL,
|
||||
|
@ -852,7 +851,7 @@ func TestAppToken(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestHealthProbe(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
h := &testStatusCodeHandler{}
|
||||
testServer := httptest.NewServer(h)
|
||||
c := Channel{
|
||||
|
@ -887,7 +886,7 @@ func TestHealthProbe(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestNoInvalidTraceContext(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
handler := &testHandlerHeaders{}
|
||||
testServer := httptest.NewServer(handler)
|
||||
|
|
|
@@ -107,6 +107,5 @@ func parseContentLength(cl string) int64 {
if err != nil {
return -1
}
//nolint:gosec
return int64(n)
}
|
@ -125,7 +125,7 @@ func TestInputBindingCalls(t *testing.T) {
|
|||
}()
|
||||
|
||||
conn := inputFromConnector(testLogger, connector)
|
||||
err = conn.Init(context.Background(), bindings.Metadata{
|
||||
err = conn.Init(t.Context(), bindings.Metadata{
|
||||
Base: contribMetadata.Base{},
|
||||
})
|
||||
|
||||
|
@ -180,7 +180,7 @@ func TestInputBindingCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = binding.Read(context.Background(), func(_ context.Context, resp *bindings.ReadResponse) ([]byte, error) {
|
||||
err = binding.Read(t.Context(), func(_ context.Context, resp *bindings.ReadResponse) ([]byte, error) {
|
||||
handleCalled.Add(1)
|
||||
messagesProcessed.Done()
|
||||
assert.Contains(t, messagesData, resp.Data)
|
||||
|
|
|
@ -127,7 +127,7 @@ func TestOutputBindingCalls(t *testing.T) {
|
|||
}()
|
||||
|
||||
conn := outputFromConnector(testLogger, connector)
|
||||
err = conn.Init(context.Background(), bindings.Metadata{
|
||||
err = conn.Init(t.Context(), bindings.Metadata{
|
||||
Base: contribMetadata.Base{},
|
||||
})
|
||||
|
||||
|
@ -171,7 +171,7 @@ func TestOutputBindingCalls(t *testing.T) {
|
|||
defer cleanup()
|
||||
require.NoError(t, err)
|
||||
|
||||
resp, err := outputSvc.Invoke(context.Background(), &bindings.InvokeRequest{
|
||||
resp, err := outputSvc.Invoke(t.Context(), &bindings.InvokeRequest{
|
||||
Data: fakeDataReq,
|
||||
Metadata: fakeMetadata,
|
||||
Operation: fakeOp,
|
||||
|
@ -194,7 +194,7 @@ func TestOutputBindingCalls(t *testing.T) {
|
|||
defer cleanup()
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = outputSvc.Invoke(context.Background(), &bindings.InvokeRequest{})
|
||||
_, err = outputSvc.Invoke(t.Context(), &bindings.InvokeRequest{})
|
||||
|
||||
require.Error(t, err)
|
||||
assert.Equal(t, int64(1), srv.invokeCalled.Load())
|
||||
|
|
|
@ -14,7 +14,6 @@ limitations under the License.
|
|||
package loader
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
@ -36,7 +35,7 @@ spec:
|
|||
`), fs.FileMode(0o600)))
|
||||
|
||||
loader := NewLocalLoader("", []string{tmp})
|
||||
components, err := loader.Load(context.Background())
|
||||
components, err := loader.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
require.Len(t, components, 1)
|
||||
require.Equal(t, "statestore", components[0].Name)
|
||||
|
@ -55,14 +54,14 @@ spec:
|
|||
`), fs.FileMode(0o600)))
|
||||
|
||||
loader := NewLocalLoader("", []string{tmp})
|
||||
components, err := loader.Load(context.Background())
|
||||
components, err := loader.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, components)
|
||||
})
|
||||
|
||||
t.Run("Test Non Existent Directory", func(t *testing.T) {
|
||||
loader := NewLocalLoader("", []string{"/non-existent-directory"})
|
||||
_, err := loader.Load(context.Background())
|
||||
_, err := loader.Load(t.Context())
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
|
@ -80,7 +79,7 @@ spec:
|
|||
`), fs.FileMode(0o600)))
|
||||
|
||||
loader := NewLocalLoader("", []string{tmp})
|
||||
err := loader.Validate(context.Background())
|
||||
err := loader.Validate(t.Context())
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
|
@ -97,13 +96,13 @@ spec:
|
|||
`), fs.FileMode(0o600)))
|
||||
|
||||
loader := NewLocalLoader("", []string{tmp})
|
||||
err := loader.Validate(context.Background())
|
||||
err := loader.Validate(t.Context())
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("Test Validate Non Existent Directory", func(t *testing.T) {
|
||||
loader := NewLocalLoader("", []string{"/non-existent-directory"})
|
||||
err := loader.Validate(context.Background())
|
||||
err := loader.Validate(t.Context())
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -115,7 +115,7 @@ func TestGRPCConnector(t *testing.T) {
|
|||
|
||||
assert.Contains(t, acceptedStatus, connector.conn.GetState())
|
||||
assert.Equal(t, 1, fakeFactoryCalled)
|
||||
require.NoError(t, connector.conn.Invoke(context.Background(), fmt.Sprintf("/%s/%s", fakeSvcName, fakeMethodName), structpb.NewNullValue(), structpb.NewNullValue()))
|
||||
require.NoError(t, connector.conn.Invoke(t.Context(), fmt.Sprintf("/%s/%s", fakeSvcName, fakeMethodName), structpb.NewNullValue(), structpb.NewNullValue()))
|
||||
assert.Equal(t, 1, handlerCalled)
|
||||
})
|
||||
|
||||
|
@ -173,7 +173,7 @@ func TestGRPCConnector(t *testing.T) {
|
|||
require.NoError(t, connector.Dial(""))
|
||||
defer connector.Close()
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
assert.True(t, connector.conn.WaitForStateChange(ctx, connectivity.Idle))
|
||||
|
|
|
@ -149,7 +149,7 @@ func TestPubSubPluggableCalls(t *testing.T) {
|
|||
}()
|
||||
|
||||
ps := fromConnector(testLogger, connector)
|
||||
err = ps.Init(context.Background(), pubsub.Metadata{
|
||||
err = ps.Init(t.Context(), pubsub.Metadata{
|
||||
Base: contribMetadata.Base{},
|
||||
})
|
||||
|
||||
|
@ -181,7 +181,7 @@ func TestPubSubPluggableCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = ps.Publish(context.Background(), &pubsub.PublishRequest{
|
||||
err = ps.Publish(t.Context(), &pubsub.PublishRequest{
|
||||
Topic: fakeTopic,
|
||||
})
|
||||
|
||||
|
@ -202,7 +202,7 @@ func TestPubSubPluggableCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = ps.Publish(context.Background(), &pubsub.PublishRequest{
|
||||
err = ps.Publish(t.Context(), &pubsub.PublishRequest{
|
||||
Topic: fakeTopic,
|
||||
})
|
||||
|
||||
|
@ -265,7 +265,7 @@ func TestPubSubPluggableCalls(t *testing.T) {
|
|||
handleErrors <- errors.New("fake-error")
|
||||
close(handleErrors)
|
||||
|
||||
err = ps.Subscribe(context.Background(), pubsub.SubscribeRequest{
|
||||
err = ps.Subscribe(t.Context(), pubsub.SubscribeRequest{
|
||||
Topic: fakeTopic,
|
||||
}, func(_ context.Context, m *pubsub.NewMessage) error {
|
||||
handleCalled.Add(1)
|
||||
|
|
|
@ -130,7 +130,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
}()
|
||||
|
||||
secretStore := fromConnector(testLogger, connector)
|
||||
err = secretStore.Init(context.Background(), secretstores.Metadata{
|
||||
err = secretStore.Init(t.Context(), secretstores.Metadata{
|
||||
Base: contribMetadata.Base{},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
@ -162,7 +162,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := secretStore.GetSecret(context.Background(), secretstores.GetSecretRequest{
|
||||
resp, err := secretStore.GetSecret(t.Context(), secretstores.GetSecretRequest{
|
||||
Name: key,
|
||||
})
|
||||
assert.Equal(t, int64(1), svc.getSecretCalled.Load())
|
||||
|
@ -183,7 +183,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := gSecretStores.BulkGetSecret(context.Background(), secretstores.BulkGetSecretRequest{})
|
||||
resp, err := gSecretStores.BulkGetSecret(t.Context(), secretstores.BulkGetSecretRequest{})
|
||||
assert.Equal(t, int64(1), svc.bulkGetSecretCalled.Load())
|
||||
str := err.Error()
|
||||
assert.Equal(t, err.Error(), str)
|
||||
|
|
|
@ -69,7 +69,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("no error", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
count.Add(1)
|
||||
return nil
|
||||
|
@ -82,7 +82,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("does not retry on etag error", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
count.Add(1)
|
||||
return etagInvalidErr
|
||||
|
@ -97,7 +97,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("retries on other errors", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
count.Add(1)
|
||||
return simulatedErr
|
||||
|
@ -110,7 +110,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("success on second attempt", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
if count.Add(1) == 1 {
|
||||
return simulatedErr
|
||||
|
@ -132,7 +132,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("all successful", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
nil, // The single method should not be invoked, so this will panic if it happens
|
||||
func(ctx context.Context, req []state.SetRequest, opts state.BulkStoreOpts) error {
|
||||
count.Add(1)
|
||||
|
@ -145,7 +145,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("key1 successful, key2 etag mismatch", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
nil, // The single method should not be invoked, so this will panic if it happens
|
||||
func(ctx context.Context, req []state.SetRequest, opts state.BulkStoreOpts) error {
|
||||
count.Add(1)
|
||||
|
@ -163,7 +163,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("key1 etag invalid, key2 etag mismatch", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
nil, // The single method should not be invoked, so this will panic if it happens
|
||||
func(ctx context.Context, req []state.SetRequest, opts state.BulkStoreOpts) error {
|
||||
count.Add(1)
|
||||
|
@ -183,7 +183,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
t.Run("key1 successful, key2 fails and is retried", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
// This should retry, but the second time only key2 should be requested
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
require.Equal(t, "key2", req.Key)
|
||||
count.Add(1)
|
||||
|
@ -204,7 +204,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
t.Run("key1 fails and is retried, key2 has etag error", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
// This should retry, but the second time only key1 should be requested
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
require.Equal(t, "key1", req.Key)
|
||||
count.Add(1)
|
||||
|
@ -232,7 +232,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
count := atomic.Uint32{}
|
||||
// This should retry, but the second time only key1 should be requested
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs2, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs2, policyDef, state.BulkStoreOpts{},
|
||||
func(ctx context.Context, req *state.SetRequest) error {
|
||||
require.Equal(t, "key1", req.Key)
|
||||
count.Add(1)
|
||||
|
@ -259,7 +259,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
}
|
||||
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs2, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs2, policyDef, state.BulkStoreOpts{},
|
||||
nil, // The single method should not be invoked, so this will panic if it happens
|
||||
func(ctx context.Context, req []state.SetRequest, opts state.BulkStoreOpts) error {
|
||||
if count.Add(1) == 1 {
|
||||
|
@ -294,7 +294,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("retries when error is not a multierror", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
nil, // The single method should not be invoked, so this will panic if it happens
|
||||
func(ctx context.Context, req []state.SetRequest, opts state.BulkStoreOpts) error {
|
||||
count.Add(1)
|
||||
|
@ -308,7 +308,7 @@ func TestPerformBulkStoreOperation(t *testing.T) {
|
|||
|
||||
t.Run("retries when multierror contains a non-BulkStoreError error", func(t *testing.T) {
|
||||
count := atomic.Uint32{}
|
||||
err := PerformBulkStoreOperation(context.Background(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
err := PerformBulkStoreOperation(t.Context(), reqs, policyDef, state.BulkStoreOpts{},
|
||||
nil, // The single method should not be invoked, so this will panic if it happens
|
||||
func(ctx context.Context, req []state.SetRequest, opts state.BulkStoreOpts) error {
|
||||
count.Add(1)
|
||||
|
|
|
@ -174,7 +174,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
return dialer(ctx, opts...)
|
||||
})
|
||||
client := clientFactory(testLogger).(*grpcStateStore)
|
||||
require.NoError(t, client.Init(context.Background(), state.Metadata{}))
|
||||
require.NoError(t, client.Init(t.Context(), state.Metadata{}))
|
||||
return client, cleanup, err
|
||||
}
|
||||
|
||||
|
@ -208,7 +208,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
}()
|
||||
|
||||
ps := fromConnector(testLogger, connector)
|
||||
err = ps.Init(context.Background(), state.Metadata{
|
||||
err = ps.Init(t.Context(), state.Metadata{
|
||||
Base: contribMetadata.Base{},
|
||||
})
|
||||
|
||||
|
@ -241,7 +241,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
stStore, cleanup, err := getStateStore(svc)
|
||||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
err = stStore.Delete(context.Background(), &state.DeleteRequest{
|
||||
err = stStore.Delete(t.Context(), &state.DeleteRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -262,7 +262,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
stStore, cleanup, err := getStateStore(svc)
|
||||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
err = stStore.Delete(context.Background(), &state.DeleteRequest{
|
||||
err = stStore.Delete(t.Context(), &state.DeleteRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -292,7 +292,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
stStore, cleanup, err := getStateStore(svc)
|
||||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
err = stStore.Delete(context.Background(), &state.DeleteRequest{
|
||||
err = stStore.Delete(t.Context(), &state.DeleteRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -325,7 +325,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
stStore, cleanup, err := getStateStore(svc)
|
||||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
err = stStore.Delete(context.Background(), &state.DeleteRequest{
|
||||
err = stStore.Delete(t.Context(), &state.DeleteRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -349,7 +349,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.Get(context.Background(), &state.GetRequest{
|
||||
resp, err := stStore.Get(t.Context(), &state.GetRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -370,7 +370,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.Get(context.Background(), &state.GetRequest{
|
||||
resp, err := stStore.Get(t.Context(), &state.GetRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -400,7 +400,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.Get(context.Background(), &state.GetRequest{
|
||||
resp, err := stStore.Get(t.Context(), &state.GetRequest{
|
||||
Key: fakeKey,
|
||||
})
|
||||
|
||||
|
@ -423,7 +423,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.Set(context.Background(), &state.SetRequest{
|
||||
err = stStore.Set(t.Context(), &state.SetRequest{
|
||||
Key: fakeKey,
|
||||
Value: fakeData,
|
||||
})
|
||||
|
@ -445,7 +445,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.Set(context.Background(), &state.SetRequest{
|
||||
err = stStore.Set(t.Context(), &state.SetRequest{
|
||||
Key: fakeKey,
|
||||
Value: fakeData,
|
||||
})
|
||||
|
@ -488,7 +488,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.BulkSet(context.Background(), []state.SetRequest{}, state.BulkStoreOpts{})
|
||||
err = stStore.BulkSet(t.Context(), []state.SetRequest{}, state.BulkStoreOpts{})
|
||||
|
||||
require.Error(t, err)
|
||||
assert.Equal(t, int64(1), svc.bulkSetCalled.Load())
|
||||
|
@ -509,7 +509,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.BulkSet(context.Background(), requests, state.BulkStoreOpts{})
|
||||
err = stStore.BulkSet(t.Context(), requests, state.BulkStoreOpts{})
|
||||
|
||||
require.ErrorIs(t, ErrNilSetValue, err)
|
||||
assert.Equal(t, int64(0), svc.bulkSetCalled.Load())
|
||||
|
@ -536,7 +536,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.BulkSet(context.Background(), requests, state.BulkStoreOpts{})
|
||||
err = stStore.BulkSet(t.Context(), requests, state.BulkStoreOpts{})
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(1), svc.bulkSetCalled.Load())
|
||||
|
@ -561,7 +561,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.BulkDelete(context.Background(), requests, state.BulkStoreOpts{})
|
||||
err = stStore.BulkDelete(t.Context(), requests, state.BulkStoreOpts{})
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(1), svc.bulkDeleteCalled.Load())
|
||||
|
@ -583,7 +583,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.BulkDelete(context.Background(), requests, state.BulkStoreOpts{})
|
||||
err = stStore.BulkDelete(t.Context(), requests, state.BulkStoreOpts{})
|
||||
|
||||
require.Error(t, err)
|
||||
assert.Equal(t, int64(1), svc.bulkDeleteCalled.Load())
|
||||
|
@ -615,7 +615,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.BulkDelete(context.Background(), requests, state.BulkStoreOpts{})
|
||||
err = stStore.BulkDelete(t.Context(), requests, state.BulkStoreOpts{})
|
||||
|
||||
require.Error(t, err)
|
||||
_, ok := err.(*state.BulkDeleteRowMismatchError)
|
||||
|
@ -636,7 +636,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.BulkGet(context.Background(), requests, state.BulkGetOpts{})
|
||||
resp, err := stStore.BulkGet(t.Context(), requests, state.BulkGetOpts{})
|
||||
|
||||
require.Error(t, err)
|
||||
assert.Nil(t, resp)
|
||||
|
@ -669,7 +669,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.BulkGet(context.Background(), requests, state.BulkGetOpts{})
|
||||
resp, err := stStore.BulkGet(t.Context(), requests, state.BulkGetOpts{})
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, resp)
|
||||
|
@ -685,7 +685,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.Multi(context.Background(), &state.TransactionalStateRequest{
|
||||
err = stStore.Multi(t.Context(), &state.TransactionalStateRequest{
|
||||
Operations: []state.TransactionalStateOperation{},
|
||||
Metadata: map[string]string{},
|
||||
})
|
||||
|
@ -715,7 +715,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
err = stStore.Multi(context.Background(), &state.TransactionalStateRequest{
|
||||
err = stStore.Multi(t.Context(), &state.TransactionalStateRequest{
|
||||
Operations: []state.TransactionalStateOperation{
|
||||
operations[0],
|
||||
operations[1],
|
||||
|
@ -734,7 +734,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.Query(context.Background(), &state.QueryRequest{})
|
||||
resp, err := stStore.Query(t.Context(), &state.QueryRequest{})
|
||||
|
||||
require.Error(t, err)
|
||||
assert.Nil(t, resp)
|
||||
|
@ -774,7 +774,7 @@ func TestComponentCalls(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
defer cleanup()
|
||||
|
||||
resp, err := stStore.Query(context.Background(), request)
|
||||
resp, err := stStore.Query(t.Context(), request)
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, resp)
|
||||
|
|
|
@ -78,6 +78,8 @@ var defaultFeatures = map[Feature]bool{
|
|||
}
|
||||
|
||||
// Configuration is an internal (and duplicate) representation of Dapr's Configuration CRD.
|
||||
//
|
||||
//nolint:recvcheck
|
||||
type Configuration struct {
|
||||
metav1.TypeMeta `json:",inline" yaml:",inline"`
|
||||
// See https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#metadata
|
||||
|
|
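The //nolint:recvcheck comments added above Configuration (and the other structs in this commit) silence golangci-lint's recvcheck linter, which reports types that mix value and pointer receivers. A small illustration of the shape it flags, with the suppression applied — a hypothetical type, not code from this repository:

package example

import "strconv"

// Counter mixes receiver kinds: String uses a value receiver while Inc uses a
// pointer receiver, which is exactly what recvcheck reports.
//
//nolint:recvcheck
type Counter struct{ n int }

func (c Counter) String() string { return strconv.Itoa(c.n) }

func (c *Counter) Inc() { c.n++ }

Keeping the directive on the type, as done here, suppresses the finding without forcing a receiver rewrite on types whose mixed receivers are intentional (for example, value-receiver marshalling next to pointer-receiver mutation).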
|
@ -1,7 +1,6 @@
|
|||
package diagnostics
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
|
@ -26,8 +25,8 @@ func TestPubSub(t *testing.T) {
|
|||
t.Run("record drop by app or sidecar", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.PubsubIngressEvent(context.Background(), componentName, "drop", "success", "A", 1)
|
||||
c.PubsubIngressEvent(context.Background(), componentName, "drop", "drop", "A", 1)
|
||||
c.PubsubIngressEvent(t.Context(), componentName, "drop", "success", "A", 1)
|
||||
c.PubsubIngressEvent(t.Context(), componentName, "drop", "drop", "A", 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/pubsub_ingress/count")
|
||||
v := view.Find("component/pubsub_ingress/count")
|
||||
|
@ -42,7 +41,7 @@ func TestPubSub(t *testing.T) {
|
|||
t.Run("record ingress count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.PubsubIngressEvent(context.Background(), componentName, "retry", "retry", "A", 0)
|
||||
c.PubsubIngressEvent(t.Context(), componentName, "retry", "retry", "A", 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/pubsub_ingress/count")
|
||||
v := view.Find("component/pubsub_ingress/count")
|
||||
|
@ -53,7 +52,7 @@ func TestPubSub(t *testing.T) {
|
|||
t.Run("record ingress latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.PubsubIngressEvent(context.Background(), componentName, "retry", "", "A", 1)
|
||||
c.PubsubIngressEvent(t.Context(), componentName, "retry", "", "A", 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/pubsub_ingress/latencies")
|
||||
v := view.Find("component/pubsub_ingress/latencies")
|
||||
|
@ -66,7 +65,7 @@ func TestPubSub(t *testing.T) {
|
|||
t.Run("record egress latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.PubsubEgressEvent(context.Background(), componentName, "A", true, 1)
|
||||
c.PubsubEgressEvent(t.Context(), componentName, "A", true, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/pubsub_egress/latencies")
|
||||
v := view.Find("component/pubsub_egress/latencies")
|
||||
|
@ -81,7 +80,7 @@ func TestBindings(t *testing.T) {
|
|||
t.Run("record input binding count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.InputBindingEvent(context.Background(), componentName, false, 0)
|
||||
c.InputBindingEvent(t.Context(), componentName, false, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/input_binding/count")
|
||||
v := view.Find("component/input_binding/count")
|
||||
|
@ -92,7 +91,7 @@ func TestBindings(t *testing.T) {
|
|||
t.Run("record input binding latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.InputBindingEvent(context.Background(), componentName, false, 1)
|
||||
c.InputBindingEvent(t.Context(), componentName, false, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/input_binding/latencies")
|
||||
v := view.Find("component/input_binding/count")
|
||||
|
@ -105,7 +104,7 @@ func TestBindings(t *testing.T) {
|
|||
t.Run("record output binding count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.OutputBindingEvent(context.Background(), componentName, "set", false, 0)
|
||||
c.OutputBindingEvent(t.Context(), componentName, "set", false, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/output_binding/count")
|
||||
v := view.Find("component/input_binding/count")
|
||||
|
@ -116,7 +115,7 @@ func TestBindings(t *testing.T) {
|
|||
t.Run("record output binding latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.OutputBindingEvent(context.Background(), componentName, "set", false, 1)
|
||||
c.OutputBindingEvent(t.Context(), componentName, "set", false, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/output_binding/latencies")
|
||||
v := view.Find("component/output_binding/latencies")
|
||||
|
@ -131,7 +130,7 @@ func TestState(t *testing.T) {
|
|||
t.Run("record state count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.StateInvoked(context.Background(), componentName, "get", false, 0)
|
||||
c.StateInvoked(t.Context(), componentName, "get", false, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/state/count")
|
||||
v := view.Find("component/state/count")
|
||||
|
@ -142,7 +141,7 @@ func TestState(t *testing.T) {
|
|||
t.Run("record state latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.StateInvoked(context.Background(), componentName, "get", false, 1)
|
||||
c.StateInvoked(t.Context(), componentName, "get", false, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/state/latencies")
|
||||
v := view.Find("component/state/latencies")
|
||||
|
@ -156,7 +155,7 @@ func TestConfiguration(t *testing.T) {
|
|||
t.Run("record configuration count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.ConfigurationInvoked(context.Background(), componentName, "get", false, 0)
|
||||
c.ConfigurationInvoked(t.Context(), componentName, "get", false, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/configuration/count")
|
||||
v := view.Find("component/configuration/count")
|
||||
|
@ -167,7 +166,7 @@ func TestConfiguration(t *testing.T) {
|
|||
t.Run("record configuration latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.ConfigurationInvoked(context.Background(), componentName, "get", false, 1)
|
||||
c.ConfigurationInvoked(t.Context(), componentName, "get", false, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/configuration/latencies")
|
||||
v := view.Find("component/configuration/latencies")
|
||||
|
@ -182,7 +181,7 @@ func TestSecrets(t *testing.T) {
|
|||
t.Run("record secret count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.SecretInvoked(context.Background(), componentName, "get", false, 0)
|
||||
c.SecretInvoked(t.Context(), componentName, "get", false, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/secret/count")
|
||||
v := view.Find("component/secret/count")
|
||||
|
@ -193,7 +192,7 @@ func TestSecrets(t *testing.T) {
|
|||
t.Run("record secret latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.SecretInvoked(context.Background(), componentName, "get", false, 1)
|
||||
c.SecretInvoked(t.Context(), componentName, "get", false, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/secret/latencies")
|
||||
v := view.Find("component/secret/latencies")
|
||||
|
@ -208,7 +207,7 @@ func TestConversation(t *testing.T) {
|
|||
t.Run("record conversation count", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.ConversationInvoked(context.Background(), componentName, false, 0)
|
||||
c.ConversationInvoked(t.Context(), componentName, false, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/conversation/count")
|
||||
v := view.Find("component/conversation/count")
|
||||
|
@ -219,7 +218,7 @@ func TestConversation(t *testing.T) {
|
|||
t.Run("record conversation latency", func(t *testing.T) {
|
||||
c := componentsMetrics()
|
||||
|
||||
c.ConversationInvoked(context.Background(), componentName, false, 1)
|
||||
c.ConversationInvoked(t.Context(), componentName, false, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData("component/conversation/latencies")
|
||||
v := view.Find("component/conversation/latencies")
|
||||
|
|
|
@ -19,7 +19,6 @@ import (
|
|||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
@ -75,7 +74,7 @@ func TestUserDefinedMetadata(t *testing.T) {
|
|||
"no-attr": []string{"value3"},
|
||||
}
|
||||
|
||||
testCtx := grpcMetadata.NewIncomingContext(context.Background(), md)
|
||||
testCtx := grpcMetadata.NewIncomingContext(t.Context(), md)
|
||||
metadata.SetMetadataInContextUnary(testCtx, nil, nil, func(ctx context.Context, req any) (any, error) {
|
||||
testCtx = ctx
|
||||
return nil, nil
|
||||
|
@ -90,7 +89,7 @@ func TestUserDefinedMetadata(t *testing.T) {
|
|||
|
||||
func TestSpanContextToGRPCMetadata(t *testing.T) {
|
||||
t.Run("empty span context", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
newCtx := SpanContextToGRPCMetadata(ctx, trace.SpanContext{})
|
||||
|
||||
assert.Equal(t, ctx, newCtx)
|
||||
|
@ -103,7 +102,7 @@ func TestGRPCTraceUnaryServerInterceptor(t *testing.T) {
|
|||
tp := sdktrace.NewTracerProvider(
|
||||
sdktrace.WithBatcher(exp),
|
||||
)
|
||||
defer func() { _ = tp.Shutdown(context.Background()) }()
|
||||
defer func() { _ = tp.Shutdown(t.Context()) }()
|
||||
otel.SetTracerProvider(tp)
|
||||
|
||||
interceptor := GRPCTraceUnaryServerInterceptor("fakeAppID", config.TracingSpec{SamplingRate: "1"})
|
||||
|
@ -113,7 +112,7 @@ func TestGRPCTraceUnaryServerInterceptor(t *testing.T) {
|
|||
testTraceBinary := diagUtils.BinaryFromSpanContext(testSpanContext)
|
||||
|
||||
t.Run("grpc-trace-bin is given", func(t *testing.T) {
|
||||
ctx := grpcMetadata.NewIncomingContext(context.Background(), grpcMetadata.Pairs("grpc-trace-bin", string(testTraceBinary)))
|
||||
ctx := grpcMetadata.NewIncomingContext(t.Context(), grpcMetadata.Pairs("grpc-trace-bin", string(testTraceBinary)))
|
||||
fakeInfo := &grpc.UnaryServerInfo{
|
||||
FullMethod: "/dapr.proto.runtime.v1.Dapr/GetState",
|
||||
}
|
||||
|
@ -154,7 +153,7 @@ func TestGRPCTraceUnaryServerInterceptor(t *testing.T) {
|
|||
return nil, errors.New("fake error")
|
||||
}
|
||||
|
||||
interceptor(context.Background(), fakeReq, fakeInfo, assertHandler)
|
||||
interceptor(t.Context(), fakeReq, fakeInfo, assertHandler)
|
||||
|
||||
sc := span.SpanContext()
|
||||
traceID := sc.TraceID()
|
||||
|
@ -178,11 +177,11 @@ func TestGRPCTraceUnaryServerInterceptor(t *testing.T) {
|
|||
return nil, errors.New("fake error")
|
||||
}
|
||||
|
||||
interceptor(context.Background(), fakeReq, fakeInfo, assertHandler)
|
||||
interceptor(t.Context(), fakeReq, fakeInfo, assertHandler)
|
||||
|
||||
sc := span.SpanContext()
|
||||
spanString := fmt.Sprintf("%v", span)
|
||||
assert.True(t, strings.Contains(spanString, "CallLocal/targetID/method1"))
|
||||
assert.Contains(t, spanString, "CallLocal/targetID/method1")
|
||||
traceID := sc.TraceID()
|
||||
spanID := sc.SpanID()
|
||||
assert.NotEmpty(t, hex.EncodeToString(traceID[:]))
|
||||
|
@ -201,7 +200,7 @@ func TestGRPCTraceUnaryServerInterceptor(t *testing.T) {
|
|||
)
|
||||
oldTracerProvider := otel.GetTracerProvider()
|
||||
defer func() {
|
||||
_ = tp.Shutdown(context.Background())
|
||||
_ = tp.Shutdown(t.Context())
|
||||
// reset the tracer provider to the old one once the test completes
|
||||
otel.SetTracerProvider(oldTracerProvider)
|
||||
}()
|
||||
|
@ -222,11 +221,11 @@ func TestGRPCTraceUnaryServerInterceptor(t *testing.T) {
|
|||
return nil, status.Error(codes.Internal, errors.New("fake status error").Error())
|
||||
}
|
||||
|
||||
interceptor(context.Background(), fakeReq, fakeInfo, assertHandler)
|
||||
interceptor(t.Context(), fakeReq, fakeInfo, assertHandler)
|
||||
|
||||
sc := span.SpanContext()
|
||||
spanString := fmt.Sprintf("%v", span)
|
||||
assert.True(t, strings.Contains(spanString, "CallLocal/targetID/method1"))
|
||||
assert.Contains(t, spanString, "CallLocal/targetID/method1")
|
||||
traceID := sc.TraceID()
|
||||
spanID := sc.SpanID()
|
||||
assert.NotEmpty(t, hex.EncodeToString(traceID[:]))
|
||||
|
@ -240,7 +239,7 @@ func TestGRPCTraceStreamServerInterceptor(t *testing.T) {
|
|||
tp := sdktrace.NewTracerProvider(
|
||||
sdktrace.WithBatcher(exp),
|
||||
)
|
||||
defer func() { _ = tp.Shutdown(context.Background()) }()
|
||||
defer func() { _ = tp.Shutdown(t.Context()) }()
|
||||
otel.SetTracerProvider(tp)
|
||||
|
||||
interceptor := GRPCTraceStreamServerInterceptor("test", config.TracingSpec{SamplingRate: "1"})
|
||||
|
@ -268,7 +267,7 @@ func TestGRPCTraceStreamServerInterceptor(t *testing.T) {
|
|||
FullMethod: "/dapr.proto.runtime.v1.Dapr/GetState",
|
||||
}
|
||||
|
||||
ctx := grpcMetadata.NewIncomingContext(context.Background(), grpcMetadata.Pairs("grpc-trace-bin", string(testTraceBinary)))
|
||||
ctx := grpcMetadata.NewIncomingContext(t.Context(), grpcMetadata.Pairs("grpc-trace-bin", string(testTraceBinary)))
|
||||
ctx, _ = metadata.SetMetadataInTapHandle(ctx, nil)
|
||||
|
||||
var span trace.Span
|
||||
|
@ -326,7 +325,7 @@ func TestGRPCTraceStreamServerInterceptor(t *testing.T) {
|
|||
FullMethod: "/dapr.proto.internals.v1.ServiceInvocation/CallLocal",
|
||||
}
|
||||
|
||||
ctx := grpcMetadata.NewIncomingContext(context.Background(), grpcMetadata.Pairs("grpc-trace-bin", string(testTraceBinary)))
|
||||
ctx := grpcMetadata.NewIncomingContext(t.Context(), grpcMetadata.Pairs("grpc-trace-bin", string(testTraceBinary)))
|
||||
ctx, _ = metadata.SetMetadataInTapHandle(ctx, nil)
|
||||
|
||||
var span trace.Span
|
||||
|
@ -384,7 +383,7 @@ func TestGRPCTraceStreamServerInterceptor(t *testing.T) {
|
|||
GRPCProxyAppIDKey: "myapp",
|
||||
"grpc-trace-bin": string(testTraceBinary),
|
||||
})
|
||||
ctx := grpcMetadata.NewIncomingContext(context.Background(), md)
|
||||
ctx := grpcMetadata.NewIncomingContext(t.Context(), md)
|
||||
ctx, _ = metadata.SetMetadataInTapHandle(ctx, nil)
|
||||
|
||||
var span trace.Span
|
||||
|
@ -411,7 +410,7 @@ func TestGRPCTraceStreamServerInterceptor(t *testing.T) {
|
|||
md := grpcMetadata.New(map[string]string{
|
||||
GRPCProxyAppIDKey: "myapp",
|
||||
})
|
||||
ctx := grpcMetadata.NewIncomingContext(context.Background(), md)
|
||||
ctx := grpcMetadata.NewIncomingContext(t.Context(), md)
|
||||
ctx, _ = metadata.SetMetadataInTapHandle(ctx, nil)
|
||||
|
||||
var span trace.Span
|
||||
|
|
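Besides the context swap, the tracing tests above replace assert.True(t, strings.Contains(...)) with assert.Contains, the form testifylint prefers because a failure prints the haystack and the needle instead of only "false was not true". A minimal sketch with made-up values:

package example

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestSpanName(t *testing.T) {
	spanString := "span{name: CallLocal/targetID/method1}"

	// Old style: on failure the report only says the boolean was false.
	assert.True(t, strings.Contains(spanString, "CallLocal/targetID/method1"))

	// New style: on failure testify prints both strings, which is why the
	// linted code switches to it.
	assert.Contains(t, spanString, "CallLocal/targetID/method1")
}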
|
@ -39,7 +39,7 @@ func BenchmarkHTTPMiddlewareLowCardinalityNoPathMatching(b *testing.B) {
|
|||
}))
|
||||
|
||||
// act
|
||||
for i := 0; i < b.N; i++ {
|
||||
for i := range b.N {
|
||||
testRequest := fakeOrdersHTTPRequest(requestBody, i)
|
||||
handler.ServeHTTP(httptest.NewRecorder(), testRequest)
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ func BenchmarkHTTPMiddlewareHighCardinalityNoPathMatching(b *testing.B) {
|
|||
}))
|
||||
|
||||
// act
|
||||
for i := 0; i < b.N; i++ {
|
||||
for i := range b.N {
|
||||
testRequest := fakeOrdersHTTPRequest(requestBody, i)
|
||||
handler.ServeHTTP(httptest.NewRecorder(), testRequest)
|
||||
}
|
||||
|
@ -75,7 +75,7 @@ func BenchmarkHTTPMiddlewareLowCardinalityWithPathMatching(b *testing.B) {
|
|||
}))
|
||||
|
||||
// act
|
||||
for i := 0; i < b.N; i++ {
|
||||
for i := range b.N {
|
||||
testRequest := fakeOrdersHTTPRequest(requestBody, i)
|
||||
handler.ServeHTTP(httptest.NewRecorder(), testRequest)
|
||||
}
|
||||
|
@ -92,7 +92,7 @@ func BenchmarkHTTPMiddlewareHighCardinalityWithPathMatching(b *testing.B) {
|
|||
}))
|
||||
|
||||
// act
|
||||
for i := 0; i < b.N; i++ {
|
||||
for i := range b.N {
|
||||
testRequest := fakeOrdersHTTPRequest(requestBody, i)
|
||||
handler.ServeHTTP(httptest.NewRecorder(), testRequest)
|
||||
}
|
||||
|
|
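The benchmark hunks above rewrite the classic for i := 0; i < b.N; i++ counter as for i := range b.N, the range-over-int form available since Go 1.22 that intrange-style checks push for. A stripped-down sketch (hypothetical benchmark, not from this file):

package example

import "testing"

func BenchmarkSquare(b *testing.B) {
	// i still takes the values 0 through b.N-1; only the loop syntax changes.
	for i := range b.N {
		_ = i * i // placeholder for the work being measured
	}
}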
|
@ -260,7 +260,7 @@ func TestHTTPTraceMiddleware(t *testing.T) {
|
|||
tp := sdktrace.NewTracerProvider(
|
||||
sdktrace.WithBatcher(exp),
|
||||
)
|
||||
defer func() { _ = tp.Shutdown(context.Background()) }()
|
||||
defer func() { _ = tp.Shutdown(t.Context()) }()
|
||||
otel.SetTracerProvider(tp)
|
||||
|
||||
t.Run("traceparent is given in request and sampling is enabled", func(t *testing.T) {
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package diagnostics
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
@ -51,7 +50,7 @@ func TestRegexRulesSingle(t *testing.T) {
|
|||
view.Unregister(view.Find(statName))
|
||||
})
|
||||
|
||||
stats.RecordWithTags(context.Background(),
|
||||
stats.RecordWithTags(t.Context(),
|
||||
diagUtils.WithTags(testStat.Name(), methodKey, "/orders/123"),
|
||||
testStat.M(1))
|
||||
|
||||
|
@ -74,7 +73,7 @@ func TestRegexRulesSingle(t *testing.T) {
|
|||
s := newGRPCMetrics()
|
||||
s.Init("test", nil)
|
||||
|
||||
stats.RecordWithTags(context.Background(),
|
||||
stats.RecordWithTags(t.Context(),
|
||||
diagUtils.WithTags(testStat.Name(), methodKey, "/siths/123"),
|
||||
testStat.M(1))
|
||||
|
||||
|
@ -97,10 +96,10 @@ func TestRegexRulesSingle(t *testing.T) {
|
|||
s := newGRPCMetrics()
|
||||
s.Init("test", nil)
|
||||
|
||||
stats.RecordWithTags(context.Background(),
|
||||
stats.RecordWithTags(t.Context(),
|
||||
diagUtils.WithTags(testStat.Name(), methodKey, "/orders/123"),
|
||||
testStat.M(1))
|
||||
stats.RecordWithTags(context.Background(),
|
||||
stats.RecordWithTags(t.Context(),
|
||||
diagUtils.WithTags(testStat.Name(), methodKey, "/lightsabers/123"),
|
||||
testStat.M(1))
|
||||
|
||||
|
|
|
@ -216,7 +216,7 @@ func TestResiliencyCountMonitoringCBStates(t *testing.T) {
|
|||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
for range 2 {
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, nil
|
||||
})
|
||||
|
@ -232,7 +232,7 @@ func TestResiliencyCountMonitoringCBStates(t *testing.T) {
|
|||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
for range 3 {
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -251,7 +251,7 @@ func TestResiliencyCountMonitoringCBStates(t *testing.T) {
|
|||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
for range 3 {
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -259,7 +259,7 @@ func TestResiliencyCountMonitoringCBStates(t *testing.T) {
|
|||
// let the circuit breaker go to the half-open state (5x cb timeout)
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -336,7 +336,7 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
for range 2 {
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, nil
|
||||
})
|
||||
|
@ -349,7 +349,7 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
unitFn: func() {
|
||||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -370,7 +370,7 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
for range 2 {
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -392,12 +392,12 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
unitFn: func() {
|
||||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
policyRunner = resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner = resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -421,7 +421,7 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
r := createTestResiliency(testResiliencyName, testResiliencyNamespace, "fakeStateStore")
|
||||
for range 2 {
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
@ -429,7 +429,7 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
// let the circuit breaker go to the half-open state (5x cb timeout) and then return success to close it
|
||||
time.Sleep(1000 * time.Millisecond)
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, nil
|
||||
})
|
||||
|
@ -437,7 +437,7 @@ func TestResiliencyActivationsCountMonitoring(t *testing.T) {
|
|||
// now open the circuit breaker again
|
||||
for range 2 {
|
||||
policyDef := r.EndpointPolicy("fakeApp", "fakeEndpoint")
|
||||
policyRunner := resiliency.NewRunner[any](context.Background(), policyDef)
|
||||
policyRunner := resiliency.NewRunner[any](t.Context(), policyDef)
|
||||
_, _ = policyRunner(func(ctx context.Context) (interface{}, error) {
|
||||
return nil, errors.New("fake error")
|
||||
})
|
||||
|
|
|
@ -120,7 +120,7 @@ func TestStartInternalCallbackSpan(t *testing.T) {
|
|||
tp := sdktrace.NewTracerProvider(
|
||||
sdktrace.WithBatcher(exp),
|
||||
)
|
||||
defer func() { _ = tp.Shutdown(context.Background()) }()
|
||||
defer func() { _ = tp.Shutdown(t.Context()) }()
|
||||
otel.SetTracerProvider(tp)
|
||||
|
||||
t.Run("traceparent is provided and sampling is enabled", func(t *testing.T) {
|
||||
|
@ -133,7 +133,7 @@ func TestStartInternalCallbackSpan(t *testing.T) {
|
|||
}
|
||||
parent := trace.NewSpanContext(scConfig)
|
||||
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
_, gotSp := StartInternalCallbackSpan(ctx, "testSpanName", parent, traceSpec)
|
||||
sc := gotSp.SpanContext()
|
||||
|
@ -153,7 +153,7 @@ func TestStartInternalCallbackSpan(t *testing.T) {
|
|||
}
|
||||
parent := trace.NewSpanContext(scConfig)
|
||||
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
|
||||
ctx, gotSp := StartInternalCallbackSpan(ctx, "testSpanName", parent, traceSpec)
|
||||
assert.Nil(t, gotSp)
|
||||
|
@ -221,9 +221,9 @@ func runTraces(t *testing.T, testName string, numTraces int, samplingRate string
|
|||
sampledCount := 0
|
||||
|
||||
for range numTraces {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
if hasParentSpanContext {
|
||||
traceID, _ := idg.NewIDs(context.Background())
|
||||
traceID, _ := idg.NewIDs(t.Context())
|
||||
scConfig := trace.SpanContextConfig{
|
||||
TraceID: traceID,
|
||||
SpanID: trace.SpanID{0, 240, 103, 170, 11, 169, 2, 183},
|
||||
|
@ -368,7 +368,7 @@ func defaultIDGenerator() IDGenerator {
|
|||
func TestTraceIDAndStateFromSpan(t *testing.T) {
|
||||
t.Run("non-empty span, id and state are not empty", func(t *testing.T) {
|
||||
idg := defaultIDGenerator()
|
||||
traceID, _ := idg.NewIDs(context.Background())
|
||||
traceID, _ := idg.NewIDs(t.Context())
|
||||
scConfig := trace.SpanContextConfig{
|
||||
TraceID: traceID,
|
||||
SpanID: trace.SpanID{0, 240, 103, 170, 11, 169, 2, 183},
|
||||
|
@ -380,7 +380,7 @@ func TestTraceIDAndStateFromSpan(t *testing.T) {
|
|||
scConfig.TraceState = ts
|
||||
parent := trace.NewSpanContext(scConfig)
|
||||
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
ctx = trace.ContextWithRemoteSpanContext(ctx, parent)
|
||||
_, span := tracer.Start(ctx, "testTraceSpan", trace.WithSpanKind(trace.SpanKindClient))
|
||||
|
||||
|
@ -391,7 +391,7 @@ func TestTraceIDAndStateFromSpan(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("empty span, id and state are empty", func(t *testing.T) {
|
||||
span := trace.SpanFromContext(context.Background())
|
||||
span := trace.SpanFromContext(t.Context())
|
||||
id, state := TraceIDAndStateFromSpan(span)
|
||||
assert.Empty(t, id)
|
||||
assert.Empty(t, state)
|
||||
|
|
|
@ -44,7 +44,7 @@ func TestSpanFromContext(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("not nil span for context", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
exp := newOtelFakeExporter()
|
||||
tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(exp))
|
||||
tracer := tp.Tracer("dapr-diagnostics-utils-tests")
|
||||
|
@ -60,7 +60,7 @@ func TestSpanFromContext(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("nil span for context", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
exp := newOtelFakeExporter()
|
||||
_ = sdktrace.NewTracerProvider(sdktrace.WithBatcher(exp))
|
||||
newCtx := trace.ContextWithSpan(ctx, nil)
|
||||
|
@ -72,7 +72,7 @@ func TestSpanFromContext(t *testing.T) {
|
|||
})
|
||||
|
||||
t.Run("nil", func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
ctx := t.Context()
|
||||
exp := newOtelFakeExporter()
|
||||
_ = sdktrace.NewTracerProvider(sdktrace.WithBatcher(exp))
|
||||
newCtx := trace.ContextWithSpan(ctx, nil)
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package diagnostics
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
@ -25,7 +24,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Failed Create Operation request count", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), CreateWorkflow, StatusFailed, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), CreateWorkflow, StatusFailed, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -36,7 +35,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Successful Create Operation request count", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), CreateWorkflow, StatusSuccess, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), CreateWorkflow, StatusSuccess, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -47,7 +46,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Create Operation request latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), CreateWorkflow, StatusSuccess, 1)
|
||||
w.WorkflowOperationEvent(t.Context(), CreateWorkflow, StatusSuccess, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData(latencyMetricName)
|
||||
v := view.Find(latencyMetricName)
|
||||
|
@ -62,7 +61,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Failed Get Operation Request", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), GetWorkflow, StatusFailed, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), GetWorkflow, StatusFailed, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -73,7 +72,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Successful Get Operation Request", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), GetWorkflow, StatusSuccess, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), GetWorkflow, StatusSuccess, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -84,7 +83,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Get Operation request latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), GetWorkflow, StatusSuccess, 1)
|
||||
w.WorkflowOperationEvent(t.Context(), GetWorkflow, StatusSuccess, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData(latencyMetricName)
|
||||
v := view.Find(latencyMetricName)
|
||||
|
@ -99,7 +98,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Failed Add Event request", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), AddEvent, StatusFailed, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), AddEvent, StatusFailed, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -110,7 +109,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Successful Add Event request", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), AddEvent, StatusSuccess, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), AddEvent, StatusSuccess, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -121,7 +120,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Add Event Operation latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), AddEvent, StatusSuccess, 1)
|
||||
w.WorkflowOperationEvent(t.Context(), AddEvent, StatusSuccess, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData(latencyMetricName)
|
||||
v := view.Find(latencyMetricName)
|
||||
|
@ -136,7 +135,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Failed Purge workflow request", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), PurgeWorkflow, StatusFailed, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), PurgeWorkflow, StatusFailed, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -147,7 +146,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Successful Purge workflow request", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), PurgeWorkflow, StatusSuccess, 0)
|
||||
w.WorkflowOperationEvent(t.Context(), PurgeWorkflow, StatusSuccess, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -158,7 +157,7 @@ func TestOperations(t *testing.T) {
|
|||
t.Run("Purge workflow Operation latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowOperationEvent(context.Background(), PurgeWorkflow, StatusSuccess, 1)
|
||||
w.WorkflowOperationEvent(t.Context(), PurgeWorkflow, StatusSuccess, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData(latencyMetricName)
|
||||
v := view.Find(latencyMetricName)
|
||||
|
@ -179,7 +178,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("Failed with retryable error", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.ActivityExecutionEvent(context.Background(), activityName, StatusRecoverable, 0)
|
||||
w.ActivityExecutionEvent(t.Context(), activityName, StatusRecoverable, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -190,7 +189,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("Failed with not-retryable error", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.ActivityExecutionEvent(context.Background(), activityName, StatusFailed, 0)
|
||||
w.ActivityExecutionEvent(t.Context(), activityName, StatusFailed, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -201,7 +200,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("Successful activity execution", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.ActivityExecutionEvent(context.Background(), activityName, StatusSuccess, 0)
|
||||
w.ActivityExecutionEvent(t.Context(), activityName, StatusSuccess, 0)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -212,7 +211,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("activity execution latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.ActivityExecutionEvent(context.Background(), activityName, StatusSuccess, 1)
|
||||
w.ActivityExecutionEvent(t.Context(), activityName, StatusSuccess, 1)
|
||||
|
||||
viewData, _ := view.RetrieveData(latencyMetricName)
|
||||
v := view.Find(latencyMetricName)
|
||||
|
@ -231,7 +230,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("Failed with retryable error", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowExecutionEvent(context.Background(), workflowName, StatusRecoverable)
|
||||
w.WorkflowExecutionEvent(t.Context(), workflowName, StatusRecoverable)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -242,7 +241,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("Failed with not-retryable error", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowExecutionEvent(context.Background(), workflowName, StatusFailed)
|
||||
w.WorkflowExecutionEvent(t.Context(), workflowName, StatusFailed)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -253,7 +252,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("Successful workflow execution", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowExecutionEvent(context.Background(), workflowName, StatusSuccess)
|
||||
w.WorkflowExecutionEvent(t.Context(), workflowName, StatusSuccess)
|
||||
|
||||
viewData, _ := view.RetrieveData(countMetricName)
|
||||
v := view.Find(countMetricName)
|
||||
|
@ -264,7 +263,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("workflow execution latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowExecutionLatency(context.Background(), workflowName, StatusSuccess, 20)
|
||||
w.WorkflowExecutionLatency(t.Context(), workflowName, StatusSuccess, 20)
|
||||
|
||||
viewData, _ := view.RetrieveData(executionLatencyMetricName)
|
||||
v := view.Find(executionLatencyMetricName)
|
||||
|
@ -276,7 +275,7 @@ func TestExecution(t *testing.T) {
|
|||
t.Run("workflow scheduling latency", func(t *testing.T) {
|
||||
w := initWorkflowMetrics()
|
||||
|
||||
w.WorkflowSchedulingLatency(context.Background(), workflowName, 10)
|
||||
w.WorkflowSchedulingLatency(t.Context(), workflowName, 10)
|
||||
|
||||
viewData, _ := view.RetrieveData(schedulingLatencyMetricName)
|
||||
v := view.Find(schedulingLatencyMetricName)
|
||||
|
|
|
@ -94,7 +94,7 @@ func TestComponentEncryptionKey(t *testing.T) {
|
|||
secondaryKey := hex.EncodeToString(bytes[:16]) // 128-bit key
|
||||
|
||||
secretStore := &mockSecretStore{}
|
||||
secretStore.Init(context.Background(), secretstores.Metadata{Base: metadata.Base{
|
||||
secretStore.Init(t.Context(), secretstores.Metadata{Base: metadata.Base{
|
||||
Properties: map[string]string{
|
||||
"primaryKey": primaryKey,
|
||||
"secondaryKey": secondaryKey,
|
||||
|
@ -158,7 +158,7 @@ func TestComponentEncryptionKey(t *testing.T) {
|
|||
func TestTryGetEncryptionKeyFromMetadataItem(t *testing.T) {
|
||||
t.Run("no secretRef on valid item", func(t *testing.T) {
|
||||
secretStore := &mockSecretStore{}
|
||||
secretStore.Init(context.Background(), secretstores.Metadata{Base: metadata.Base{
|
||||
secretStore.Init(t.Context(), secretstores.Metadata{Base: metadata.Base{
|
||||
Properties: map[string]string{
|
||||
"primaryKey": "123",
|
||||
"secondaryKey": "456",
|
||||
|
|
|
@ -60,7 +60,7 @@ func BenchmarkEval(b *testing.B) {
|
|||
},
|
||||
}
|
||||
var r interface{}
|
||||
for n := 0; n < b.N; n++ {
|
||||
for range b.N {
|
||||
r, _ = e.Eval(data)
|
||||
}
|
||||
result = r
|
||||
|
|
|
@ -228,7 +228,7 @@ func TestComponentsPatch(t *testing.T) {
|
|||
patch, volumeMount := c.componentsPatchOps(componentContainers, Injectable(test.appID, test.componentsList))
|
||||
patchJSON, _ := json.Marshal(patch)
|
||||
expPatchJSON, _ := json.Marshal(test.expPatch)
|
||||
assert.Equal(t, string(expPatchJSON), string(patchJSON))
|
||||
assert.JSONEq(t, string(expPatchJSON), string(patchJSON))
|
||||
assert.Equal(t, test.expMount, volumeMount)
|
||||
})
|
||||
}
|
||||
|
|
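The assertion change above (assert.Equal on marshalled JSON strings becoming assert.JSONEq) is another testifylint-driven rewrite: JSONEq unmarshals both arguments and compares the resulting values, so key order and whitespace no longer cause spurious failures. A short sketch with made-up documents:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestPatchJSON(t *testing.T) {
	want := `{"op":"add","path":"/spec/volumes"}`
	got := `{"path":"/spec/volumes","op":"add"}` // same document, different key order

	// assert.Equal(t, want, got) would fail on the raw strings;
	// JSONEq passes because it compares the parsed JSON values.
	assert.JSONEq(t, want, got)
}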
|
@ -27,6 +27,8 @@ import (
|
|||
)
|
||||
|
||||
// Config represents configuration options for the Dapr Sidecar Injector webhook server.
|
||||
//
|
||||
//nolint:recvcheck
|
||||
type Config struct {
|
||||
SidecarImage string `envconfig:"SIDECAR_IMAGE" required:"true"`
|
||||
SidecarImagePullPolicy string `envconfig:"SIDECAR_IMAGE_PULL_POLICY"`
|
||||
|
|
|
@ -14,7 +14,6 @@ limitations under the License.
|
|||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
|
@ -131,30 +130,30 @@ func TestAllowedControllersServiceAccountUID(t *testing.T) {
|
|||
Namespace: testCase.namespace,
|
||||
},
|
||||
}
|
||||
_, err := client.CoreV1().ServiceAccounts(testCase.namespace).Create(context.TODO(), sa, metav1.CreateOptions{})
|
||||
_, err := client.CoreV1().ServiceAccounts(testCase.namespace).Create(t.Context(), sa, metav1.CreateOptions{})
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
t.Run("injector config has no allowed service account", func(t *testing.T) {
|
||||
uids, err := AllowedControllersServiceAccountUID(context.TODO(), Config{}, client)
|
||||
uids, err := AllowedControllersServiceAccountUID(t.Context(), Config{}, client)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, uids, 2)
|
||||
})
|
||||
|
||||
t.Run("injector config has a valid allowed service account", func(t *testing.T) {
|
||||
uids, err := AllowedControllersServiceAccountUID(context.TODO(), Config{AllowedServiceAccounts: "test:test"}, client)
|
||||
uids, err := AllowedControllersServiceAccountUID(t.Context(), Config{AllowedServiceAccounts: "test:test"}, client)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, uids, 3)
|
||||
})
|
||||
|
||||
t.Run("injector config has a invalid allowed service account", func(t *testing.T) {
|
||||
uids, err := AllowedControllersServiceAccountUID(context.TODO(), Config{AllowedServiceAccounts: "abc:abc"}, client)
|
||||
uids, err := AllowedControllersServiceAccountUID(t.Context(), Config{AllowedServiceAccounts: "abc:abc"}, client)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, uids, 2)
|
||||
})
|
||||
|
||||
t.Run("injector config has multiple allowed service accounts", func(t *testing.T) {
|
||||
uids, err := AllowedControllersServiceAccountUID(context.TODO(), Config{AllowedServiceAccounts: "test:test,abc:abc"}, client)
|
||||
uids, err := AllowedControllersServiceAccountUID(t.Context(), Config{AllowedServiceAccounts: "test:test,abc:abc"}, client)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, uids, 3)
|
||||
})
|
||||
|
|
|
@ -14,7 +14,6 @@ limitations under the License.
|
|||
package disk
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
|
@ -52,7 +51,7 @@ spec:
|
|||
value: value2
|
||||
`
|
||||
require.NoError(t, os.WriteFile(filepath.Join(tmp, filename), []byte(yaml), fs.FileMode(0o600)))
|
||||
components, err := request.Load(context.Background())
|
||||
components, err := request.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, components, 1)
|
||||
})
|
||||
|
@ -71,7 +70,7 @@ kind: Component
|
|||
metadata:
|
||||
name: statestore`
|
||||
require.NoError(t, os.WriteFile(filepath.Join(tmp, filename), []byte(yaml), fs.FileMode(0o600)))
|
||||
components, err := request.Load(context.Background())
|
||||
components, err := request.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, components)
|
||||
})
|
||||
|
@ -81,7 +80,7 @@ name: statestore`
|
|||
Paths: []string{"test-path-no-exists"},
|
||||
})
|
||||
|
||||
components, err := request.Load(context.Background())
|
||||
components, err := request.Load(t.Context())
|
||||
require.Error(t, err)
|
||||
assert.Empty(t, components)
|
||||
})
|
||||
|
@ -230,7 +229,7 @@ metadata:
|
|||
Paths: []string{tmp},
|
||||
AppID: "myappid",
|
||||
})
|
||||
components, err := loader.Load(context.Background())
|
||||
components, err := loader.Load(t.Context())
|
||||
assert.Equal(t, test.expErr, err != nil, "%v", err)
|
||||
assert.Equal(t, test.expComps, components)
|
||||
})
|
||||
|
|
|
@ -106,7 +106,7 @@ func TestLoadComponents(t *testing.T) {
|
|||
podName: "testPodName",
|
||||
}
|
||||
|
||||
response, err := request.Load(context.Background())
|
||||
response, err := request.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, response)
|
||||
assert.Equal(t, "test", response[0].Name)
|
||||
|
|
|
@ -194,7 +194,7 @@ func TestInvokeRemote(t *testing.T) {
|
|||
WithMetadata(map[string][]string{invokev1.DestinationIDHeader: {"app1"}})
|
||||
defer request.Close()
|
||||
|
||||
res, _, err := messaging.invokeRemote(context.Background(), "app1", "namespace1", "addr1", request)
|
||||
res, _, err := messaging.invokeRemote(t.Context(), "app1", "namespace1", "addr1", request)
|
||||
require.NoError(t, err)
|
||||
|
||||
pd, err := res.ProtoWithData()
|
||||
|
@ -214,7 +214,7 @@ func TestInvokeRemote(t *testing.T) {
|
|||
WithMetadata(map[string][]string{invokev1.DestinationIDHeader: {"app1"}})
|
||||
defer request.Close()
|
||||
|
||||
res, _, err := messaging.invokeRemote(context.Background(), "app1", "namespace1", "addr1", request)
|
||||
res, _, err := messaging.invokeRemote(t.Context(), "app1", "namespace1", "addr1", request)
|
||||
require.NoError(t, err)
|
||||
|
||||
pd, err := res.ProtoWithData()
|
||||
|
@ -239,7 +239,7 @@ func TestInvokeRemote(t *testing.T) {
|
|||
WithMetadata(map[string][]string{invokev1.DestinationIDHeader: {"app1"}})
|
||||
defer request.Close()
|
||||
|
||||
res, _, err := messaging.invokeRemote(context.Background(), "app1", "namespace1", "addr1", request)
|
||||
res, _, err := messaging.invokeRemote(t.Context(), "app1", "namespace1", "addr1", request)
|
||||
require.NoError(t, err)
|
||||
|
||||
pd, err := res.ProtoWithData()
|
||||
|
@ -257,7 +257,7 @@ func TestInvokeRemote(t *testing.T) {
|
|||
WithMetadata(map[string][]string{invokev1.DestinationIDHeader: {"app1"}})
|
||||
defer request.Close()
|
||||
|
||||
res, _, err := messaging.invokeRemote(context.Background(), "app1", "namespace1", "addr1", request)
|
||||
res, _, err := messaging.invokeRemote(t.Context(), "app1", "namespace1", "addr1", request)
|
||||
require.Error(t, err)
|
||||
assert.Equal(t, fmt.Sprintf(streamingUnsupportedErr, "app1"), err.Error())
|
||||
assert.Nil(t, res)
|
||||
|
@ -273,7 +273,7 @@ func TestInvokeRemote(t *testing.T) {
|
|||
WithReplay(true)
|
||||
defer request.Close()
|
||||
|
||||
res, _, err := messaging.invokeRemote(context.Background(), "app1", "namespace1", "addr1", request)
|
||||
res, _, err := messaging.invokeRemote(t.Context(), "app1", "namespace1", "addr1", request)
|
||||
require.NoError(t, err)
|
||||
|
||||
pd, err := res.ProtoWithData()
|
||||
|
@ -297,7 +297,7 @@ func TestInvokeRemote(t *testing.T) {
|
|||
WithMetadata(map[string][]string{invokev1.DestinationIDHeader: {"app1"}})
|
||||
defer request.Close()
|
||||
|
||||
res, _, err := messaging.invokeRemote(context.Background(), "app1", "namespace1", "addr1", request)
|
||||
res, _, err := messaging.invokeRemote(t.Context(), "app1", "namespace1", "addr1", request)
|
||||
require.NoError(t, err)
|
||||
|
||||
pd, err := res.ProtoWithData()
|
||||
|
@ -475,7 +475,7 @@ func TestInvokeRemoteUnaryForHTTPEndpoint(t *testing.T) {
|
|||
channels: (new(channels.Channels)).WithEndpointChannels(map[string]channel.HTTPEndpointAppChannel{"abc": &mockChannel{}}),
|
||||
}
|
||||
|
||||
_, err := d.invokeRemoteUnaryForHTTPEndpoint(context.Background(), nil, "abc")
|
||||
_, err := d.invokeRemoteUnaryForHTTPEndpoint(t.Context(), nil, "abc")
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
|
@ -484,7 +484,7 @@ func TestInvokeRemoteUnaryForHTTPEndpoint(t *testing.T) {
|
|||
channels: new(channels.Channels),
|
||||
}
|
||||
|
||||
_, err := d.invokeRemoteUnaryForHTTPEndpoint(context.Background(), nil, "abc")
|
||||
_, err := d.invokeRemoteUnaryForHTTPEndpoint(t.Context(), nil, "abc")
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
|
|
|
@@ -101,7 +101,7 @@ func TestSetTelemetryFn(t *testing.T) {
 })

 proxy := p.(*proxy)
-ctx := metadata.NewOutgoingContext(context.TODO(), metadata.MD{"a": []string{"b"}})
+ctx := metadata.NewOutgoingContext(t.Context(), metadata.MD{"a": []string{"b"}})
 ctx = proxy.telemetryFn(ctx)

 md, _ := metadata.FromOutgoingContext(ctx)
@@ -138,7 +138,7 @@ func TestIntercept(t *testing.T) {
 }, nil
 })

-ctx := metadata.NewOutgoingContext(context.TODO(), metadata.MD{"a": []string{"b"}})
+ctx := metadata.NewOutgoingContext(t.Context(), metadata.MD{"a": []string{"b"}})
 proxy := p.(*proxy)
 _, conn, _, teardown, err := proxy.intercept(ctx, "/test")
 defer teardown(true)
@@ -164,7 +164,7 @@ func TestIntercept(t *testing.T) {
 }, nil
 })

-ctx := metadata.NewIncomingContext(context.TODO(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"b"}})
+ctx := metadata.NewIncomingContext(t.Context(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"b"}})
 proxy := p.(*proxy)
 _, _, _, _, err := proxy.intercept(ctx, "/test")

@@ -190,7 +190,7 @@ func TestIntercept(t *testing.T) {

 t.Setenv(securityConsts.AppAPITokenEnvVar, "token1")

-ctx := metadata.NewIncomingContext(context.TODO(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"a"}, securityConsts.APITokenHeader: []string{"oldtoken"}})
+ctx := metadata.NewIncomingContext(t.Context(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"a"}, securityConsts.APITokenHeader: []string{"oldtoken"}})
 proxy := p.(*proxy)
 ctx, conn, _, teardown, err := proxy.intercept(ctx, "/test")
 defer teardown(true)
@@ -223,7 +223,7 @@ func TestIntercept(t *testing.T) {

 t.Setenv(securityConsts.AppAPITokenEnvVar, "token1")

-ctx := metadata.NewIncomingContext(context.TODO(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"b"}})
+ctx := metadata.NewIncomingContext(t.Context(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"b"}})
 proxy := p.(*proxy)
 ctx, conn, _, teardown, err := proxy.intercept(ctx, "/test")
 defer teardown(true)
@@ -263,7 +263,7 @@ func TestIntercept(t *testing.T) {
 return ctx
 })

-ctx := metadata.NewIncomingContext(context.TODO(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"a"}})
+ctx := metadata.NewIncomingContext(t.Context(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"a"}})
 proxy := p.(*proxy)

 _, conn, _, teardown, err := proxy.intercept(ctx, "/test")
@@ -284,7 +284,7 @@ func TestIntercept(t *testing.T) {
 return ctx
 })

-ctx := metadata.NewIncomingContext(context.TODO(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"a"}})
+ctx := metadata.NewIncomingContext(t.Context(), metadata.MD{diagnostics.GRPCProxyAppIDKey: []string{"a"}})
 proxy := p.(*proxy)
 _, conn, _, teardown, err := proxy.intercept(ctx, "/test")
 defer teardown(true)

@@ -488,11 +488,11 @@ func TestWithDataObject(t *testing.T) {

 gotEnc, err := json.Marshal(got)
 require.NoError(t, err)
-assert.Equal(t, []byte(expectJSON), compactJSON(t, gotEnc))
+assert.JSONEq(t, expectJSON, string(compactJSON(t, gotEnc)))

 data, err := req.RawDataFull()
 require.NoError(t, err)
-assert.Equal(t, []byte(expectJSON), compactJSON(t, data))
+assert.JSONEq(t, expectJSON, string(compactJSON(t, data)))
 }

 func TestRequestReplayable(t *testing.T) {

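The assertion changes above swap byte-for-byte comparison for semantic JSON comparison: testify's assert.JSONEq unmarshals both strings and compares the resulting values, so key order and whitespace differences no longer cause failures. A small illustrative sketch (the JSON literals are invented):

package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestJSONEq(t *testing.T) {
	want := `{"orderId": 1, "customer": "a"}`
	got := `{"customer":"a","orderId":1}` // same document, different key order and spacing

	// assert.Equal on the raw strings (or raw bytes) would fail here;
	// assert.JSONEq compares the decoded JSON values instead.
	assert.JSONEq(t, want, got)
}
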
@@ -14,7 +14,6 @@ limitations under the License.
 package v1

 import (
-"context"
 "encoding/base64"
 "fmt"
 "sort"
@@ -49,7 +48,7 @@ func TestInternalMetadataToHTTPHeader(t *testing.T) {

 expectedKeyNames := []string{"custom-header", "dapr-method", "dapr-scheme", "dapr-path", "dapr-authority", "dapr-grpc-timeout"}
 savedHeaderKeyNames := []string{}
-ctx := context.Background()
+ctx := t.Context()
 InternalMetadataToHTTPHeader(ctx, fakeMetadata, func(k, v string) {
 savedHeaderKeyNames = append(savedHeaderKeyNames, k)
 })
@@ -111,7 +110,7 @@ func TestInternalMetadataToGrpcMetadata(t *testing.T) {
 },
 }

-ctx := context.Background()
+ctx := t.Context()

 t.Run("without http header conversion for http headers", func(t *testing.T) {
 convertedMD := InternalMetadataToGrpcMetadata(ctx, httpHeaders, false)
@@ -338,7 +337,7 @@ func TestWithCustomGrpcMetadata(t *testing.T) {
 md[customMetadataKey(i)] = customMetadataValue(i)
 }

-ctx := context.Background()
+ctx := t.Context()
 ctx = WithCustomGRPCMetadata(ctx, md)

 ctxMd, ok := metadata.FromOutgoingContext(ctx)

@@ -47,7 +47,7 @@ func TestMetricsExporter(t *testing.T) {
 Healthz: healthz.New(),
 })

-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 errCh := make(chan error)
 go func() {
 errCh <- e.Start(ctx)

@@ -94,7 +94,7 @@ func TestProcessComponentSecrets(t *testing.T) {
 },
 }

-err := processComponentSecrets(context.Background(), &c, "default", nil)
+err := processComponentSecrets(t.Context(), &c, "default", nil)
 require.NoError(t, err)
 })

@@ -136,13 +136,13 @@ func TestProcessComponentSecrets(t *testing.T) {
 }).
 Build()

-err = processComponentSecrets(context.Background(), &c, "default", client)
+err = processComponentSecrets(t.Context(), &c, "default", client)
 require.NoError(t, err)

 enc := base64.StdEncoding.EncodeToString([]byte("value1"))
 jsonEnc, _ := json.Marshal(enc)

-assert.Equal(t, jsonEnc, c.Spec.Metadata[0].Value.Raw)
+assert.JSONEq(t, string(jsonEnc), string(c.Spec.Metadata[0].Value.Raw))
 })

 t.Run("secret ref exists, default kubernetes secret store, secret extracted", func(t *testing.T) {
@@ -183,13 +183,13 @@ func TestProcessComponentSecrets(t *testing.T) {
 }).
 Build()

-err = processComponentSecrets(context.Background(), &c, "default", client)
+err = processComponentSecrets(t.Context(), &c, "default", client)
 require.NoError(t, err)

 enc := base64.StdEncoding.EncodeToString([]byte("value1"))
 jsonEnc, _ := json.Marshal(enc)

-assert.Equal(t, jsonEnc, c.Spec.Metadata[0].Value.Raw)
+assert.JSONEq(t, string(jsonEnc), string(c.Spec.Metadata[0].Value.Raw))
 })
 }

@@ -587,7 +587,7 @@ func TestListsNamespaced(t *testing.T) {
 assert.Equal(t, "sub1", sub.Name)
 assert.Equal(t, "namespace-a", sub.Namespace)

-res, err = api.ListSubscriptionsV2(context.TODO(), &operatorv1pb.ListSubscriptionsRequest{
+res, err = api.ListSubscriptionsV2(t.Context(), &operatorv1pb.ListSubscriptionsRequest{
 PodName: "baz",
 Namespace: "namespace-c",
 })
@@ -692,7 +692,7 @@ func TestListsNamespaced(t *testing.T) {
 assert.Equal(t, "obj1", endpoint.Name)
 assert.Equal(t, "namespace-a", endpoint.Namespace)

-res, err = api.ListHTTPEndpoints(context.TODO(), &operatorv1pb.ListHTTPEndpointsRequest{
+res, err = api.ListHTTPEndpoints(t.Context(), &operatorv1pb.ListHTTPEndpointsRequest{
 Namespace: "namespace-c",
 })
 require.Error(t, err)
@@ -719,7 +719,7 @@ func TestProcessHTTPEndpointSecrets(t *testing.T) {
 },
 }
 t.Run("secret ref exists, not kubernetes secret store, no error", func(t *testing.T) {
-err := processHTTPEndpointSecrets(context.Background(), &e, "default", nil)
+err := processHTTPEndpointSecrets(t.Context(), &e, "default", nil)
 require.NoError(t, err)
 })

@@ -744,11 +744,11 @@ func TestProcessHTTPEndpointSecrets(t *testing.T) {
 },
 }).
 Build()
-require.NoError(t, processHTTPEndpointSecrets(context.Background(), &e, "default", client))
+require.NoError(t, processHTTPEndpointSecrets(t.Context(), &e, "default", client))
 enc := base64.StdEncoding.EncodeToString([]byte("value1"))
 jsonEnc, err := json.Marshal(enc)
 require.NoError(t, err)
-assert.Equal(t, jsonEnc, e.Spec.Headers[0].Value.Raw)
+assert.JSONEq(t, string(jsonEnc), string(e.Spec.Headers[0].Value.Raw))
 })

 t.Run("secret ref exists, default kubernetes secret store, secret extracted", func(t *testing.T) {
@@ -773,12 +773,12 @@ func TestProcessHTTPEndpointSecrets(t *testing.T) {
 }).
 Build()

-require.NoError(t, processHTTPEndpointSecrets(context.Background(), &e, "default", client))
+require.NoError(t, processHTTPEndpointSecrets(t.Context(), &e, "default", client))

 enc := base64.StdEncoding.EncodeToString([]byte("value1"))
 jsonEnc, err := json.Marshal(enc)
 require.NoError(t, err)
-assert.Equal(t, jsonEnc, e.Spec.Headers[0].Value.Raw)
+assert.JSONEq(t, string(jsonEnc), string(e.Spec.Headers[0].Value.Raw))
 })
 }

@@ -794,7 +794,7 @@ func Test_Ready(t *testing.T) {
 close(ch)
 return ch
 },
-ctx: context.Background,
+ctx: t.Context,
 expErr: false,
 },
 "if context is cancelled, then expect error": {
@@ -803,7 +803,7 @@ func Test_Ready(t *testing.T) {
 return ch
 },
 ctx: func() context.Context {
-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 cancel()
 return ctx
 },

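In the Test_Ready table above, the ctx field is a context factory (func() context.Context) rather than a context value, which is why the method value t.Context can replace context.Background directly. A reduced, hypothetical sketch of that shape:

package example_test

import (
	"context"
	"testing"
)

func TestReadyTable(t *testing.T) {
	tests := map[string]struct {
		ctx    func() context.Context // a factory, so the method value t.Context fits here
		expErr bool
	}{
		"live context": {
			ctx:    t.Context,
			expErr: false,
		},
		"cancelled context": {
			ctx: func() context.Context {
				ctx, cancel := context.WithCancel(t.Context())
				cancel()
				return ctx
			},
			expErr: true,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			err := test.ctx().Err()
			if gotErr := err != nil; gotErr != test.expErr {
				t.Fatalf("got error %v, want error: %v", err, test.expErr)
			}
		})
	}
}
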
@@ -14,7 +14,6 @@ limitations under the License.
 package authz

 import (
-"context"
 "testing"

 "github.com/spiffe/go-spiffe/v2/spiffeid"
@@ -33,7 +32,7 @@ func Test_Request(t *testing.T) {
 pki := test.GenPKI(t, test.PKIOptions{LeafID: serverID, ClientID: appID})

 t.Run("no auth context should error", func(t *testing.T) {
-id, err := Request(context.Background(), "ns1")
+id, err := Request(t.Context(), "ns1")
 require.Error(t, err)
 assert.Equal(t, codes.PermissionDenied, status.Code(err))
 assert.Nil(t, id)

@@ -38,7 +38,7 @@ func Test_loop(t *testing.T) {
 appCh: make(chan *Event[compapi.Component]),
 }

-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 go func() {
 h.loop(ctx)
 close(done)
@@ -62,7 +62,7 @@ func Test_loop(t *testing.T) {
 }

 go func() {
-h.loop(context.Background())
+h.loop(t.Context())
 close(done)
 }()

@@ -93,7 +93,7 @@ func Test_loop(t *testing.T) {
 }

 go func() {
-h.loop(context.Background())
+h.loop(t.Context())
 close(done)
 }()

@@ -14,7 +14,6 @@ limitations under the License.
 package informer

 import (
-"context"
 "testing"
 "time"

@@ -46,7 +45,7 @@ func Test_WatchUpdates(t *testing.T) {
 assert.Equal(t, codes.PermissionDenied, status.Code(err))
 assert.Nil(t, appCh)

-appCh, err = i.WatchUpdates(context.Background(), "ns2")
+appCh, err = i.WatchUpdates(t.Context(), "ns2")
 require.Error(t, err)
 assert.Equal(t, codes.PermissionDenied, status.Code(err))
 assert.Nil(t, appCh)
@@ -65,7 +64,7 @@ func Test_WatchUpdates(t *testing.T) {
 appCh2, err := i.WatchUpdates(pki.ClientGRPCCtx(t), "ns1")
 require.NoError(t, err)

-i.handleEvent(context.Background(),
+i.handleEvent(t.Context(),
 &compapi.Component{
 ObjectMeta: metav1.ObjectMeta{Name: "comp1", Namespace: "ns1"},
 },
@@ -80,7 +79,7 @@ func Test_WatchUpdates(t *testing.T) {
 assert.Equal(c, 1, int(i.batchID.Load()))
 }, 5*time.Second, 100*time.Millisecond)

-i.handleEvent(context.Background(),
+i.handleEvent(t.Context(),
 &compapi.Component{
 ObjectMeta: metav1.ObjectMeta{Name: "comp1", Namespace: "ns1"},
 Spec: compapi.ComponentSpec{Type: "bindings.redis"},
@@ -96,7 +95,7 @@ func Test_WatchUpdates(t *testing.T) {
 assert.Equal(c, 2, int(i.batchID.Load()))
 }, 5*time.Second, 100*time.Millisecond)

-i.handleEvent(context.Background(),
+i.handleEvent(t.Context(),
 nil,
 &compapi.Component{
 ObjectMeta: metav1.ObjectMeta{Name: "comp2", Namespace: "ns1"},

@@ -1,7 +1,6 @@
 package handlers

 import (
-"context"
 "reflect"
 "testing"

@@ -96,20 +95,20 @@ func TestIsAnnotatedForDapr(t *testing.T) {
 func TestDaprService(t *testing.T) {
 t.Run("invalid empty app id", func(t *testing.T) {
 d := getDeployment("", "true")
-err := getTestDaprHandler().ensureDaprServicePresent(context.TODO(), "default", d)
+err := getTestDaprHandler().ensureDaprServicePresent(t.Context(), "default", d)
 require.Error(t, err)
 })

 t.Run("invalid char app id", func(t *testing.T) {
 d := getDeployment("myapp@", "true")
-err := getTestDaprHandler().ensureDaprServicePresent(context.TODO(), "default", d)
+err := getTestDaprHandler().ensureDaprServicePresent(t.Context(), "default", d)
 require.Error(t, err)
 })
 }

 func TestCreateDaprServiceAppIDAndMetricsSettings(t *testing.T) {
 testDaprHandler := getTestDaprHandler()
-ctx := context.Background()
+ctx := t.Context()
 myDaprService := types.NamespacedName{
 Namespace: "test",
 Name: "test",
@@ -136,7 +135,7 @@ func TestCreateDaprServiceAppIDAndMetricsSettings(t *testing.T) {

 func TestCreateDaprServiceAppIDAndPortsOverride(t *testing.T) {
 testDaprHandler := getTestDaprHandler()
-ctx := context.Background()
+ctx := t.Context()
 myDaprService := types.NamespacedName{
 Namespace: "test",
 Name: "test",
@@ -165,7 +164,7 @@ func TestPatchDaprService(t *testing.T) {
 cli := fake.NewClientBuilder().WithScheme(s).Build()
 testDaprHandler.Client = cli

-ctx := context.Background()
+ctx := t.Context()
 myDaprService := types.NamespacedName{
 Namespace: "test",
 Name: "test",
@@ -342,7 +341,7 @@ func TestInit(t *testing.T) {
 t.Run("test init dapr handler", func(t *testing.T) {
 assert.NotNil(t, handler)

-err := handler.Init(context.Background())
+err := handler.Init(t.Context())

 require.NoError(t, err)

@@ -82,7 +82,7 @@ func createMockPods(n, daprized, injected, daprdPresent int) (pods []*corev1.Pod
 }

 func TestDaprWatchdog_listPods(t *testing.T) {
-ctx := context.Background()
+ctx := t.Context()
 rl := ratelimit.NewUnlimited()

 t.Run("injectorNotPresent", func(t *testing.T) {
@@ -235,7 +235,7 @@ func Test_patchPodLabel(t *testing.T) {
 for _, tc := range tests {
 ctlClient := fake.NewClientBuilder().WithObjects(tc.pod).Build()
 t.Run(tc.name, func(t *testing.T) {
-if err := patchPodLabel(context.TODO(), ctlClient, tc.pod); (err != nil) != tc.wantErr {
+if err := patchPodLabel(t.Context(), ctlClient, tc.pod); (err != nil) != tc.wantErr {
 t.Fatalf("patchPodLabel() error = %v, wantErr %v", err, tc.wantErr)
 }
 if !tc.wantErr {
@@ -247,7 +247,7 @@ func Test_patchPodLabel(t *testing.T) {

 func TestDaprWatchdog_Start(t *testing.T) {
 // simple test of start
-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 cancelled := false
 defer func() {
 if !cancelled {

@@ -60,7 +60,7 @@ func TestCleanupHeartBeats(t *testing.T) {
 }

 func TestMonitorLeadership(t *testing.T) {
-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 raftClusterOpts, err := tests.RaftClusterOpts(t)

 require.NoError(t, err)

@@ -55,7 +55,7 @@ func TestMembershipChangeWorker(t *testing.T) {
 require.NoError(t, err)

 setupEach := func(t *testing.T) context.CancelFunc {
-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 var cancelServer context.CancelFunc

 serverAddress, testServer, clock, cancelServer = newTestPlacementServer(t, *raftOpts)
@@ -513,7 +513,7 @@ func PerformTableUpdateCostTime(t *testing.T) (wastedTime int64) {

 mockMessage := &v1pb.PlacementTables{Version: "demo"}

-ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ctx, cancel := context.WithTimeout(t.Context(), 10*time.Second)
 defer cancel()

 var start time.Time

@@ -59,7 +59,7 @@ func newTestPlacementServer(t *testing.T, raftOptions raft.Options) (string, *Se
 testServer.clock = clock

 serverStopped := make(chan struct{})
-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 go func() {
 defer close(serverStopped)
 err := testServer.Run(ctx)
@@ -106,14 +106,14 @@ func newTestClient(t *testing.T, serverAddress string) (*grpc.ClientConn, *net.T

 client := v1pb.NewPlacementClient(conn)

-stream, err := client.ReportDaprStatus(context.Background())
+stream, err := client.ReportDaprStatus(t.Context())
 require.NoError(t, err)

 return conn, tcpConn.(*net.TCPConn), stream
 }

 func TestMemberRegistration_NoLeadership(t *testing.T) {
-ctx, cancel := context.WithCancel(context.Background())
+ctx, cancel := context.WithCancel(t.Context())
 defer cancel()

 raftClusterOpts, err := tests.RaftClusterOpts(t)

@@ -277,6 +277,7 @@ func createRaftServer(t *testing.T, nodeID int, peers []PeerInfo) (*Server, <-ch
 })
 require.NoError(t, err)

+//nolint:usetesting
 ctx, cancel := context.WithCancel(context.Background())

 stopped := make(chan struct{})

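createRaftServer is the one helper where the old pattern is kept and the linter is silenced with //nolint:usetesting instead. The reason is not stated in the diff; a plausible assumption is that the server's shutdown is driven by an explicit cancel owned by the caller rather than by the test's own lifetime. A hedged sketch of that shape (startLoop is invented, not a dapr helper):

package example_test

import (
	"context"
	"testing"
)

// startLoop is a hypothetical helper that runs fn until the returned cancel
// function is called. It deliberately does not use t.Context(): the caller,
// not the testing framework, decides when shutdown happens, so the check is
// silenced for this single line.
func startLoop(t *testing.T, fn func(context.Context)) context.CancelFunc {
	t.Helper()

	//nolint:usetesting
	ctx, cancel := context.WithCancel(context.Background())

	done := make(chan struct{})
	go func() {
		defer close(done)
		fn(ctx)
	}()

	// Backstop so the goroutine still cannot leak past the test.
	t.Cleanup(func() {
		cancel()
		<-done
	})

	return cancel
}

The returned CancelFunc lets a test stop the loop mid-test and assert shutdown behaviour, which t.Context() alone would not allow; the Cleanup registration is only a leak backstop.
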
@@ -19,11 +19,12 @@
 package common

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 anypb "google.golang.org/protobuf/types/known/anypb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -19,10 +19,11 @@
 package components

 import (
-protoreflect "google.golang.org/protobuf/reflect/protoreflect"
-protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 reflect "reflect"
 sync "sync"
+
+protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 )

 const (

@@ -20,6 +20,7 @@ package components

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,10 +19,11 @@
 package components

 import (
-protoreflect "google.golang.org/protobuf/reflect/protoreflect"
-protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 reflect "reflect"
 sync "sync"
+
+protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 )

 const (

@@ -19,10 +19,11 @@
 package components

 import (
-protoreflect "google.golang.org/protobuf/reflect/protoreflect"
-protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 reflect "reflect"
 sync "sync"
+
+protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 )

 const (

@@ -20,6 +20,7 @@ package components

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,10 +19,11 @@
 package components

 import (
-protoreflect "google.golang.org/protobuf/reflect/protoreflect"
-protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 reflect "reflect"
 sync "sync"
+
+protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 )

 const (

@@ -20,6 +20,7 @@ package components

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,11 +19,12 @@
 package components

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 anypb "google.golang.org/protobuf/types/known/anypb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -20,6 +20,7 @@ package components

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,10 +19,11 @@
 package internals

 import (
-protoreflect "google.golang.org/protobuf/reflect/protoreflect"
-protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 reflect "reflect"
 sync "sync"
+
+protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 )

 const (

@@ -19,11 +19,12 @@
 package internals

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 structpb "google.golang.org/protobuf/types/known/structpb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -19,12 +19,13 @@
 package internals

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 anypb "google.golang.org/protobuf/types/known/anypb"
 timestamppb "google.golang.org/protobuf/types/known/timestamppb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -19,12 +19,13 @@
 package internals

 import (
+reflect "reflect"
+sync "sync"
+
 v1 "github.com/dapr/dapr/pkg/proto/common/v1"
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 emptypb "google.golang.org/protobuf/types/known/emptypb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -20,6 +20,7 @@ package internals

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,11 +19,12 @@
 package internals

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 anypb "google.golang.org/protobuf/types/known/anypb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -19,11 +19,12 @@
 package operator

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 emptypb "google.golang.org/protobuf/types/known/emptypb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -20,6 +20,7 @@ package operator

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,11 +19,12 @@
 package operator

 import (
+reflect "reflect"
+sync "sync"
+
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 timestamppb "google.golang.org/protobuf/types/known/timestamppb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -19,10 +19,11 @@
 package placement

 import (
-protoreflect "google.golang.org/protobuf/reflect/protoreflect"
-protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 reflect "reflect"
 sync "sync"
+
+protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 )

 const (

@@ -20,6 +20,7 @@ package placement

 import (
 context "context"
+
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
 status "google.golang.org/grpc/status"

@@ -19,14 +19,15 @@
 package runtime

 import (
+reflect "reflect"
+sync "sync"
+
 v1 "github.com/dapr/dapr/pkg/proto/common/v1"
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 anypb "google.golang.org/protobuf/types/known/anypb"
 emptypb "google.golang.org/protobuf/types/known/emptypb"
 structpb "google.golang.org/protobuf/types/known/structpb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -20,6 +20,7 @@ package runtime

 import (
 context "context"
+
 v1 "github.com/dapr/dapr/pkg/proto/common/v1"
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"

@@ -19,14 +19,15 @@
 package runtime

 import (
+reflect "reflect"
+sync "sync"
+
 v1 "github.com/dapr/dapr/pkg/proto/common/v1"
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
 anypb "google.golang.org/protobuf/types/known/anypb"
 emptypb "google.golang.org/protobuf/types/known/emptypb"
 timestamppb "google.golang.org/protobuf/types/known/timestamppb"
-reflect "reflect"
-sync "sync"
 )

 const (

@@ -20,6 +20,7 @@ package runtime

 import (
 context "context"
+
 v1 "github.com/dapr/dapr/pkg/proto/common/v1"
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"

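The hunks above touch only generated *.pb.go files and regroup their imports: standard-library packages first, then a blank line, then third-party modules. Which linter enforces the grouping is not visible in this diff (gci and gofumpt are the usual suspects); the resulting shape looks like the hypothetical file below, with blank variable declarations only so the example compiles on its own:

package example

import (
	// Standard library imports come first…
	context "context"
	reflect "reflect"
	sync "sync"

	// …followed by third-party modules after a blank line.
	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
)

// Blank references keep every import used so the file compiles.
var (
	_ = context.Background
	_ reflect.Type
	_ sync.Mutex
	_ protoreflect.Message
	_ = protoimpl.X
)
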
Some files were not shown because too many files have changed in this diff.