From 4764458239b7ed8a3498174a4c42e39960a68434 Mon Sep 17 00:00:00 2001 From: Fabian Martinez <46371672+famarting@users.noreply.github.com> Date: Wed, 31 Jan 2024 20:30:04 +0100 Subject: [PATCH 1/6] add kafka aws iam to docs Signed-off-by: Fabian Martinez <46371672+famarting@users.noreply.github.com> --- .../supported-pubsub/setup-apache-kafka.md | 46 ++++++++++++++++++- 1 file changed, 44 insertions(+), 2 deletions(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md index 028009364..2579f0263 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md @@ -73,7 +73,7 @@ spec: | consumerID | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. If a value for `consumerGroup` is provided, any value for `consumerID` is ignored - a combination of the consumer group and a random unique identifier will be set for the `consumerID` instead. | `"channel1"` | clientID | N | A user-provided string sent with every request to the Kafka brokers for logging, debugging, and auditing purposes. Defaults to `"namespace.appID"` for Kubernetes mode or `"appID"` for Self-Hosted mode. | `"my-namespace.my-dapr-app"`, `"my-dapr-app"` | authRequired | N | *Deprecated* Enable [SASL](https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer) authentication with the Kafka brokers. | `"true"`, `"false"` -| authType | Y | Configure or disable authentication. Supported values: `none`, `password`, `mtls`, or `oidc` | `"password"`, `"none"` +| authType | Y | Configure or disable authentication. Supported values: `none`, `password`, `mtls`, `oidc` or `awsiam` | `"password"`, `"none"` | saslUsername | N | The SASL username used for authentication. Only required if `authType` is set to `"password"`. | `"adminuser"` | saslPassword | N | The SASL password used for authentication. Can be `secretKeyRef` to use a [secret reference]({{< ref component-secrets.md >}}). Only required if `authType is set to `"password"`. | `""`, `"KeFg23!"` | saslMechanism | N | The SASL Authentication Mechanism you wish to use. Only required if `authType` is set to `"password"`. Defaults to `PLAINTEXT` | `"SHA-512", "SHA-256", "PLAINTEXT"` @@ -92,6 +92,12 @@ spec: | oidcClientSecret | N | The OAuth2 client secret that has been provisioned in the identity provider: Required when `authType` is set to `oidc` | `"KeFg23!"` | | oidcScopes | N | Comma-delimited list of OAuth2/OIDC scopes to request with the access token. Recommended when `authType` is set to `oidc`. Defaults to `"openid"` | `"openid,kafka-prod"` | | oidcExtensions | N | Input/Output | String containing a JSON-encoded dictionary of OAuth2/OIDC extensions to request with the access token | `{"cluster":"kafka","poolid":"kafkapool"}` | +| awsRegion | N | The AWS region where the Kafka cluster is deployed to. Required when `authType` is set to `awsiam` | `us-west-1` | +| awsAccessKey | N | AWS access key associated with an IAM account. | `"accessKey"` +| awsSecretKey | N | The secret key associated with the access key. 
| `"secretKey"` +| awsSessionToken | N | AWS session token to use. A session token is only required if you are using temporary security credentials. | `"sessionToken"` +| awsIamRoleArn | N | IAM role that has access to MSK. This is another option to authenticate with MSK aside from the AWS Credentials. | `"arn:aws:iam::123456789:role/mskRole"` +| awsStsSessionName | N | Represents the session name for assuming a role. | `"MSKSASLDefaultSession"` | schemaRegistryURL | N | Required when using Schema Registry Avro serialization/deserialization. The Schema Registry URL. | `http://localhost:8081` | | schemaRegistryAPIKey | N | When using Schema Registry Avro serialization/deserialization. The Schema Registry credentials API Key. | `XYAXXAZ` | | schemaRegistryAPISecret | N | When using Schema Registry Avro serialization/deserialization. The Schema Registry credentials API Secret. | `ABCDEFGMEADFF` | @@ -107,7 +113,7 @@ The metadata `version` must be set to `1.0.0` when using Azure EventHubs with Ka Kafka supports a variety of authentication schemes and Dapr supports several: SASL password, mTLS, OIDC/OAuth2. With the added authentication methods, the `authRequired` field has been deprecated from the v1.6 release and instead the `authType` field should be used. If `authRequired` is set to `true`, Dapr will attempt to configure `authType` correctly -based on the value of `saslPassword`. There are four valid values for `authType`: `none`, `password`, `certificate`, `mtls`, and `oidc`. Note this is authentication only; authorization is still configured within Kafka. +based on the value of `saslPassword`. This are the valid values for `authType`: `none`, `password`, `certificate`, `mtls`, `oidc` and `awsiam`. Note this is authentication only; authorization is still configured within Kafka except for `awsiam` which can also drive authorization decisions configured in AWS IAM. #### None @@ -276,6 +282,42 @@ spec: value: 0.10.2.0 ``` +#### AWS IAM + +Authenticating with AWS IAM is supported with AWS Managed Streaming for Apache Kafka (MSK). Setting `authType` to `awsiam` uses AWS SDK to generate auth tokens to authenticate. +Note the only required metadata field is `awsRegion`, if no `awsAccessKey` and `awsSecretKey` are provided you can use AWS IAM roles for service accounts to have passwordless authentication to your Kafka cluster. + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: kafka-pubsub-awsiam +spec: + type: pubsub.kafka + version: v1 + metadata: + - name: brokers # Required. Kafka broker connection setting + value: "dapr-kafka.myapp.svc.cluster.local:9092" + - name: consumerGroup # Optional. Used for input bindings. + value: "group1" + - name: clientID # Optional. Used as client tracing ID by Kafka brokers. + value: "my-dapr-app-id" + - name: authType # Required. + value: "awsiam" + - name: awsRegion # Required. + value: "us-west-1" + - name: awsAccessKey # Optional. + value: + - name: awsSecretKey # Optional. + value: + - name: awsSessionToken # Optional. + value: + - name: awsIamRoleArn # Optional. + value: "arn:aws:iam::123456789:role/mskRole" + - name: awsStsSessionName # Optional. + value: "MSKSASLDefaultSession" +``` + ### Communication using TLS By default TLS is enabled to secure the transport layer to Kafka. To disable TLS, set `disableTls` to `true`. 
When TLS is enabled, you can From 7dea32b7c8e96402698146c556b8e05173fd2123 Mon Sep 17 00:00:00 2001 From: Shivam Kumar Date: Fri, 2 Feb 2024 01:56:53 +0530 Subject: [PATCH 2/6] Adding recursive terminate/purge in workflow API (#3969) * Adding recursive terminate/purge in workflow api Signed-off-by: Shivam Kumar * Apply suggestions from code review Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Shivam Kumar * review comments Signed-off-by: Shivam Kumar --------- Signed-off-by: Shivam Kumar Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> --- .../workflow/workflow-features-concepts.md | 4 +--- .../workflow/workflow-overview.md | 2 +- .../content/en/reference/api/workflow_api.md | 18 ++++++++++++++++-- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md index 17aec6553..4d4a30954 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md @@ -97,9 +97,7 @@ Child workflows have many benefits: The return value of a child workflow is its output. If a child workflow fails with an exception, then that exception is surfaced to the parent workflow, just like it is when an activity task fails with an exception. Child workflows also support automatic retry policies. -{{% alert title="Note" color="primary" %}} -Because child workflows are independent of their parents, terminating a parent workflow does not affect any child workflows. You must terminate each child workflow independently using its instance ID. -{{% /alert %}} +Terminating a parent workflow terminates all of the child workflows created by the workflow instance. You can disable this by setting the query parameter `non_recursive` to `true` while sending the terminate request to the parent workflow. See [the terminate workflow api]({{< ref "workflow_api.md#terminate-workflow-request" >}}) for more information. ## Durable timers diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index f5b6dae8b..089ff931a 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -41,7 +41,7 @@ With Dapr Workflow, you can write activities and then orchestrate those activiti ### Child workflows -In addition to activities, you can write workflows to schedule other workflows as child workflows. A child workflow is independent of the parent workflow that started it and support automatic retry policies. +In addition to activities, you can write workflows to schedule other workflows as child workflows. A child workflow has its own instance ID, history, and status that is independent of the parent workflow that started it, except for the fact that terminating the parent workflow terminates all of the child workflows created by it. Child workflow also supports automatic retry policies. 
 [Learn more about child workflows.]({{< ref "workflow-features-concepts.md#child-workflows" >}})

diff --git a/daprdocs/content/en/reference/api/workflow_api.md b/daprdocs/content/en/reference/api/workflow_api.md
index 9f9c34de8..1dc80ef54 100644
--- a/daprdocs/content/en/reference/api/workflow_api.md
+++ b/daprdocs/content/en/reference/api/workflow_api.md
@@ -57,15 +57,23 @@ The API call will provide a response similar to this:
 Terminate a running workflow instance with the given name and instance ID.
 ```
-POST http://localhost:3500/v1.0-beta1/workflows/<workflowComponentName>/<instanceId>/terminate
+POST http://localhost:3500/v1.0-beta1/workflows/<workflowComponentName>/<instanceId>/terminate[?non_recursive=false]
 ```
+{{% alert title="Note" color="primary" %}}
+ Terminating a workflow terminates all of the child workflows created by the workflow instance. You can disable this by setting the query parameter `non_recursive` to `true`.
+
+Terminating a workflow has no effect on any in-flight activity executions that were started by the terminated instance.
+
+{{% /alert %}}
+
 ### URL parameters
 Parameter | Description
 --------- | -----------
 `workflowComponentName` | Use `dapr` for Dapr Workflows
 `instanceId` | Unique value created for each run of a specific workflow
+`non_recursive` | (Optional) Boolean to determine if Dapr should not recursively terminate child workflows created by the workflow instance. Default value is `false`.
 ### HTTP response codes
@@ -171,15 +179,21 @@ None.
 Purge the workflow state from your state store with the workflow's instance ID.
 ```
-POST http://localhost:3500/v1.0-beta1/workflows/<workflowComponentName>/<instanceId>/purge
+POST http://localhost:3500/v1.0-beta1/workflows/<workflowComponentName>/<instanceId>/purge[?non_recursive=false]
 ```
+{{% alert title="Note" color="primary" %}}
+ Purging a workflow purges all of the child workflows created by the workflow instance. You can disable this by setting the query parameter `non_recursive` to `true`.
+
+{{% /alert %}}
+
 ### URL parameters
 Parameter | Description
 --------- | -----------
 `workflowComponentName` | Use `dapr` for Dapr Workflows
 `instanceId` | Unique value created for each run of a specific workflow
+`non_recursive` | (Optional) Boolean to determine if Dapr should not recursively purge child workflows created by the workflow instance. Default value is `false`.
 ### HTTP response codes

From b2cbc2d55f6c0ab5aae34f127e8e896331b65040 Mon Sep 17 00:00:00 2001
From: Mark Fussell
Date: Thu, 1 Feb 2024 12:39:43 -0800
Subject: [PATCH 3/6] Update daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md

Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
Signed-off-by: Mark Fussell
---
 .../supported-pubsub/setup-apache-kafka.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md
index 2579f0263..8a35eaa8b 100644
--- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md
+++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md
@@ -285,7 +285,9 @@ spec:
 #### AWS IAM
 Authenticating with AWS IAM is supported with AWS Managed Streaming for Apache Kafka (MSK). Setting `authType` to `awsiam` uses AWS SDK to generate auth tokens to authenticate.
-Note the only required metadata field is `awsRegion`, if no `awsAccessKey` and `awsSecretKey` are provided you can use AWS IAM roles for service accounts to have passwordless authentication to your Kafka cluster. +{{% alert title="Note" color="primary" %}} +The only required metadata field is `awsRegion`. If no `awsAccessKey` and `awsSecretKey` are provided, you can use AWS IAM roles for service accounts to have password-less authentication to your Kafka cluster. +{{% /alert %}} ```yaml apiVersion: dapr.io/v1alpha1 From 74fcb49572e69dbc2671431cecb721eee158a3c7 Mon Sep 17 00:00:00 2001 From: Mark Fussell Date: Thu, 1 Feb 2024 12:39:51 -0800 Subject: [PATCH 4/6] Update daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Mark Fussell --- .../supported-pubsub/setup-apache-kafka.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md index 8a35eaa8b..8cf760722 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md @@ -113,7 +113,17 @@ The metadata `version` must be set to `1.0.0` when using Azure EventHubs with Ka Kafka supports a variety of authentication schemes and Dapr supports several: SASL password, mTLS, OIDC/OAuth2. With the added authentication methods, the `authRequired` field has been deprecated from the v1.6 release and instead the `authType` field should be used. If `authRequired` is set to `true`, Dapr will attempt to configure `authType` correctly -based on the value of `saslPassword`. This are the valid values for `authType`: `none`, `password`, `certificate`, `mtls`, `oidc` and `awsiam`. Note this is authentication only; authorization is still configured within Kafka except for `awsiam` which can also drive authorization decisions configured in AWS IAM. +based on the value of `saslPassword`. The valid values for `authType` are: +- `none` +- `password` +- `certificate` +- `mtls` +- `oidc` +- `awsiam` + +{{% alert title="Note" color="primary" %}} +`authType` is _authentication_ only. _Authorization_ is still configured within Kafka, except for `awsiam`, which can also drive authorization decisions configured in AWS IAM. 
+{{% /alert %}} #### None From 48e2f46385f5f7bdf525ba2be5f0f379eb57a6ef Mon Sep 17 00:00:00 2001 From: Mark Fussell Date: Thu, 1 Feb 2024 12:39:56 -0800 Subject: [PATCH 5/6] Update daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Mark Fussell --- .../components-reference/supported-pubsub/setup-apache-kafka.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md index 8cf760722..a76d060a5 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md @@ -294,7 +294,7 @@ spec: #### AWS IAM -Authenticating with AWS IAM is supported with AWS Managed Streaming for Apache Kafka (MSK). Setting `authType` to `awsiam` uses AWS SDK to generate auth tokens to authenticate. +Authenticating with AWS IAM is supported with MSK. Setting `authType` to `awsiam` uses AWS SDK to generate auth tokens to authenticate. {{% alert title="Note" color="primary" %}} The only required metadata field is `awsRegion`. If no `awsAccessKey` and `awsSecretKey` are provided, you can use AWS IAM roles for service accounts to have password-less authentication to your Kafka cluster. {{% /alert %}} From 7a6f38494a114c442b8878b60766cd0a432fd7cb Mon Sep 17 00:00:00 2001 From: Mark Fussell Date: Thu, 1 Feb 2024 12:40:03 -0800 Subject: [PATCH 6/6] Update daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Mark Fussell --- .../components-reference/supported-pubsub/setup-apache-kafka.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md index a76d060a5..f5dc7faa5 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md @@ -96,7 +96,7 @@ spec: | awsAccessKey | N | AWS access key associated with an IAM account. | `"accessKey"` | awsSecretKey | N | The secret key associated with the access key. | `"secretKey"` | awsSessionToken | N | AWS session token to use. A session token is only required if you are using temporary security credentials. | `"sessionToken"` -| awsIamRoleArn | N | IAM role that has access to MSK. This is another option to authenticate with MSK aside from the AWS Credentials. | `"arn:aws:iam::123456789:role/mskRole"` +| awsIamRoleArn | N | IAM role that has access to AWS Managed Streaming for Apache Kafka (MSK). This is another option to authenticate with MSK aside from the AWS Credentials. | `"arn:aws:iam::123456789:role/mskRole"` | awsStsSessionName | N | Represents the session name for assuming a role. | `"MSKSASLDefaultSession"` | schemaRegistryURL | N | Required when using Schema Registry Avro serialization/deserialization. The Schema Registry URL. | `http://localhost:8081` | | schemaRegistryAPIKey | N | When using Schema Registry Avro serialization/deserialization. 
The Schema Registry credentials API Key. | `XYAXXAZ` |
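
Two illustrative sketches, based on the documentation added in this series rather than on the patches themselves, may help when trying out the new behavior.

For the recursive terminate and purge behavior added to the workflow API, the `non_recursive` query parameter is the only new knob. Assuming the default Dapr HTTP port (`3500`), the `dapr` workflow component named in the URL parameters table, and a hypothetical instance ID of `my-instance`, requests that leave child workflows untouched could look like this:

```
POST http://localhost:3500/v1.0-beta1/workflows/dapr/my-instance/terminate?non_recursive=true
POST http://localhost:3500/v1.0-beta1/workflows/dapr/my-instance/purge?non_recursive=true
```

Omitting the parameter, or passing `false`, keeps the default recursive behavior, in which child workflows created by `my-instance` are terminated or purged along with it.

For the Kafka component, the note added above states that `awsRegion` is the only required AWS-specific field and that IAM roles for service accounts allow password-less authentication. A minimal component sketch under that assumption (the component name and broker address are illustrative values, not taken from the patch):

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: kafka-pubsub-awsiam
spec:
  type: pubsub.kafka
  version: v1
  metadata:
  - name: brokers      # Illustrative MSK bootstrap broker address
    value: "b-1.mycluster.kafka.us-west-1.amazonaws.com:9098"
  - name: authType     # Required. Selects AWS IAM authentication
    value: "awsiam"
  - name: awsRegion    # Required. The only other AWS field needed when relying on IAM roles for service accounts
    value: "us-west-1"
```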