Merge branch 'v1.12' into sendgrid-dynamic-template
|
@ -34,6 +34,8 @@ jobs:
|
|||
run: |
|
||||
python3 -m pip install --upgrade pip
|
||||
pip3 install setuptools wheel twine tox mechanical-markdown
|
||||
pip3 uninstall -y mistune
|
||||
pip3 install mistune~=2.0.5 --no-cache-dir
|
||||
- name: Check Markdown Files
|
||||
run: |
|
||||
for name in `find . -name "*.md"`; do echo -e "------\n$name" ; mm.py -l $name || exit 1 ;done
|
||||
|
|
|
@ -3,11 +3,11 @@ name: Azure Static Web App Root
|
|||
on:
|
||||
push:
|
||||
branches:
|
||||
- v1.10
|
||||
- v1.11
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, closed]
|
||||
branches:
|
||||
- v1.10
|
||||
- v1.11
|
||||
|
||||
jobs:
|
||||
build_and_deploy_job:
|
||||
|
|
|
@ -3,11 +3,11 @@ name: Azure Static Web App v1.9
|
|||
on:
|
||||
push:
|
||||
branches:
|
||||
- v1.11
|
||||
- v1.12
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, closed]
|
||||
branches:
|
||||
- v1.11
|
||||
- v1.12
|
||||
|
||||
jobs:
|
||||
build_and_deploy_job:
|
|
@ -14,8 +14,8 @@ The following branches are currently maintained:
|
|||
|
||||
| Branch | Website | Description |
|
||||
| ------------------------------------------------------------ | -------------------------- | ------------------------------------------------------------------------------------------------ |
|
||||
| [v1.10](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. |
|
||||
| [v1.11](https://github.com/dapr/docs/tree/v1.11) (pre-release) | https://v1-11.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.11+ go here. |
|
||||
| [v1.11](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. |
|
||||
| [v1.12](https://github.com/dapr/docs/tree/v1.12) (pre-release) | https://v1-12.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.12+ go here. |
|
||||
|
||||
For more information visit the [Dapr branch structure](https://docs.dapr.io/contributing/docs-contrib/contributing-docs/#branch-guidance) document.
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# Site Configuration
|
||||
baseURL = "https://v1-11.docs.dapr.io"
|
||||
baseURL = "https://v1-12.docs.dapr.io"
|
||||
title = "Dapr Docs"
|
||||
theme = "docsy"
|
||||
disableFastRender = true
|
||||
|
@ -171,17 +171,20 @@ github_subdir = "daprdocs"
|
|||
github_branch = "v1.11"
|
||||
|
||||
# Versioning
|
||||
version_menu = "v1.11 (preview)"
|
||||
version_menu = "v1.11 (latest)"
|
||||
version = "v1.11"
|
||||
archived_version = false
|
||||
url_latest_version = "https://docs.dapr.io"
|
||||
|
||||
[[params.versions]]
|
||||
version = "v1.11 (preview)"
|
||||
version = "v1.12 (preview)"
|
||||
url = "#"
|
||||
[[params.versions]]
|
||||
version = "v1.10 (latest)"
|
||||
version = "v1.11 (latest)"
|
||||
url = "https://docs.dapr.io"
|
||||
[[params.versions]]
|
||||
version = "v1.10"
|
||||
url = "https://v1-10.docs.dapr.io"
|
||||
[[params.versions]]
|
||||
version = "v1.9"
|
||||
url = "https://v1-9.docs.dapr.io"
|
||||
|
|
|
@ -15,14 +15,54 @@ Now that you've read about [Cryptography as a Dapr building block]({{< ref crypt
|
|||
|
||||
## Encrypt
|
||||
|
||||
Using the Dapr gRPC APIs in your project, you can encrypt a stream of data, such as a file.
|
||||
{{< tabs "JavaScript" "Go" >}}
|
||||
|
||||
{{< tabs "Go" >}}
|
||||
{{% codetab %}}
|
||||
|
||||
<!--JavaScript-->
|
||||
|
||||
Using the Dapr SDK in your project, with the gRPC APIs, you can encrypt data in a buffer or a string:
|
||||
|
||||
```js
|
||||
// When passing data (a buffer or string), `encrypt` returns a Buffer with the encrypted message
|
||||
const ciphertext = await client.crypto.encrypt(plaintext, {
|
||||
// Name of the Dapr component (required)
|
||||
componentName: "mycryptocomponent",
|
||||
// Name of the key stored in the component (required)
|
||||
keyName: "mykey",
|
||||
// Algorithm used for wrapping the key, which must be supported by the key named above.
|
||||
// Options include: "RSA", "AES"
|
||||
keyWrapAlgorithm: "RSA",
|
||||
});
|
||||
```
|
||||
|
||||
The APIs can also be used with streams, to encrypt data more efficiently when it comes from a stream. The example below encrypts a file, writing to another file, using streams:
|
||||
|
||||
```js
|
||||
// `encrypt` can be used as a Duplex stream
|
||||
await pipeline(
|
||||
fs.createReadStream("plaintext.txt"),
|
||||
await client.crypto.encrypt({
|
||||
// Name of the Dapr component (required)
|
||||
componentName: "mycryptocomponent",
|
||||
// Name of the key stored in the component (required)
|
||||
keyName: "mykey",
|
||||
// Algorithm used for wrapping the key, which must be supported by the key named above.
|
||||
// Options include: "RSA", "AES"
|
||||
keyWrapAlgorithm: "RSA",
|
||||
}),
|
||||
fs.createWriteStream("ciphertext.out"),
|
||||
);
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--go-->
|
||||
|
||||
Using the Dapr SDK in your project, you can encrypt a stream of data, such as a file.
|
||||
|
||||
```go
|
||||
out, err := sdkClient.Encrypt(context.Background(), rf, dapr.EncryptOptions{
|
||||
// Name of the Dapr component (required)
|
||||
|
@ -35,18 +75,8 @@ out, err := sdkClient.Encrypt(context.Background(), rf, dapr.EncryptOptions{
|
|||
})
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
The following example puts the `Encrypt` API in context, with code that reads the file, encrypts it, then stores the result in another file.
|
||||
|
||||
{{< tabs "Go" >}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--go-->
|
||||
|
||||
```go
|
||||
// Input file, clear-text
|
||||
rf, err := os.Open("input")
|
||||
|
@ -81,18 +111,8 @@ if err != nil {
|
|||
fmt.Println("Written", n, "bytes")
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
The following example uses the `Encrypt` API to encrypt a string.
|
||||
|
||||
{{< tabs "Go" >}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--go-->
|
||||
|
||||
```go
|
||||
// Input string
|
||||
rf := strings.NewReader("Amor, ch’a nullo amato amar perdona, mi prese del costui piacer sì forte, che, come vedi, ancor non m’abbandona")
|
||||
|
@ -121,15 +141,41 @@ if err != nil {
|
|||
|
||||
## Decrypt
|
||||
|
||||
To decrypt a file, add the `Decrypt` gRPC API to your project.
|
||||
{{< tabs "JavaScript" "Go" >}}
|
||||
|
||||
{{< tabs "Go" >}}
|
||||
{{% codetab %}}
|
||||
|
||||
<!--JavaScript-->
|
||||
|
||||
Using the Dapr SDK, you can decrypt data in a buffer or using streams.
|
||||
|
||||
```js
|
||||
// When passing data as a buffer, `decrypt` returns a Buffer with the decrypted message
|
||||
const plaintext = await client.crypto.decrypt(ciphertext, {
|
||||
// Only required option is the component name
|
||||
componentName: "mycryptocomponent",
|
||||
});
|
||||
|
||||
// `decrypt` can also be used as a Duplex stream
|
||||
await pipeline(
|
||||
fs.createReadStream("ciphertext.out"),
|
||||
await client.crypto.decrypt({
|
||||
// Only required option is the component name
|
||||
componentName: "mycryptocomponent",
|
||||
}),
|
||||
fs.createWriteStream("plaintext.out"),
|
||||
);
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--go-->
|
||||
|
||||
In the following example, `out` is a stream that can be written to file or read in memory, as in the examples above.
|
||||
To decrypt a file, use the `Decrypt` gRPC API in your project.
|
||||
|
||||
In the following example, `out` is a stream that can be written to file or read in memory, as in the examples above.
|
||||
|
||||
```go
|
||||
out, err := sdkClient.Decrypt(context.Background(), rf, dapr.EncryptOptions{
|
||||
|
|
|
@ -31,6 +31,7 @@ metadata:
|
|||
name: lockstore
|
||||
spec:
|
||||
type: lock.redis
|
||||
version: v1
|
||||
metadata:
|
||||
- name: redisHost
|
||||
value: localhost:6379
|
||||
|
|
|
@ -186,7 +186,7 @@ Place `subscription.yaml` in the same directory as your `pubsub.yaml` component.
|
|||
|
||||
Below are code examples that leverage Dapr SDKs to subscribe to the topic you defined in `subscription.yaml`.
|
||||
|
||||
{{< tabs Dotnet Java Python Go Javascript>}}
|
||||
{{< tabs Dotnet Java Python Go JavaScript>}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
|
|
|
@ -30,10 +30,12 @@ The Dapr sidecar doesn’t load any workflow definitions. Rather, the sidecar si
|
|||
|
||||
[Workflow activities]({{< ref "workflow-features-concepts.md#workflow-activities" >}}) are the basic unit of work in a workflow and are the tasks that get orchestrated in the business process.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--csharp-->
|
||||
|
||||
Define the workflow activities you'd like your workflow to perform. Activities are a class definition and can take inputs and outputs. Activities also participate in dependency injection, like binding to a Dapr client.
|
||||
|
||||
The activities called in the example below are:
|
||||
|
@ -96,6 +98,24 @@ public class ProcessPaymentActivity : WorkflowActivity<PaymentRequest, object>
|
|||
|
||||
[See the full `ProcessPaymentActivity.cs` workflow activity example.](https://github.com/dapr/dotnet-sdk/blob/master/examples/Workflow/WorkflowConsoleApp/Activities/ProcessPaymentActivity.cs)
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--python-->
|
||||
|
||||
Define the workflow activities you'd like your workflow to perform. Activities are function definitions that can take inputs and return outputs. The following example creates a counter (activity) called `hello_act` that notifies users of the current counter value. `hello_act` is a function that takes a `WorkflowActivityContext` parameter, which carries context about the activity invocation.
|
||||
|
||||
```python
|
||||
def hello_act(ctx: WorkflowActivityContext, input):
|
||||
global counter
|
||||
counter += input
|
||||
print(f'New counter value is: {counter}!', flush=True)
|
||||
```
|
||||
|
||||
[See the `hello_act` workflow activity in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL40C1-L43C59)
|
||||
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
@ -104,10 +124,12 @@ public class ProcessPaymentActivity : WorkflowActivity<PaymentRequest, object>
|
|||
|
||||
Next, register and call the activities in a workflow.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--csharp-->
|
||||
|
||||
The `OrderProcessingWorkflow` class is derived from a base class called `Workflow` with input and output parameter types. It also includes a `RunAsync` method that does the heavy lifting of the workflow and calls the workflow activities.
|
||||
|
||||
```csharp
|
||||
|
@ -144,6 +166,28 @@ The `OrderProcessingWorkflow` class is derived from a base class called `Workflo
|
|||
|
||||
[See the full workflow example in `OrderProcessingWorkflow.cs`.](https://github.com/dapr/dotnet-sdk/blob/master/examples/Workflow/WorkflowConsoleApp/Workflows/OrderProcessingWorkflow.cs)
|
||||
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--python-->
|
||||
|
||||
The `hello_world_wf` function takes a `DaprWorkflowContext` parameter along with the workflow input. It uses `yield` statements to do the heavy lifting of the workflow and call the workflow activities.
|
||||
|
||||
```python
|
||||
def hello_world_wf(ctx: DaprWorkflowContext, input):
|
||||
print(f'{input}')
|
||||
yield ctx.call_activity(hello_act, input=1)
|
||||
yield ctx.call_activity(hello_act, input=10)
|
||||
yield ctx.wait_for_external_event("event1")
|
||||
yield ctx.call_activity(hello_act, input=100)
|
||||
yield ctx.call_activity(hello_act, input=1000)
|
||||
```
|
||||
|
||||
[See the `hello_world_wf` workflow in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL32C1-L38C51)
|
||||
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
@ -152,10 +196,12 @@ The `OrderProcessingWorkflow` class is derived from a base class called `Workflo
|
|||
|
||||
Finally, compose the application using the workflow.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
|
||||
<!--csharp-->
|
||||
|
||||
[In the following `Program.cs` example](https://github.com/dapr/dotnet-sdk/blob/master/examples/Workflow/WorkflowConsoleApp/Program.cs), for a basic ASP.NET order processing application using the .NET SDK, your project code would include:
|
||||
|
||||
- A NuGet package called `Dapr.Workflow` to receive the .NET SDK capabilities
|
||||
|
@ -223,8 +269,97 @@ app.Run();
|
|||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
{{% codetab %}}
|
||||
|
||||
<!--python-->
|
||||
|
||||
[In the following example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py), for a basic Python hello world application using the Python SDK, your project code would include:
|
||||
|
||||
- A Python class called `DaprClient` to receive the Python SDK capabilities.
|
||||
- A builder with extensions called:
|
||||
- `WorkflowRuntime`: Allows you to register workflows and workflow activities
|
||||
- `DaprWorkflowContext`: Allows you to [create workflows]({{< ref "#write-the-workflow" >}})
|
||||
- `WorkflowActivityContext`: Allows you to [create workflow activities]({{< ref "#write-the-workflow-activities" >}})
|
||||
- API calls. In the example below, these calls start, pause, resume, purge, and terminate the workflow.
|
||||
|
||||
```python
|
||||
from dapr.ext.workflow import WorkflowRuntime, DaprWorkflowContext, WorkflowActivityContext
|
||||
from dapr.clients import DaprClient
|
||||
|
||||
# ...
|
||||
|
||||
def main():
|
||||
with DaprClient() as d:
|
||||
host = settings.DAPR_RUNTIME_HOST
|
||||
port = settings.DAPR_GRPC_PORT
|
||||
workflowRuntime = WorkflowRuntime(host, port)
|
||||
workflowRuntime = WorkflowRuntime()
|
||||
workflowRuntime.register_workflow(hello_world_wf)
|
||||
workflowRuntime.register_activity(hello_act)
|
||||
workflowRuntime.start()
|
||||
|
||||
# Start workflow
|
||||
print("==========Start Counter Increase as per Input:==========")
|
||||
start_resp = d.start_workflow(instance_id=instanceId, workflow_component=workflowComponent,
|
||||
workflow_name=workflowName, input=inputData, workflow_options=workflowOptions)
|
||||
print(f"start_resp {start_resp.instance_id}")
|
||||
|
||||
# ...
|
||||
|
||||
# Pause workflow
|
||||
d.pause_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
getResponse = d.get_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
print(f"Get response from {workflowName} after pause call: {getResponse.runtime_status}")
|
||||
|
||||
# Resume workflow
|
||||
d.resume_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
getResponse = d.get_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
print(f"Get response from {workflowName} after resume call: {getResponse.runtime_status}")
|
||||
|
||||
sleep(1)
|
||||
# Raise workflow
|
||||
d.raise_workflow_event(instance_id=instanceId, workflow_component=workflowComponent,
|
||||
event_name=eventName, event_data=eventData)
|
||||
|
||||
sleep(5)
|
||||
# Purge workflow
|
||||
d.purge_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
try:
|
||||
getResponse = d.get_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
except DaprInternalError as err:
|
||||
if nonExistentIDError in err._message:
|
||||
print("Instance Successfully Purged")
|
||||
|
||||
# Kick off another workflow for termination purposes
|
||||
start_resp = d.start_workflow(instance_id=instanceId, workflow_component=workflowComponent,
|
||||
workflow_name=workflowName, input=inputData, workflow_options=workflowOptions)
|
||||
print(f"start_resp {start_resp.instance_id}")
|
||||
|
||||
# Terminate workflow
|
||||
d.terminate_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
sleep(1)
|
||||
getResponse = d.get_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
print(f"Get response from {workflowName} after terminate call: {getResponse.runtime_status}")
|
||||
|
||||
# Purge workflow
|
||||
d.purge_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
try:
|
||||
getResponse = d.get_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
except DaprInternalError as err:
|
||||
if nonExistentIDError in err._message:
|
||||
print("Instance Successfully Purged")
|
||||
|
||||
workflowRuntime.shutdown()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
```
|
||||
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
|
||||
{{% alert title="Important" color="warning" %}}
|
||||
|
@ -241,4 +376,6 @@ Now that you've authored a workflow, learn how to manage it.
|
|||
## Related links
|
||||
- [Workflow overview]({{< ref workflow-overview.md >}})
|
||||
- [Workflow API reference]({{< ref workflow_api.md >}})
|
||||
- [Try out the .NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
|
||||
- Try out the full SDK examples:
|
||||
- [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
|
||||
- [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow)
|
||||
|
|
|
@ -8,12 +8,12 @@ description: Manage and run workflows
|
|||
|
||||
Now that you've [authored the workflow and its activities in your application]({{< ref howto-author-workflow.md >}}), you can start, terminate, and get information about the workflow using HTTP API calls. For more information, read the [workflow API reference]({{< ref workflow_api.md >}}).
|
||||
|
||||
{{< tabs ".NET SDK" HTTP >}}
|
||||
{{< tabs ".NET" Python HTTP >}}
|
||||
|
||||
<!--NET-->
|
||||
{{% codetab %}}
|
||||
|
||||
Manage your workflow within your code. In the `OrderProcessingWorkflow` example from the [Author a workflow]({{< ref "howto-author-workflow.md#write-the-workflow" >}}) guide, the workflow is registered in the code. You can now start, terminate, and get information about a running workflow:
|
||||
Manage your workflow within your code. In the `OrderProcessingWorkflow` example from the [Author a workflow]({{< ref "howto-author-workflow.md#write-the-application" >}}) guide, the workflow is registered in the code. You can now start, terminate, and get information about a running workflow:
|
||||
|
||||
```csharp
|
||||
string orderId = "exampleOrderId";
|
||||
|
@ -46,6 +46,56 @@ await daprClient.PurgeWorkflowAsync(orderId, workflowComponent);
|
|||
|
||||
{{% /codetab %}}
|
||||
|
||||
<!--Python-->
|
||||
{{% codetab %}}
|
||||
|
||||
Manage your workflow within your code. In the workflow example from the [Author a workflow]({{< ref "howto-author-workflow.md#write-the-application" >}}) guide, the workflow is registered in the code. You can now manage it using the following APIs:
|
||||
- **start_workflow**: Start an instance of a workflow
|
||||
- **get_workflow**: Get information on the status of the workflow
|
||||
- **pause_workflow**: Pauses or suspends a workflow instance that can later be resumed
|
||||
- **resume_workflow**: Resumes a paused workflow instance
|
||||
- **raise_workflow_event**: Raise an event on a workflow
|
||||
- **purge_workflow**: Removes all metadata related to a specific workflow instance
|
||||
- **terminate_workflow**: Terminate or stop a particular instance of a workflow
|
||||
|
||||
```python
|
||||
from dapr.ext.workflow import WorkflowRuntime, DaprWorkflowContext, WorkflowActivityContext
|
||||
from dapr.clients import DaprClient
|
||||
|
||||
# Sample parameters
|
||||
instanceId = "exampleInstanceID"
|
||||
workflowComponent = "dapr"
|
||||
workflowName = "hello_world_wf"
|
||||
eventName = "event1"
|
||||
eventData = "eventData"
|
||||
|
||||
# Start the workflow
|
||||
start_resp = d.start_workflow(instance_id=instanceId, workflow_component=workflowComponent,
|
||||
workflow_name=workflowName, input=inputData, workflow_options=workflowOptions)
|
||||
|
||||
# Get info on the workflow
|
||||
getResponse = d.get_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
|
||||
# Pause the workflow
|
||||
d.pause_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
|
||||
# Resume the workflow
|
||||
d.resume_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
|
||||
# Raise an event on the workflow.
|
||||
d.raise_workflow_event(instance_id=instanceId, workflow_component=workflowComponent,
|
||||
event_name=eventName, event_data=eventData)
|
||||
|
||||
# Purge the workflow
|
||||
d.purge_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
|
||||
# Terminate the workflow
|
||||
d.terminate_workflow(instance_id=instanceId, workflow_component=workflowComponent)
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
|
||||
<!--HTTP-->
|
||||
{{% codetab %}}
|
||||
|
||||
|
@ -121,5 +171,7 @@ Learn more about these HTTP calls in the [workflow API reference guide]({{< ref
|
|||
|
||||
## Next steps
|
||||
- [Try out the Workflow quickstart]({{< ref workflow-quickstart.md >}})
|
||||
- [Try out the .NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
|
||||
- Try out the full SDK examples:
|
||||
- [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
|
||||
- [Python example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py)
|
||||
- [Workflow API reference]({{< ref workflow_api.md >}})
|
||||
|
|
|
@ -14,7 +14,7 @@ For more information on how workflow state is managed, see the [workflow archite
|
|||
|
||||
## Workflows
|
||||
|
||||
Dapr Workflows are functions you write that define a series of steps or tasks to be executed in a particular order. The Dapr Workflow engine takes care of coordinating and managing the execution of the steps, including managing failures and retries. If the app hosting your workflows is scaled out across multiple machines, the workflow engine may also load balance the execution of workflows and their tasks across multiple machines.
|
||||
Dapr Workflows are functions you write that define a series of tasks to be executed in a particular order. The Dapr Workflow engine takes care of scheduling and execution of the tasks, including managing failures and retries. If the app hosting your workflows is scaled out across multiple machines, the workflow engine may also load balance the execution of workflows and their tasks across multiple machines.
|
||||
|
||||
There are several different kinds of tasks that a workflow can schedule, including:
|
||||
- [Activities]({{< ref "workflow-features-concepts.md#workflow-activities" >}}) for executing custom logic
|
||||
|
@ -24,7 +24,7 @@ There are several different kinds of tasks that a workflow can schedule, includi
|
|||
|
||||
### Workflow identity
|
||||
|
||||
Each workflow you define has a type name, and individual executions of a workflow have a unique _instance ID_. Workflow instance IDs can be generated by your app code, which is useful when workflows correspond to business entities like documents or jobs, or can be auto-generated UUIDs. A workflow's instance ID is useful for debugging and also for managing workflows using the [Workflow APIs]({{< ref workflow_api.md >}}).
|
||||
Each workflow you define has a type name, and individual executions of a workflow require a unique _instance ID_. Workflow instance IDs can be generated by your app code, which is useful when workflows correspond to business entities like documents or jobs, or can be auto-generated UUIDs. A workflow's instance ID is useful for debugging and also for managing workflows using the [Workflow APIs]({{< ref workflow_api.md >}}).
|
||||
|
||||
Only one workflow instance with a given ID can exist at any given time. However, if a workflow instance completes or fails, its ID can be reused by a new workflow instance. Note, however, that the new workflow instance effectively replaces the old one in the configured state store.
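For illustration, here is a minimal Python sketch of deriving instance IDs from business entities, using the `start_workflow` API shown later on this page; the workflow name and document IDs are assumptions for this example:

```python
from dapr.clients import DaprClient

# A sketch only: "hello_world_wf" and the document IDs are illustrative.
with DaprClient() as d:
    for doc_id in ["doc-123", "doc-456"]:
        resp = d.start_workflow(
            instance_id=f"process-{doc_id}",  # one workflow instance per business entity
            workflow_component="dapr",
            workflow_name="hello_world_wf",
            input=doc_id)
        print(f"Started workflow instance: {resp.instance_id}")
```

Reusing an entity-derived ID like this makes it straightforward to look up or manage the workflow for a given document later.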
|
||||
|
||||
|
@ -36,8 +36,8 @@ When a workflow "awaits" a scheduled task, it unloads itself from memory until t
|
|||
|
||||
When a workflow function is replayed, it runs again from the beginning. However, when it encounters a task that already completed, instead of scheduling that task again, the workflow engine:
|
||||
|
||||
1. Returns the result of the completed task to the workflow.
|
||||
1. Continues execution until the next "await" point.
|
||||
1. Returns the stored result of the completed task to the workflow.
|
||||
1. Continues execution until the next "await" point.
|
||||
|
||||
This "replay" behavior continues until the workflow function completes or fails with an error.
|
||||
|
||||
|
|
|
@ -10,32 +10,32 @@ description: "Overview of Dapr Workflow"
|
|||
Dapr Workflow is currently in alpha.
|
||||
{{% /alert %}}
|
||||
|
||||
Dapr Workflow makes orchestrating the logic required for messaging, state management, and failure handling across various microservices easier for developers. Dapr Workflow enables you to create long running, fault-tolerant, stateful applications. Prior to Dapr Workflow, you'd often need to build ad-hoc workflows in custom, complex code in order to achieve long running, fault-tolerant, stateful applications.
|
||||
Dapr workflow makes it easy for developers to write business logic and integrations in a reliable way. Since Dapr workflows are stateful, they support long-running and fault-tolerant applications, ideal for orchestrating microservices. Dapr workflow works seamlessly with other Dapr building blocks, such as service invocation, pub/sub, state management, and bindings.
|
||||
|
||||
The durable, resilient Dapr Workflow capability:
|
||||
|
||||
- Offers a built-in workflow runtime for driving Dapr Workflow execution
|
||||
- Provides SDKs for authoring workflows in code, using any language
|
||||
- Provides HTTP and gRPC APIs for managing workflows (start, query, suspend/resume, terminate)
|
||||
- Integrates with any other workflow runtime via workflow components
|
||||
- Offers a built-in workflow runtime for driving Dapr Workflow execution.
|
||||
- Provides SDKs for authoring workflows in code, using any language.
|
||||
- Provides HTTP and gRPC APIs for managing workflows (start, query, suspend/resume, terminate).
|
||||
- Integrates with any other workflow runtime via workflow components.
|
||||
|
||||
<img src="/images/workflow-overview/workflow-overview.png" width=800 alt="Diagram showing basics of Dapr Workflow">
|
||||
|
||||
Some example scenarios that Dapr Workflow can perform are:
|
||||
|
||||
- Order processing involving inventory management, payment systems, shipping, etc.
|
||||
- Order processing involving orchestration between inventory management, payment systems, and shipping services.
|
||||
- HR onboarding workflows coordinating tasks across multiple departments and participants.
|
||||
- Orchestrating the roll-out of digital menu updates in a national restaurant chain.
|
||||
- Image processing workflows involving API-based classification and storage.
|
||||
|
||||
|
||||
## Features
|
||||
|
||||
### Workflows and activities
|
||||
|
||||
With Dapr Workflow, you can write activites and then compose those activities together into a workflow. Workflow activities are:
|
||||
With Dapr Workflow, you can write activities and then orchestrate those activities in a workflow. Workflow activities are:
|
||||
|
||||
- The basic unit of work in a workflow
|
||||
- The tasks that get orchestrated in the business process
|
||||
- Used for calling other (Dapr) services, interacting with state stores, and pub/sub brokers.
|
||||
|
||||
[Learn more about workflow activities.]({{< ref "workflow-features-concepts.md#workflow-activities" >}})
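As a brief sketch of that last point, an activity in Python might use the Dapr client to interact with a state store; the component name `statestore` and the key are assumptions here:

```python
from dapr.clients import DaprClient
import dapr.ext.workflow as wf


def record_result_activity(ctx: wf.WorkflowActivityContext, result: str):
    # The activity, not the workflow, performs the state store interaction.
    # "statestore" and the key name are assumptions for this sketch.
    with DaprClient() as d:
        d.save_state(store_name="statestore", key="latest-result", value=result)
```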
|
||||
|
||||
|
@ -47,7 +47,7 @@ In addition to activities, you can write workflows to schedule other workflows a
|
|||
|
||||
### Timers and reminders
|
||||
|
||||
Same as Dapr actors, you can schedule reminder-like durable delays for any time range.
|
||||
Same as Dapr actors, you can schedule reminder-like durable delays for any time range.
|
||||
|
||||
[Learn more about workflow timers]({{< ref "workflow-features-concepts.md#durable-timers" >}}) and [reminders]({{< ref "workflow-architecture.md#reminder-usage-and-execution-guarantees" >}})
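As a minimal sketch (the workflow and activity names are illustrative), a durable 24-hour delay in a Python workflow looks like this:

```python
from datetime import timedelta
import dapr.ext.workflow as wf


def delayed_followup_workflow(ctx: wf.DaprWorkflowContext, customer_id: str):
    # The durable timer is backed by a reminder, so the 24-hour delay survives
    # process restarts; an in-memory sleep would not.
    yield ctx.create_timer(fire_at=ctx.current_utc_datetime + timedelta(hours=24))
    yield ctx.call_activity(send_followup, input=customer_id)


def send_followup(ctx, customer_id: str):
    # Hypothetical activity standing in for a real notification.
    print(f"Sending follow-up to customer {customer_id}")
```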
|
||||
|
||||
|
@ -81,6 +81,8 @@ You can use the following SDKs to author a workflow.
|
|||
| Language stack | Package |
|
||||
| - | - |
|
||||
| .NET | [Dapr.Workflow](https://www.nuget.org/profiles/dapr.io) |
|
||||
| Python | [dapr-ext-workflow](https://github.com/dapr/python-sdk/tree/master/ext/dapr-ext-workflow) |
|
||||
|
||||
|
||||
## Try out workflows
|
||||
|
||||
|
@ -92,6 +94,8 @@ Want to put workflows to the test? Walk through the following quickstart and tut
|
|||
| ------------------- | ----------- |
|
||||
| [Workflow quickstart]({{< ref workflow-quickstart.md >}}) | Run a .NET workflow application with four workflow activities to see Dapr Workflow in action |
|
||||
| [Workflow .NET SDK example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) | Learn how to create a Dapr Workflow and invoke it using ASP.NET Core web APIs. |
|
||||
| [Workflow Python SDK example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) | Learn how to create a Dapr Workflow and invoke it using the Python `DaprClient` package. |
|
||||
|
||||
|
||||
### Start using workflows directly in your app
|
||||
|
||||
|
@ -110,4 +114,6 @@ Watch [this video for an overview on Dapr Workflow](https://youtu.be/s1p9MNl4VGo
|
|||
## Related links
|
||||
|
||||
- [Workflow API reference]({{< ref workflow_api.md >}})
|
||||
- [Try out the .NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
|
||||
- Try out the full SDK examples:
|
||||
- [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
|
||||
- [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow)
|
||||
|
|
|
@ -12,10 +12,10 @@ Dapr Workflows simplify complex, stateful coordination requirements in microserv
|
|||
|
||||
In the task chaining pattern, multiple steps in a workflow are run in succession, and the output of one step may be passed as the input to the next step. Task chaining workflows typically involve creating a sequence of operations that need to be performed on some data, such as filtering, transforming, and reducing.
|
||||
|
||||
In some cases, the steps of the workflow may need to be orchestrated across multiple microservices. For increased reliability and scalability, you're also likely to use queues to trigger the various steps.
|
||||
|
||||
<img src="/images/workflow-overview/workflows-chaining.png" width=800 alt="Diagram showing how the task chaining workflow pattern works">
|
||||
|
||||
In some cases, the steps of the workflow may need to be orchestrated across multiple microservices. For increased reliability and scalability, you're also likely to use queues to trigger the various steps.
|
||||
|
||||
While the pattern is simple, there are many complexities hidden in the implementation. For example:
|
||||
|
||||
- What happens if one of the microservices is unavailable for an extended period of time?
|
||||
|
@ -25,9 +25,10 @@ While the pattern is simple, there are many complexities hidden in the implement
|
|||
|
||||
Dapr Workflow solves these complexities by allowing you to implement the task chaining pattern concisely as a simple function in the programming language of your choice, as shown in the following example.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--dotnet-->
|
||||
|
||||
```csharp
|
||||
// Exponential backoff retry policy that survives long outages
|
||||
|
@ -45,7 +46,6 @@ try
|
|||
var result1 = await context.CallActivityAsync<string>("Step1", wfInput, retryOptions);
|
||||
var result2 = await context.CallActivityAsync<byte[]>("Step2", result1, retryOptions);
|
||||
var result3 = await context.CallActivityAsync<long[]>("Step3", result2, retryOptions);
|
||||
var result4 = await context.CallActivityAsync<Guid[]>("Step4", result3, retryOptions);
|
||||
return string.Join(", ", result4);
|
||||
}
|
||||
catch (TaskFailedException) // Task failures are surfaced as TaskFailedException
|
||||
|
@ -56,14 +56,61 @@ catch (TaskFailedException) // Task failures are surfaced as TaskFailedException
|
|||
}
|
||||
```
|
||||
|
||||
{{% alert title="Note" color="primary" %}}
|
||||
In the example above, `"Step1"`, `"Step2"`, `"Step3"`, and `"MyCompensation"` represent workflow activities, which are functions in your code that actually implement the steps of the workflow. For brevity, these activity implementations are left out of this example.
|
||||
{{% /alert %}}
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--python-->
|
||||
|
||||
```python
|
||||
import dapr.ext.workflow as wf
|
||||
|
||||
|
||||
def task_chain_workflow(ctx: wf.DaprWorkflowContext, wf_input: int):
|
||||
try:
|
||||
result1 = yield ctx.call_activity(step1, input=wf_input)
|
||||
result2 = yield ctx.call_activity(step2, input=result1)
|
||||
result3 = yield ctx.call_activity(step3, input=result2)
|
||||
except Exception as e:
|
||||
yield ctx.call_activity(error_handler, input=str(e))
|
||||
raise
|
||||
return [result1, result2, result3]
|
||||
|
||||
|
||||
def step1(ctx, activity_input):
|
||||
print(f'Step 1: Received input: {activity_input}.')
|
||||
# Do some work
|
||||
return activity_input + 1
|
||||
|
||||
|
||||
def step2(ctx, activity_input):
|
||||
print(f'Step 2: Received input: {activity_input}.')
|
||||
# Do some work
|
||||
return activity_input * 2
|
||||
|
||||
|
||||
def step3(ctx, activity_input):
|
||||
print(f'Step 3: Received input: {activity_input}.')
|
||||
# Do some work
|
||||
return activity_input ^ 2
|
||||
|
||||
|
||||
def error_handler(ctx, error):
|
||||
print(f'Executing error handler: {error}.')
|
||||
# Do some compensating work
|
||||
```
|
||||
|
||||
{{% alert title="Note" color="primary" %}}
|
||||
Workflow retry policies will be available in a future version of the Python SDK.
|
||||
{{% /alert %}}
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
{{% alert title="Note" color="primary" %}}
|
||||
In the example above, `"Step1"`, `"Step2"`, `"MyCompensation"`, etc. represent workflow activities, which are functions in your code that actually implement the steps of the workflow. For brevity, these activity implementations are left out of this example.
|
||||
{{% /alert %}}
|
||||
|
||||
As you can see, the workflow is expressed as a simple series of statements in the programming language of your choice. This allows any engineer in the organization to quickly understand the end-to-end flow without necessarily needing to understand the end-to-end system architecture.
|
||||
|
||||
Behind the scenes, the Dapr Workflow runtime:
|
||||
|
@ -88,9 +135,10 @@ In addition to the challenges mentioned in [the previous pattern]({{< ref "workf
|
|||
|
||||
Dapr Workflows provides a way to express the fan-out/fan-in pattern as a simple function, as shown in the following example:
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--dotnet-->
|
||||
|
||||
```csharp
|
||||
// Get a list of N work items to process in parallel.
|
||||
|
@ -114,6 +162,46 @@ await context.CallActivityAsync("PostResults", sum);
|
|||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--python-->
|
||||
|
||||
```python
|
||||
import time
|
||||
from typing import List
|
||||
import dapr.ext.workflow as wf
|
||||
|
||||
|
||||
def batch_processing_workflow(ctx: wf.DaprWorkflowContext, wf_input: int):
|
||||
# get a batch of N work items to process in parallel
|
||||
work_batch = yield ctx.call_activity(get_work_batch, input=wf_input)
|
||||
|
||||
# schedule N parallel tasks to process the work items and wait for all to complete
|
||||
parallel_tasks = [ctx.call_activity(process_work_item, input=work_item) for work_item in work_batch]
|
||||
outputs = yield wf.when_all(parallel_tasks)
|
||||
|
||||
# aggregate the results and send them to another activity
|
||||
total = sum(outputs)
|
||||
yield ctx.call_activity(process_results, input=total)
|
||||
|
||||
|
||||
def get_work_batch(ctx, batch_size: int) -> List[int]:
|
||||
return [i + 1 for i in range(batch_size)]
|
||||
|
||||
|
||||
def process_work_item(ctx, work_item: int) -> int:
|
||||
print(f'Processing work item: {work_item}.')
|
||||
time.sleep(5)
|
||||
result = work_item * 2
|
||||
print(f'Work item {work_item} processed. Result: {result}.')
|
||||
return result
|
||||
|
||||
|
||||
def process_results(ctx, final_result: int):
|
||||
print(f'Final result: {final_result}.')
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
The key takeaways from this example are:
|
||||
|
@ -214,9 +302,10 @@ Depending on the business needs, there may be a single monitor or there may be m
|
|||
|
||||
Dapr Workflow supports this pattern natively by allowing you to implement _eternal workflows_. Rather than writing infinite while-loops ([which is an anti-pattern]({{< ref "workflow-features-concepts.md#infinite-loops-and-eternal-workflows" >}})), Dapr Workflow exposes a _continue-as-new_ API that workflow authors can use to restart a workflow function from the beginning with a new input.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--dotnet-->
|
||||
|
||||
```csharp
|
||||
public override async Task<object> RunAsync(WorkflowContext context, MyEntityState myEntityState)
|
||||
|
@ -256,6 +345,53 @@ public override async Task<object> RunAsync(WorkflowContext context, MyEntitySta
|
|||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--python-->
|
||||
|
||||
```python
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import random
|
||||
import dapr.ext.workflow as wf
|
||||
|
||||
|
||||
@dataclass
|
||||
class JobStatus:
|
||||
job_id: str
|
||||
is_healthy: bool
|
||||
|
||||
|
||||
def status_monitor_workflow(ctx: wf.DaprWorkflowContext, job: JobStatus):
|
||||
# poll a status endpoint associated with this job
|
||||
status = yield ctx.call_activity(check_status, input=job)
|
||||
if not ctx.is_replaying:
|
||||
print(f"Job '{job.job_id}' is {status}.")
|
||||
|
||||
if status == "healthy":
|
||||
job.is_healthy = True
|
||||
next_sleep_interval = 60 # check less frequently when healthy
|
||||
else:
|
||||
if job.is_healthy:
|
||||
job.is_healthy = False
|
||||
ctx.call_activity(send_alert, input=f"Job '{job.job_id}' is unhealthy!")
|
||||
next_sleep_interval = 5 # check more frequently when unhealthy
|
||||
|
||||
yield ctx.create_timer(fire_at=ctx.current_utc_datetime + timedelta(seconds=next_sleep_interval))
|
||||
|
||||
# restart from the beginning with a new JobStatus input
|
||||
ctx.continue_as_new(job)
|
||||
|
||||
|
||||
def check_status(ctx, _) -> str:
|
||||
return random.choice(["healthy", "unhealthy"])
|
||||
|
||||
|
||||
def send_alert(ctx, message: str):
|
||||
print(f'*** Alert: {message}')
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
A workflow implementing the monitor pattern can loop forever or it can terminate itself gracefully by not calling _continue-as-new_.
|
||||
|
@ -284,9 +420,10 @@ The following diagram illustrates this flow.
|
|||
|
||||
The following example code shows how this pattern can be implemented using Dapr Workflow.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--dotnet-->
|
||||
|
||||
```csharp
|
||||
public override async Task<OrderResult> RunAsync(WorkflowContext context, OrderPayload order)
|
||||
|
@ -331,13 +468,73 @@ In the example above, `RequestApprovalActivity` is the name of a workflow activi
|
|||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--python-->
|
||||
|
||||
```python
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import dapr.ext.workflow as wf
|
||||
|
||||
|
||||
@dataclass
|
||||
class Order:
|
||||
cost: float
|
||||
product: str
|
||||
quantity: int
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.product} ({self.quantity})'
|
||||
|
||||
|
||||
@dataclass
|
||||
class Approval:
|
||||
approver: str
|
||||
|
||||
@staticmethod
|
||||
def from_dict(dict):
|
||||
return Approval(**dict)
|
||||
|
||||
|
||||
def purchase_order_workflow(ctx: wf.DaprWorkflowContext, order: Order):
|
||||
# Orders under $1000 are auto-approved
|
||||
if order.cost < 1000:
|
||||
return "Auto-approved"
|
||||
|
||||
# Orders of $1000 or more require manager approval
|
||||
yield ctx.call_activity(send_approval_request, input=order)
|
||||
|
||||
# Approvals must be received within 24 hours or they will be canceled.
|
||||
approval_event = ctx.wait_for_external_event("approval_received")
|
||||
timeout_event = ctx.create_timer(timedelta(hours=24))
|
||||
winner = yield wf.when_any([approval_event, timeout_event])
|
||||
if winner == timeout_event:
|
||||
return "Cancelled"
|
||||
|
||||
# The order was approved
|
||||
yield ctx.call_activity(place_order, input=order)
|
||||
approval_details = Approval.from_dict(approval_event.get_result())
|
||||
return f"Approved by '{approval_details.approver}'"
|
||||
|
||||
|
||||
def send_approval_request(_, order: Order) -> None:
|
||||
print(f'*** Sending approval request for order: {order}')
|
||||
|
||||
|
||||
def place_order(_, order: Order) -> None:
|
||||
print(f'*** Placing order: {order}')
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
The code that delivers the event to resume the workflow execution is external to the workflow. Workflow events can be delivered to a waiting workflow instance using the [raise event]({{< ref "howto-manage-workflow.md#raise-an-event" >}}) workflow management API, as shown in the following example:
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" Python >}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--dotnet-->
|
||||
|
||||
```csharp
|
||||
// Raise the workflow event to the waiting workflow
|
||||
|
@ -350,6 +547,23 @@ await daprClient.RaiseWorkflowEventAsync(
|
|||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{% codetab %}}
|
||||
<!--python-->
|
||||
|
||||
```python
|
||||
from dapr.clients import DaprClient
|
||||
from dataclasses import asdict
|
||||
|
||||
with DaprClient() as d:
|
||||
d.raise_workflow_event(
|
||||
instance_id=instance_id,
|
||||
workflow_component="dapr",
|
||||
event_name="approval_received",
|
||||
event_data=asdict(Approval("Jane Doe")))
|
||||
```
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
{{< /tabs >}}
|
||||
|
||||
External events don't have to be directly triggered by humans. They can also be triggered by other systems. For example, a workflow may need to pause and wait for a payment to be received. In this case, a payment system might publish an event to a pub/sub topic on receipt of a payment, and a listener on that topic can raise an event to the workflow using the raise event workflow API.
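A hedged sketch of that flow in Python: assuming a handler `on_payment_received` is wired to a pub/sub subscription, it can resume the waiting workflow with the `raise_workflow_event` API shown above (the topic wiring and the order-ID-as-instance-ID convention are assumptions):

```python
from dapr.clients import DaprClient


def on_payment_received(payment: dict):
    # Hypothetical handler invoked by a pub/sub subscription when a payment
    # event arrives; it resumes the workflow instance waiting on the event.
    with DaprClient() as d:
        d.raise_workflow_event(
            instance_id=payment["order_id"],  # assumes the order ID doubles as the instance ID
            workflow_component="dapr",
            event_name="payment_received",
            event_data=payment)
```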
|
||||
|
|
|
@ -64,8 +64,9 @@ cd ./crypto-quickstart
|
|||
```
|
||||
|
||||
The application code defines two required keys:
|
||||
|
||||
- Private RSA key
|
||||
- A 256-bit symmetric (AES) key
|
||||
- A 256-bit symmetric (AES) key
|
||||
|
||||
Generate two keys, an RSA key and an AES key, using OpenSSL and write them to two files:
|
||||
|
||||
|
|
|
@ -28,7 +28,7 @@ In this guide, you'll:
|
|||
|
||||
Currently, you can experience Dapr Workflow using the .NET or Python SDK.
|
||||
|
||||
{{< tabs ".NET" >}}
|
||||
{{< tabs ".NET" "Python" >}}
|
||||
|
||||
<!-- .NET -->
|
||||
{{% codetab %}}
|
||||
|
@ -254,8 +254,234 @@ The `Activities` directory holds the four workflow activities used by the workfl
|
|||
- `ProcessPaymentActivity.cs`
|
||||
- `UpdateInventoryActivity.cs`
|
||||
|
||||
{{% /codetab %}}
|
||||
|
||||
<!-- Python -->
|
||||
{{% codetab %}}
|
||||
|
||||
### Step 1: Pre-requisites
|
||||
|
||||
For this example, you will need:
|
||||
|
||||
- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started).
|
||||
- [Python 3.7+ installed](https://www.python.org/downloads/).
|
||||
<!-- IGNORE_LINKS -->
|
||||
- [Docker Desktop](https://www.docker.com/products/docker-desktop)
|
||||
<!-- END_IGNORE -->
|
||||
|
||||
### Step 2: Set up the environment
|
||||
|
||||
Clone the [sample provided in the Quickstarts repo](https://github.com/dapr/quickstarts/tree/master/workflows).
|
||||
|
||||
```bash
|
||||
git clone https://github.com/dapr/quickstarts.git
|
||||
```
|
||||
|
||||
In a new terminal window, navigate to the `order-processor` directory:
|
||||
|
||||
```bash
|
||||
cd workflows/python/sdk/order-processor
|
||||
```
|
||||
|
||||
Install the Dapr Python SDK package:
|
||||
|
||||
```bash
|
||||
pip3 install -r requirements.txt
|
||||
```
|
||||
|
||||
### Step 3: Run the order processor app
|
||||
|
||||
In the terminal, start the order processor app alongside a Dapr sidecar:
|
||||
|
||||
```bash
|
||||
dapr run --app-id order-processor --resources-path ../../../components/ -- python3 app.py
|
||||
```
|
||||
|
||||
> **Note:** Since `python3.exe` is not defined on Windows, you may need to use `python app.py` instead of `python3 app.py`.
|
||||
|
||||
This starts the `order-processor` app with a unique workflow ID and runs the workflow activities.
|
||||
|
||||
Expected output:
|
||||
|
||||
```bash
|
||||
== APP == Starting order workflow, purchasing 10 of cars
|
||||
== APP == 2023-06-06 09:35:52.945 durabletask-worker INFO: Successfully connected to 127.0.0.1:65406. Waiting for work items...
|
||||
== APP == INFO:NotifyActivity:Received order f4e1926e-3721-478d-be8a-f5bebd1995da for 10 cars at $150000 !
|
||||
== APP == INFO:VerifyInventoryActivity:Verifying inventory for order f4e1926e-3721-478d-be8a-f5bebd1995da of 10 cars
|
||||
== APP == INFO:VerifyInventoryActivity:There are 100 Cars available for purchase
|
||||
== APP == INFO:RequestApprovalActivity:Requesting approval for payment of 165000 USD for 10 cars
|
||||
== APP == 2023-06-06 09:36:05.969 durabletask-worker INFO: f4e1926e-3721-478d-be8a-f5bebd1995da Event raised: manager_approval
|
||||
== APP == INFO:NotifyActivity:Payment for order f4e1926e-3721-478d-be8a-f5bebd1995da has been approved!
|
||||
== APP == INFO:ProcessPaymentActivity:Processing payment: f4e1926e-3721-478d-be8a-f5bebd1995da for 10 cars at 150000 USD
|
||||
== APP == INFO:ProcessPaymentActivity:Payment for request ID f4e1926e-3721-478d-be8a-f5bebd1995da processed successfully
|
||||
== APP == INFO:UpdateInventoryActivity:Checking inventory for order f4e1926e-3721-478d-be8a-f5bebd1995da for 10 cars
|
||||
== APP == INFO:UpdateInventoryActivity:There are now 90 cars left in stock
|
||||
== APP == INFO:NotifyActivity:Order f4e1926e-3721-478d-be8a-f5bebd1995da has completed!
|
||||
== APP == 2023-06-06 09:36:06.106 durabletask-worker INFO: f4e1926e-3721-478d-be8a-f5bebd1995da: Orchestration completed with status: COMPLETED
|
||||
== APP == Workflow completed! Result: Completed
|
||||
== APP == Purchase of item is Completed
|
||||
```
|
||||
|
||||
### (Optional) Step 4: View in Zipkin
|
||||
|
||||
If you have Zipkin configured for Dapr locally on your machine, you can view the workflow trace spans in the Zipkin web UI (typically at `http://localhost:9411/zipkin/`).
|
||||
|
||||
<img src="/images/workflow-trace-spans-zipkin-python.png" width=900 style="padding-bottom:15px;">
|
||||
|
||||
### What happened?
|
||||
|
||||
When you ran `dapr run --app-id order-processor --resources-path ../../../components/ -- python3 app.py`:
|
||||
|
||||
1. A unique order ID for the workflow is generated (in the above example, `f4e1926e-3721-478d-be8a-f5bebd1995da`) and the workflow is scheduled.
|
||||
1. The `NotifyActivity` workflow activity sends a notification saying an order for 10 cars has been received.
|
||||
1. The `VerifyInventoryActivity` workflow activity checks the inventory data, determines if you can supply the ordered item, and responds with the number of cars in stock.
|
||||
1. Your workflow starts and notifies you of its status.
|
||||
1. The `ProcessPaymentActivity` workflow activity begins processing payment for order `f4e1926e-3721-478d-be8a-f5bebd1995da` and confirms if successful.
|
||||
1. The `UpdateInventoryActivity` workflow activity updates the inventory with the current available cars after the order has been processed.
|
||||
1. The `NotifyActivity` workflow activity sends a notification saying that order `f4e1926e-3721-478d-be8a-f5bebd1995da` has completed.
|
||||
1. The workflow terminates as completed.
|
||||
|
||||
#### `order-processor/app.py`
|
||||
|
||||
In the application's program file:
|
||||
- The unique workflow order ID is generated
|
||||
- The workflow is scheduled
|
||||
- The workflow status is retrieved
|
||||
- The workflow and the workflow activities it invokes are registered
|
||||
|
||||
```python
|
||||
class WorkflowConsoleApp:
|
||||
def main(self):
|
||||
# Register workflow and activities
|
||||
workflowRuntime = WorkflowRuntime(settings.DAPR_RUNTIME_HOST, settings.DAPR_GRPC_PORT)
|
||||
workflowRuntime.register_workflow(order_processing_workflow)
|
||||
workflowRuntime.register_activity(notify_activity)
|
||||
workflowRuntime.register_activity(requst_approval_activity)
|
||||
workflowRuntime.register_activity(verify_inventory_activity)
|
||||
workflowRuntime.register_activity(process_payment_activity)
|
||||
workflowRuntime.register_activity(update_inventory_activity)
|
||||
workflowRuntime.start()
|
||||
|
||||
print("==========Begin the purchase of item:==========", flush=True)
|
||||
item_name = default_item_name
|
||||
order_quantity = 10
|
||||
|
||||
total_cost = int(order_quantity) * baseInventory[item_name].per_item_cost
|
||||
order = OrderPayload(item_name=item_name, quantity=int(order_quantity), total_cost=total_cost)
|
||||
|
||||
# Start Workflow
|
||||
print(f'Starting order workflow, purchasing {order_quantity} of {item_name}', flush=True)
|
||||
start_resp = daprClient.start_workflow(workflow_component=workflow_component,
|
||||
workflow_name=workflow_name,
|
||||
input=order)
|
||||
_id = start_resp.instance_id
|
||||
|
||||
def prompt_for_approval(daprClient: DaprClient):
|
||||
daprClient.raise_workflow_event(instance_id=_id, workflow_component=workflow_component,
|
||||
event_name="manager_approval", event_data={'approval': True})
|
||||
|
||||
approval_seeked = False
|
||||
start_time = datetime.now()
|
||||
while True:
|
||||
time_delta = datetime.now() - start_time
|
||||
state = daprClient.get_workflow(instance_id=_id, workflow_component=workflow_component)
|
||||
if not state:
|
||||
print("Workflow not found!") # not expected
|
||||
elif state.runtime_status == "Completed" or\
|
||||
state.runtime_status == "Failed" or\
|
||||
state.runtime_status == "Terminated":
|
||||
print(f'Workflow completed! Result: {state.runtime_status}', flush=True)
|
||||
break
|
||||
if time_delta.total_seconds() >= 10:
|
||||
state = daprClient.get_workflow(instance_id=_id, workflow_component=workflow_component)
|
||||
if total_cost > 50000 and (
|
||||
    state.runtime_status not in ("Completed", "Failed", "Terminated")
|
||||
) and not approval_seeked:
|
||||
approval_seeked = True
|
||||
threading.Thread(target=prompt_for_approval, args=(daprClient,), daemon=True).start()
|
||||
|
||||
print("Purchase of item is ", state.runtime_status, flush=True)
|
||||
|
||||
def restock_inventory(self, daprClient: DaprClient, baseInventory):
|
||||
for key, item in baseInventory.items():
|
||||
print(f'item: {item}')
|
||||
item_str = f'{{"name": "{item.item_name}", "quantity": {item.quantity},\
|
||||
"per_item_cost": {item.per_item_cost}}}'
|
||||
daprClient.save_state("statestore-actors", key, item_str)
|
||||
|
||||
if __name__ == '__main__':
|
||||
app = WorkflowConsoleApp()
|
||||
app.main()
|
||||
```
|
||||
|
||||
#### `order-processor/workflow.py`
|
||||
|
||||
In `workflow.py`, the workflow is defined as a function along with its associated tasks (carried out by workflow activities).
|
||||
|
||||
```python
|
||||
def order_processing_workflow(ctx: DaprWorkflowContext, order_payload_str: OrderPayload):
|
||||
"""Defines the order processing workflow.
|
||||
When the order is received, the inventory is checked to see if there is enough inventory to
|
||||
fulfill the order. If there is enough inventory, the payment is processed and the inventory is
|
||||
updated. If there is not enough inventory, the order is rejected.
|
||||
If the total order is greater than $50,000, the order is sent to a manager for approval.
|
||||
"""
|
||||
order_id = ctx.instance_id
|
||||
order_payload=json.loads(order_payload_str)
|
||||
yield ctx.call_activity(notify_activity,
|
||||
input=Notification(message=('Received order ' +order_id+ ' for '
|
||||
+f'{order_payload["quantity"]}' +' ' +f'{order_payload["item_name"]}'
|
||||
+' at $'+f'{order_payload["total_cost"]}' +' !')))
|
||||
result = yield ctx.call_activity(verify_inventory_activity,
|
||||
input=InventoryRequest(request_id=order_id,
|
||||
item_name=order_payload["item_name"],
|
||||
quantity=order_payload["quantity"]))
|
||||
if not result.success:
|
||||
yield ctx.call_activity(notify_activity,
|
||||
input=Notification(message='Insufficient inventory for '
|
||||
+f'{order_payload["item_name"]}'+'!'))
|
||||
return OrderResult(processed=False)
|
||||
|
||||
if order_payload["total_cost"] > 50000:
|
||||
yield ctx.call_activity(requst_approval_activity, input=order_payload)
|
||||
approval_task = ctx.wait_for_external_event("manager_approval")
|
||||
timeout_event = ctx.create_timer(timedelta(seconds=200))
|
||||
winner = yield when_any([approval_task, timeout_event])
|
||||
if winner == timeout_event:
|
||||
yield ctx.call_activity(notify_activity,
|
||||
input=Notification(message='Payment for order '+order_id
|
||||
+' has been cancelled due to timeout!'))
|
||||
return OrderResult(processed=False)
|
||||
approval_result = yield approval_task
|
||||
if approval_result["approval"]:
|
||||
yield ctx.call_activity(notify_activity, input=Notification(
|
||||
message=f'Payment for order {order_id} has been approved!'))
|
||||
else:
|
||||
yield ctx.call_activity(notify_activity, input=Notification(
|
||||
message=f'Payment for order {order_id} has been rejected!'))
|
||||
return OrderResult(processed=False)
|
||||
|
||||
yield ctx.call_activity(process_payment_activity, input=PaymentRequest(
|
||||
request_id=order_id, item_being_purchased=order_payload["item_name"],
|
||||
amount=order_payload["total_cost"], quantity=order_payload["quantity"]))
|
||||
|
||||
try:
|
||||
yield ctx.call_activity(update_inventory_activity,
|
||||
input=PaymentRequest(request_id=order_id,
|
||||
item_being_purchased=order_payload["item_name"],
|
||||
amount=order_payload["total_cost"],
|
||||
quantity=order_payload["quantity"]))
|
||||
except Exception:
|
||||
yield ctx.call_activity(notify_activity,
|
||||
input=Notification(message=f'Order {order_id} Failed!'))
|
||||
return OrderResult(processed=False)
|
||||
|
||||
yield ctx.call_activity(notify_activity, input=Notification(
|
||||
message=f'Order {order_id} has completed!'))
|
||||
return OrderResult(processed=True)
|
||||
```
|
||||
{{% /codetab %}}
|
||||
|
||||
|
||||
|
|
|
@ -25,10 +25,7 @@ While Dapr's built-in components come [included with the runtime](https://github
|
|||
1. Pluggable components need to be started and ready to take requests _before_ Dapr itself is started.
|
||||
2. The [Unix Domain Socket][uds] file used for pluggable component communication needs to be accessible to both Dapr and the pluggable component.
|
||||
|
||||
Dapr does not launch any pluggable components processes or containers. This is something that you need to do, and it is different depending on how Dapr and your components are run:
|
||||
|
||||
- In self-hosted mode as processes or containers.
|
||||
- In Kubernetes, as containers.
|
||||
In standalone mode, pluggable components run as processes or containers. On Kubernetes, pluggable components run as containers and are automatically injected into the application's pod by Dapr's sidecar injector, allowing customization via the standard [Kubernetes Container spec](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.25/#container-v1-core).
|
||||
|
||||
This also changes the approach to share [Unix Domain Socket][uds] files between Dapr and pluggable components.
|
||||
|
||||
|
@ -47,11 +44,11 @@ Select your environment to begin making your component discoverable.

Both your component and the Unix Domain Socket must be running before Dapr starts.

By default, the Dapr sidecar looks for components as [Unix Domain Socket][uds] files in `/tmp/dapr-components-sockets`.

Filenames in this folder are significant for component registration. They must be formed by appending the component's **name** with a file extension of your choice, most commonly `.sock`. For example, the filename `my-component.sock` is a valid Unix Domain Socket filename for a component named `my-component`.
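For instance, starting a component and checking that its socket was created might look like the sketch below (the `my-component` binary and its start command are illustrative):

```bash
# Start the pluggable component so it creates its socket file (illustrative command)
./my-component &

# The socket filename in this folder must match the component name registered with Dapr
ls /tmp/dapr-components-sockets
# my-component.sock
```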

Since you are running Dapr on the same host as the component, verify that this folder and the files within it are accessible and writable by both your component and Dapr. If you are using Dapr's sidecar injector capability, this volume is created and mounted automatically.

### Component discovery and multiplexing
@ -92,8 +89,7 @@ Save this file as `component.yaml` in Dapr's component configuration folder. Jus

[Initialize Dapr]({{< ref get-started-api.md >}}), and make sure that your component file is placed in the right folder.

{{% alert title="Note" color="primary" %}}
Dapr 1.9.0 is the minimum version that supports pluggable components; run the following command to specify the runtime version: `dapr init --runtime-version 1.9.0`. As of version 1.11.0, automatic injection of pluggable component containers is supported.
{{% /alert %}}

<!-- We should list the actual command line the user will be typing here -->
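For reference, a local run might look like the sketch below (the app ID, resources path, and app command are illustrative and depend on your setup):

```bash
dapr run --app-id my-app --resources-path ./components -- python3 app.py
```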
@ -153,28 +149,20 @@ spec:
      labels:
        app: app
      annotations:
        dapr.io/unix-domain-socket-path: "/tmp/dapr-components-sockets" ## required, the default path that Dapr uses for registering components.
        # Recommended to automatically inject pluggable components.
        dapr.io/inject-pluggable-components: "true"
        dapr.io/app-id: "my-app"
        dapr.io/enabled: "true"
    spec:
      volumes: ## required, the sockets volume
        - name: dapr-unix-domain-socket
          emptyDir: {}
      containers:
        ### --------------------- YOUR APPLICATION CONTAINER GOES HERE -----------
        # Your application's container spec, as usual.
        - name: app
          image: YOUR_APP_IMAGE:YOUR_APP_IMAGE_VERSION
        ### --------------------- YOUR PLUGGABLE COMPONENT CONTAINER GOES HERE -----------
        - name: component
          image: YOUR_IMAGE_GOES_HERE:YOUR_IMAGE_VERSION
          volumeMounts: # required, the sockets volume mount
            - name: dapr-unix-domain-socket
              mountPath: /tmp/dapr-components-sockets
```

It's recommended to set the `dapr.io/inject-pluggable-components` annotation to `"true"`, indicating to Dapr's sidecar injector that this application's pod has additional containers for pluggable components.

Alternatively, you can skip Dapr's sidecar injection capability, manually add the pluggable component's container, and annotate your pod to tell Dapr which containers within that pod are pluggable components, like in the example below:

```yaml
apiVersion: apps/v1
@ -220,6 +208,13 @@ apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: prod-mystore
  # When running on Kubernetes with automatic container injection, add the annotation below:
  annotations:
    dapr.io/component-container: >
      {
        "name": "my-component",
        "image": "<registry>/<image_name>:<image_tag>"
      }
spec:
  type: your_component_type.your_socket_goes_here
  version: v1
@ -227,6 +222,7 @@ spec:
  scopes:
    - backend
```

The `dapr.io/component-container` annotation is mandatory on Kubernetes when you want Dapr's sidecar injector to handle container and volume injection for the pluggable component. At minimum, you need the `name` and `image` attributes for Dapr's sidecar injector to successfully add the container to the application's pod. The volume for the Unix Domain Socket is automatically created and mounted by Dapr's sidecar injector.

[Scope]({{< ref component-scopes >}}) your component to make sure that only the target application can connect with the pluggable component, since it will only be running in its deployment. Otherwise, the runtime fails when initializing the component.
@ -6,14 +6,18 @@ weight: 4500
description: "Choose which Dapr sidecar APIs are available to the app"
---

In certain scenarios, such as zero trust networks or when exposing the Dapr sidecar to external traffic through a frontend, it's recommended to only enable the Dapr sidecar APIs that are being used by the app. Doing so reduces the attack surface and helps keep the Dapr APIs scoped to the actual needs of the application.

Dapr allows developers to control which APIs are accessible to the application by setting an API allowlist or denylist using a [Dapr Configuration]({{<ref "configuration-overview.md">}}).

### Default behavior

If no API allowlist or denylist is specified, the default behavior is to allow access to all Dapr APIs.

- If only a denylist is defined, all Dapr APIs are allowed except those defined in the denylist.
- If only an allowlist is defined, only the Dapr APIs listed in the allowlist are allowed.
- If both an allowlist and a denylist are defined, the denylist overrides the allowlist for APIs defined in both: the allowed APIs are those in the allowlist, unless they also appear in the denylist (see the sketch after this list).
- If neither is defined, all APIs are allowed.
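A sketch of a configuration combining both lists; `state` appears in both, so the denylist wins and only the pub/sub API stays enabled:

```yaml
apiVersion: dapr.io/v1alpha1
kind: Configuration
metadata:
  name: myappconfig
  namespace: default
spec:
  api:
    allowed:
      - name: state
        version: v1.0
        protocol: http
      - name: publish
        version: v1.0
        protocol: http
    denied:
      # state is in both lists, so the denylist overrides the allowlist
      - name: state
        version: v1.0
        protocol: http
```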

For example, the following configuration enables all APIs for both HTTP and gRPC:
@ -28,9 +32,11 @@ spec:
  samplingRate: "1"
```

### Using an allowlist

#### Enabling specific HTTP APIs

The following example enables the state `v1.0` HTTP API and blocks all other HTTP APIs:

```yaml
apiVersion: dapr.io/v1alpha1
@ -41,14 +47,14 @@ metadata:
spec:
  api:
    allowed:
      - name: state
        version: v1.0
        protocol: http
```

#### Enabling specific gRPC APIs

The following example enables the state `v1` gRPC API and blocks all other gRPC APIs:

```yaml
apiVersion: dapr.io/v1alpha1
@ -59,9 +65,47 @@ metadata:
spec:
  api:
    allowed:
      - name: state
        version: v1
        protocol: grpc
```

### Using a denylist

#### Disabling specific HTTP APIs

The following example disables the state `v1.0` HTTP API, allowing all other HTTP APIs:

```yaml
apiVersion: dapr.io/v1alpha1
kind: Configuration
metadata:
  name: myappconfig
  namespace: default
spec:
  api:
    denied:
      - name: state
        version: v1.0
        protocol: http
```

#### Disabling specific gRPC APIs

The following example disables the state `v1` gRPC API, allowing all other gRPC APIs:

```yaml
apiVersion: dapr.io/v1alpha1
kind: Configuration
metadata:
  name: myappconfig
  namespace: default
spec:
  api:
    denied:
      - name: state
        version: v1
        protocol: grpc
```

### List of Dapr APIs

@ -70,12 +114,18 @@ The `name` field takes the name of the Dapr API you would like to enable.

See this list of values corresponding to the different Dapr APIs:

| API group | HTTP API | [gRPC API](https://github.com/dapr/dapr/blob/master/pkg/grpc/endpoints.go) |
| ----- | ----- | ----- |
| [Service Invocation]({{< ref service_invocation_api.md >}}) | `invoke` (`v1.0`) | `invoke` (`v1`) |
| [State]({{< ref state_api.md >}}) | `state` (`v1.0` and `v1.0-alpha1`) | `state` (`v1` and `v1alpha1`) |
| [Pub/Sub]({{< ref pubsub.md >}}) | `publish` (`v1.0` and `v1.0-alpha1`) | `publish` (`v1` and `v1alpha1`) |
| [(Output) Bindings]({{< ref bindings_api.md >}}) | `bindings` (`v1.0`) | `bindings` (`v1`) |
| [Secrets]({{< ref secrets_api.md >}}) | `secrets` (`v1.0`) | `secrets` (`v1`) |
| [Actors]({{< ref actors_api.md >}}) | `actors` (`v1.0`) | `actors` (`v1`) |
| [Metadata]({{< ref metadata_api.md >}}) | `metadata` (`v1.0`) | `metadata` (`v1`) |
| [Configuration]({{< ref configuration_api.md >}}) | `configuration` (`v1.0` and `v1.0-alpha1`) | `configuration` (`v1` and `v1alpha1`) |
| [Distributed Lock]({{< ref distributed_lock_api.md >}}) | `lock` (`v1.0-alpha1`)<br/>`unlock` (`v1.0-alpha1`) | `lock` (`v1alpha1`)<br/>`unlock` (`v1alpha1`) |
| Cryptography | `crypto` (`v1.0-alpha1`) | `crypto` (`v1alpha1`) |
| [Workflow]({{< ref workflow_api.md >}}) | `workflows` (`v1.0-alpha1`) | `workflows` (`v1alpha1`) |
| [Health]({{< ref health_api.md >}}) | `healthz` (`v1.0`) | n/a |
| Shutdown | `shutdown` (`v1.0`) | `shutdown` (`v1`) |
@ -58,6 +58,18 @@ dapr init -k
✅  Success! Dapr has been installed to namespace dapr-system. To verify, run "dapr status -k" in your terminal. To get started, go here: https://aka.ms/dapr-getting-started
```

To run the dashboard, run:

```bash
dapr dashboard -k
```

If you installed Dapr in a non-default namespace, run:

```bash
dapr dashboard -k -n <your-namespace>
```

### Install Dapr (from a private Dapr Helm chart)

There are some scenarios where it's necessary to install Dapr from a private Helm chart, such as:
- needing more granular control of the Dapr Helm chart
@ -125,7 +137,7 @@ The latest Dapr helm chart no longer supports Helm v2. Please migrate from Helm

### Add and install Dapr Helm chart

1. Make sure [Helm 3](https://github.com/helm/helm/releases) is installed on your machine
1. Add Helm repo and update

   ```bash
   # Add the official Dapr Helm chart.
@ -134,10 +146,11 @@ The latest Dapr helm chart no longer supports Helm v2. Please migrate from Helm
   helm repo add dapr http://helm.custom-domain.com/dapr/dapr/ \
     --username=xxx --password=xxx
   helm repo update
   # See which chart versions are available
   helm search repo dapr --devel --versions
   ```

1. Install the Dapr chart on your cluster in the `dapr-system` namespace.

   ```bash
   helm upgrade --install dapr dapr/dapr \
@ -158,8 +171,7 @@ The latest Dapr helm chart no longer supports Helm v2. Please migrate from Helm
   --wait
   ```

See [Guidelines for production ready deployments on Kubernetes]({{< ref kubernetes-production.md >}}) for more information on installing and upgrading Dapr using Helm.

### Uninstall Dapr on Kubernetes
@ -172,6 +184,22 @@ helm uninstall dapr --namespace dapr-system
- Read [this guide]({{< ref kubernetes-production.md >}}) for recommended Helm chart values for production setups
- See [this page](https://github.com/dapr/dapr/blob/master/charts/dapr/README.md) for details on Dapr Helm charts.

## Installing the Dapr dashboard as part of the control plane

If you want to install the Dapr dashboard, use this Helm chart with the additional settings of your choice:

`helm install dapr dapr/dapr-dashboard --namespace dapr-system`

For example:

```bash
helm repo add dapr https://dapr.github.io/helm-charts/
helm repo update
kubectl create namespace dapr-system
# Install the Dapr dashboard
helm install dapr dapr/dapr-dashboard --namespace dapr-system
```

## Verify installation

Once the installation is complete, verify that the dapr-operator, dapr-placement, dapr-sidecar-injector, and dapr-sentry pods are running in the `dapr-system` namespace:
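A sketch of the check (standard `kubectl` usage; pod names carry generated suffixes):

```bash
kubectl get pods --namespace dapr-system
```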
@ -75,4 +75,4 @@ By default, tailing is set to /var/log/containers/*.log. To change this setting,
* [Telemetry Data Platform](https://newrelic.com/platform/telemetry-data-platform)
* [New Relic Logging](https://github.com/newrelic/helm-charts/tree/master/charts/newrelic-logging)
* [Types of New Relic API keys](https://docs.newrelic.com/docs/apis/intro-apis/new-relic-api-keys/)
* [Alerts and Applied Intelligence](https://docs.newrelic.com/docs/alerts-applied-intelligence/overview/)
@ -40,4 +40,4 @@ This document explains how to install it in your cluster, either using a Helm ch
* [Telemetry Data Platform](https://newrelic.com/platform/telemetry-data-platform)
* [New Relic Prometheus OpenMetrics Integration](https://github.com/newrelic/helm-charts/tree/master/charts/nri-prometheus)
* [Types of New Relic API keys](https://docs.newrelic.com/docs/apis/intro-apis/new-relic-api-keys/)
* [Alerts and Applied Intelligence](https://docs.newrelic.com/docs/alerts-applied-intelligence/overview/)
@ -101,7 +101,7 @@ And the exact same dashboard templates from Dapr can be imported to visualize Da

## New Relic Alerts

All the data collected from Dapr, Kubernetes, or any services that run on top of them can be used to set up alerts and notifications to the channel of your choice. See [Alerts and Applied Intelligence](https://docs.newrelic.com/docs/alerts-applied-intelligence/overview/).

## Related Links/References
@ -111,4 +111,4 @@ All the data that is collected from Dapr, Kubernetes or any services that run on
* [New Relic Trace API](https://docs.newrelic.com/docs/distributed-tracing/trace-api/introduction-trace-api/)
* [Types of New Relic API keys](https://docs.newrelic.com/docs/apis/intro-apis/new-relic-api-keys/)
* [New Relic OpenTelemetry User Experience](https://blog.newrelic.com/product-news/opentelemetry-user-experience/)
* [Alerts and Applied Intelligence](https://docs.newrelic.com/docs/alerts-applied-intelligence/overview/)
@ -39,6 +39,12 @@ There is a process for applying breaking changes:
- For example, feature X is announced to be deprecated in the 1.0.0 release notes and will then be removed in 1.2.0.

## Deprecations

Deprecations can apply to:

1. APIs, including alpha APIs
1. Preview features
1. Components
1. CLI
1. Features that could result in security vulnerabilities

Deprecations appear in release notes under a section named "Deprecations", which indicates:
@ -56,6 +62,7 @@ After announcing a future breaking change, the change will happen in 2 releases
| Java domain builder classes deprecated (users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 |
| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{< ref "service_invocation_api.md#request-contents" >}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 |
| gRPC service invocation using the `invoke` method is deprecated. Use proxy mode service invocation instead. See [How-To: Invoke services using gRPC]({{< ref howto-invoke-services-grpc.md >}}) to use the proxy mode. | 1.9.0 | 1.10.0 |
| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. [daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267) | 1.11.0 | 1.13.0 |

## Related links
@ -33,7 +33,7 @@ Patch support is for supported versions (current and previous).
Dapr's sidecar image is published to both [GitHub Container Registry](https://github.com/dapr/dapr/pkgs/container/daprd) and [Docker Registry](https://hub.docker.com/r/daprio/daprd/tags). The default image contains all components. From version 1.11, Dapr also offers a variation of the sidecar image containing only stable components.

* Default sidecar images: `daprio/daprd:<version>` or `ghcr.io/dapr/daprd:<version>` (for example `ghcr.io/dapr/daprd:1.11.0`)
* Sidecar images for stable components: `daprio/daprd:<version>-stablecomponents` or `ghcr.io/dapr/daprd:<version>-stablecomponents` (for example `ghcr.io/dapr/daprd:1.11.0-stablecomponents`)

On Kubernetes, the sidecar image can be overwritten for the application Deployment resource with the `dapr.io/sidecar-image` annotation, as in the sketch below. See more about [Dapr's arguments and annotations]({{<ref "arguments-annotations-overview.md" >}}). The default `daprio/daprd:latest` image is used if not specified.
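For example, an abbreviated Deployment pinning the stable-components variant via the annotation (the app name and version are illustrative):

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: my-app
spec:
  template:
    metadata:
      annotations:
        dapr.io/enabled: "true"
        dapr.io/app-id: "my-app"
        # Override the default sidecar image for this Deployment
        dapr.io/sidecar-image: "daprio/daprd:1.11.0-stablecomponents"
```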
@ -45,9 +45,10 @@ The table below shows the versions of Dapr releases that have been tested togeth

| Release date | Runtime | CLI | SDKs | Dashboard | Status |
|--------------------|:--------:|:--------|---------|---------|---------|
| June 12th 2023 | 1.11.0</br> | 1.11.0 | Java 1.9.0 </br>Go 1.8.0 </br>PHP 1.1.0 </br>Python 1.10.0 </br>.NET 1.11.0 </br>JS 3.1.0 | 0.13.0 | Supported (current) |
| May 15th 2023 | 1.10.7</br> | 1.10.0 | Java 1.8.0 </br>Go 1.7.0 </br>PHP 1.1.0 </br>Python 1.9.0 </br>.NET 1.10.0 </br>JS 3.0.0 | 0.11.0 | Supported |
| May 12th 2023 | 1.10.6</br> | 1.10.0 | Java 1.8.0 </br>Go 1.7.0 </br>PHP 1.1.0 </br>Python 1.9.0 </br>.NET 1.10.0 </br>JS 3.0.0 | 0.11.0 | Supported |
| April 13 2023 | 1.10.5</br> | 1.10.0 | Java 1.8.0 </br>Go 1.6.0 </br>PHP 1.1.0 </br>Python 1.9.0 </br>.NET 1.10.0 </br>JS 3.0.0 | 0.11.0 | Supported |
| March 16 2023 | 1.10.4</br> | 1.10.0 | Java 1.8.0 </br>Go 1.6.0 </br>PHP 1.1.0 </br>Python 1.9.0 </br>.NET 1.10.0 </br>JS 2.5.0 | 0.11.0 | Supported |
| March 14 2023 | 1.10.3</br> | 1.10.0 | Java 1.8.0 </br>Go 1.6.0 </br>PHP 1.1.0 </br>Python 1.9.0 </br>.NET 1.10.0 </br>JS 2.5.0 | 0.11.0 | Supported |
| February 24 2023 | 1.10.2</br> | 1.10.0 | Java 1.8.0 </br>Go 1.6.0 </br>PHP 1.1.0 </br>Python 1.9.0 </br>.NET 1.10.0 </br>JS 2.5.0 | 0.11.0 | Supported |
@ -116,6 +117,8 @@ General guidance on upgrading can be found for [self hosted mode]({{< ref self-h
| 1.8.0 to 1.8.6 | N/A | 1.9.6 |
| 1.9.0 | N/A | 1.9.6 |
| 1.10.0 | N/A | 1.10.7 |
| 1.11.0 | N/A | 1.11.0 |

## Upgrade on Hosting platforms
@ -58,8 +58,12 @@ The [components-contrib](https://github.com/dapr/components-contrib/) repo relea
Note: Components have a production usage lifecycle status: Alpha, Beta, and Stable. These statuses are not related to their versioning. The tables of supported components show both their versions and their status.
* List of [state store components]({{< ref supported-state-stores.md >}})
* List of [pub/sub components]({{< ref supported-pubsub.md >}})
* List of [binding components]({{< ref supported-bindings.md >}})
* List of [secret store components]({{< ref supported-secret-stores.md >}})
* List of [configuration store components]({{< ref supported-configuration-stores.md >}})
* List of [lock components]({{< ref supported-locks.md >}})
* List of [cryptography components]({{< ref supported-cryptography.md >}})
* List of [middleware components]({{< ref supported-middleware.md >}})

For more information on component versioning, read [Version 2 and beyond of a component](https://github.com/dapr/components-contrib/blob/master/docs/developing-component.md#version-2-and-beyond-of-a-component).
@ -96,6 +100,8 @@ The version for a component implementation is determined by the `.spec.version`

### Component deprecations

Deprecations of components are announced two (2) releases ahead. Deprecation of a component results in a major version update of the component version. After 2 releases, the component is unregistered from the Dapr runtime, and trying to load it throws a fatal exception.

Component deprecations and removal are announced in the release notes.

## Quickstarts and Samples

Quickstarts in the [Quickstarts repo](https://github.com/dapr/quickstarts) are versioned with the runtime, where a table of corresponding versions is on the front page of the samples repo. Users should only use Quickstarts corresponding to the version of the runtime being run.
@ -103,3 +109,4 @@ Samples in the [Samples repo](https://github.com/dapr/samples) are each versione

## Related links
* Read the [Supported releases]({{< ref support-release-policy.md >}})
* Read the [Breaking Changes and Deprecation Policy]({{< ref breaking-changes-and-deprecations.md >}})
@ -6,9 +6,17 @@ description: "Detailed documentation on the Metadata API"
weight: 1100
---

Dapr has a metadata API that returns information about the sidecar, allowing runtime discoverability. The metadata endpoint returns the following information:

- Runtime version
- List of the loaded resources (`components`, `subscriptions`, and `HttpEndpoints`)
- Registered actor types
- Enabled features
- Application connection details
- Custom, ephemeral attributes with information attached

## Metadata API

### Components

Each loaded component provides its name, type, and version, as well as information about supported features in the form of component capabilities.
These features are available for the [state store]({{< ref supported-state-stores.md >}}) and [binding]({{< ref supported-bindings.md >}}) component types. The table below shows the component type and the list of capabilities for a given version. This list might grow in the future; it only represents the capabilities of the loaded components.
@ -17,12 +25,21 @@ Component type | Capabilities
State Store | ETAG, TRANSACTION, ACTOR, QUERY_API
Binding | INPUT_BINDING, OUTPUT_BINDING

### HTTPEndpoints

Each loaded `HttpEndpoint` provides a name to easily identify the Dapr resource associated with the runtime.

### Subscriptions

The metadata API returns a list of pub/sub subscriptions that the app has registered with the Dapr runtime. This includes the pub/sub name, topic, routes, dead letter topic, and the metadata associated with the subscription. An entry might look like the fragment below.
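A sketch of one subscription entry with illustrative values (the field names follow the response reference tables further down this page):

```json
{
  "pubsubname": "pubsub",
  "topic": "orders",
  "deadLetterTopic": "poisonMessages",
  "metadata": {
    "rawPayload": "true"
  },
  "rules": [
    {
      "match": "event.type == 'order'",
      "path": "/orders"
    }
  ]
}
```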

### Enabled features

A list of features enabled via the Configuration spec (including build-time overrides), as sketched below.
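A minimal Configuration sketch; `ServiceInvocationStreaming` is the preview feature that also appears in the example responses below:

```yaml
apiVersion: dapr.io/v1alpha1
kind: Configuration
metadata:
  name: featureconfig
spec:
  features:
    - name: ServiceInvocationStreaming
      enabled: true
```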

### App connection details

The metadata API returns information related to Dapr's connection to the app. This includes the app port, protocol, host, max concurrency, and health check details.

### Attributes

The metadata API allows you to store additional attribute information in the format of key-value pairs. These attributes are ephemeral and held in memory; they are not persisted if the sidecar is reloaded. They should be added at the time of sidecar creation (for example, after the application has started).
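For example, an attribute can be added with a PUT request against the metadata endpoint; `myDemoAttribute` is the attribute shown in the example response further below:

```bash
curl -X PUT -H "Content-Type: text/plain" \
  http://localhost:3500/v1.0/metadata/myDemoAttribute \
  -d "myDemoAttributeValue"
```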

## Get the Dapr sidecar information
@ -57,9 +74,13 @@ Code | Description
Name | Type | Description
---- | ---- | -----------
id | string | Application ID
runtimeVersion | string | Version of the Dapr runtime
actors | [Metadata API Response Registered Actor](#metadataapiresponseactor)[] | A JSON-encoded array of registered actor metadata.
extended.attributeName | string | List of custom attributes as key-value pairs, where key is the attribute name.
components | [Metadata API Response Component](#metadataapiresponsecomponent)[] | A JSON-encoded array of loaded component metadata.
httpEndpoints | [Metadata API Response HttpEndpoint](#metadataapiresponsehttpendpoint)[] | A JSON-encoded array of loaded HttpEndpoint metadata.
subscriptions | [Metadata API Response Subscription](#metadataapiresponsesubscription)[] | A JSON-encoded array of pub/sub subscription metadata.
appConnectionProperties | [Metadata API Response AppConnectionProperties](#metadataapiresponseappconnectionproperties) | A JSON-encoded object of app connection properties.

<a id="metadataapiresponseactor"></a>**Metadata API Response Registered Actor**
@ -75,7 +96,50 @@ Name | Type | Description
name | string | Name of the component.
type | string | Component type.
version | string | Component version.
capabilities | array | Supported capabilities for this component type and version.

<a id="metadataapiresponsehttpendpoint"></a>**Metadata API Response HttpEndpoint**

Name | Type | Description
---- | ---- | -----------
name | string | Name of the HttpEndpoint.

<a id="metadataapiresponsesubscription"></a>**Metadata API Response Subscription**

Name | Type | Description
---- | ---- | -----------
pubsubname | string | Name of the pub/sub.
topic | string | Topic name.
metadata | object | Metadata associated with the subscription.
rules | [Metadata API Response Subscription Rules](#metadataapiresponsesubscriptionrules)[] | List of rules associated with the subscription.
deadLetterTopic | string | Dead letter topic name.

<a id="metadataapiresponsesubscriptionrules"></a>**Metadata API Response Subscription Rules**

Name | Type | Description
---- | ---- | -----------
match | string | CEL expression to match the message.
path | string | Path to route the message to if the match expression is true.

<a id="metadataapiresponseappconnectionproperties"></a>**Metadata API Response AppConnectionProperties**

Name | Type | Description
---- | ---- | -----------
port | integer | Port on which the app is listening.
protocol | string | Protocol used by the app.
channelAddress | string | Host address on which the app is listening.
maxConcurrency | integer | Maximum number of concurrent requests the app can handle.
health | [Metadata API Response AppConnectionProperties Health](#metadataapiresponseappconnectionpropertieshealth) | Health check details of the app.

<a id="metadataapiresponseappconnectionpropertieshealth"></a>**Metadata API Response AppConnectionProperties Health**

Name | Type | Description
---- | ---- | -----------
healthCheckPath | string | Health check path, applicable for the HTTP protocol.
healthProbeInterval | string | Time between each health probe, in Go duration format.
healthProbeTimeout | string | Timeout for each health probe, in Go duration format.
healthThreshold | integer | Max number of failed health probes before the app is considered unhealthy.

### Examples
@ -87,32 +151,44 @@ curl http://localhost:3500/v1.0/metadata

```json
{
  "id": "demo-actor",
  "runtimeVersion": "1.12.0",
  "enabledFeatures": [
    "ServiceInvocationStreaming"
  ],
  "actors": [
    {
      "type": "DemoActor"
    }
  ],
  "components": [
    {
      "name": "pubsub",
      "type": "pubsub.redis",
      "version": "v1"
    },
    {
      "name": "statestore",
      "type": "state.redis",
      "version": "v1",
      "capabilities": [
        "ETAG",
        "TRANSACTIONAL",
        "ACTOR"
      ]
    }
  ],
  "extended": {
    "appCommand": "uvicorn --port 3000 demo_actor_service:app",
    "appPID": "98121",
    "cliPID": "98114",
    "daprRuntimeVersion": "1.12.0"
  },
  "appConnectionProperties": {
    "port": 3000,
    "protocol": "http",
    "channelAddress": "127.0.0.1"
  }
}
```
@ -172,32 +248,45 @@ Get the metadata information to confirm your custom attribute was added:

```json
{
  "id": "demo-actor",
  "runtimeVersion": "1.12.0",
  "enabledFeatures": [
    "ServiceInvocationStreaming"
  ],
  "actors": [
    {
      "type": "DemoActor"
    }
  ],
  "components": [
    {
      "name": "pubsub",
      "type": "pubsub.redis",
      "version": "v1"
    },
    {
      "name": "statestore",
      "type": "state.redis",
      "version": "v1",
      "capabilities": [
        "ETAG",
        "TRANSACTIONAL",
        "ACTOR"
      ]
    }
  ],
  "extended": {
    "myDemoAttribute": "myDemoAttributeValue",
    "appCommand": "uvicorn --port 3000 demo_actor_service:app",
    "appPID": "98121",
    "cliPID": "98114",
    "daprRuntimeVersion": "1.12.0"
  },
  "appConnectionProperties": {
    "port": 3000,
    "protocol": "http",
    "channelAddress": "127.0.0.1"
  }
}
```
@ -68,6 +68,9 @@ POST http://localhost:3500/v1.0/state/myStore?metadata.contentType=application/j
```

> All URL parameters are case-sensitive.

> Since `||` is a reserved string, it cannot be used in the `<state key>` field.

#### Request Body

A JSON array of state objects. Each state object comprises the following fields:
@ -67,6 +67,10 @@ spec:
| oidcClientID | N | Input/Output | The OAuth2 client ID that has been provisioned in the identity provider. Required when `authType` is set to `oidc` | `dapr-kafka` |
| oidcClientSecret | N | Input/Output | The OAuth2 client secret that has been provisioned in the identity provider. Required when `authType` is set to `oidc` | `"KeFg23!"` |
| oidcScopes | N | Input/Output | Comma-delimited list of OAuth2/OIDC scopes to request with the access token. Recommended when `authType` is set to `oidc`. Defaults to `"openid"` | `"openid,kafka-prod"` |
| version | N | Input/Output | Kafka cluster version. Defaults to 2.0.0. Note that this must be set to `1.0.0` when using Azure EventHubs with Kafka. | `1.0.0` |

#### Note

The metadata `version` must be set to `1.0.0` when using Azure EventHubs with Kafka, as in the fragment below.
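A fragment of a Kafka binding component sketching just the relevant entry:

```yaml
spec:
  type: bindings.kafka
  version: v1
  metadata:
  - name: version
    value: "1.0.0" # required when using Azure EventHubs with Kafka
```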

## Binding support
@ -65,7 +65,7 @@ spec:
| maxMessageBytes | N | The maximum size in bytes allowed for a single Kafka message. Defaults to 1024. | `2048` |
| consumeRetryInterval | N | The interval between retries when attempting to consume topics. Treats numbers without suffix as milliseconds. Defaults to 100ms. | `200ms` |
| consumeRetryEnabled | N | Disable consume retry by setting `"false"` | `"true"`, `"false"` |
| version | N | Kafka cluster version. Defaults to 2.0.0. Note that this must be set to `1.0.0` if you are using Azure EventHubs with Kafka. | `0.10.2.0` |
| caCert | N | Certificate authority certificate, required for using TLS. Can be `secretKeyRef` to use a secret reference | `"-----BEGIN CERTIFICATE-----\n<base64-encoded DER>\n-----END CERTIFICATE-----"` |
| clientCert | N | Client certificate, required for `authType` `mtls`. Can be `secretKeyRef` to use a secret reference | `"-----BEGIN CERTIFICATE-----\n<base64-encoded DER>\n-----END CERTIFICATE-----"` |
| clientKey | N | Client key, required for `authType` `mtls`. Can be `secretKeyRef` to use a secret reference | `"-----BEGIN RSA PRIVATE KEY-----\n<base64-encoded PKCS8>\n-----END RSA PRIVATE KEY-----"` |

@ -78,6 +78,9 @@ spec:

The `secretKeyRef` above is referencing a [Kubernetes secrets store]({{< ref kubernetes-secret-store.md >}}) to access the TLS information. Visit [here]({{< ref setup-secret-store.md >}}) to learn more about how to configure a secret store component.

#### Note

The metadata `version` must be set to `1.0.0` when using Azure EventHubs with Kafka.

### Authentication

Kafka supports a variety of authentication schemes and Dapr supports several: SASL password, mTLS, and OIDC/OAuth2. With the added authentication methods, the `authRequired` field has
@ -77,6 +77,9 @@ spec:
| batchingMaxSize | N | batchingMaxSize sets the maximum number of bytes permitted in a batch. If set to a value greater than 1, messages are queued until this threshold is reached, batchingMaxMessages (see above) has been reached, or the batch interval has elapsed. Default: `"128KB"` | `"131072"` |
| <topic-name>.jsonschema | N | Enforces JSON schema validation for the configured topic. |
| <topic-name>.avroschema | N | Enforces Avro schema validation for the configured topic. |
| publicKey | N | A public key to be used for publisher and consumer encryption. Value can be one of two options: file path for a local PEM cert, or the cert data string value |
| privateKey | N | A private key to be used for consumer encryption. Value can be one of two options: file path for a local PEM cert, or the cert data string value |
| keys | N | A comma-delimited string containing names of [Pulsar session keys](https://pulsar.apache.org/docs/3.0.x/security-encryption/#how-it-works-in-pulsar). Used in conjunction with `publicKey` for publisher encryption |

### Enabling message delivery retries
@ -115,6 +118,90 @@ curl -X POST http://localhost:3500/v1.0/publish/myPulsar/myTopic?metadata.delive
}'
```

### E2E Encryption

Dapr supports setting public and private key pairs to enable Pulsar's [end-to-end encryption feature](https://pulsar.apache.org/docs/3.0.x/security-encryption/).

#### Enabling publisher encryption from file certs

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: messagebus
spec:
  type: pubsub.pulsar
  version: v1
  metadata:
  - name: host
    value: "localhost:6650"
  - name: publicKey
    value: ./public.key
  - name: keys
    value: myapp.key
```

#### Enabling consumer encryption from file certs

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: messagebus
spec:
  type: pubsub.pulsar
  version: v1
  metadata:
  - name: host
    value: "localhost:6650"
  - name: publicKey
    value: ./public.key
  - name: privateKey
    value: ./private.key
```

#### Enabling publisher encryption from value

> Note: It is recommended to [reference the public key from a secret]({{< ref component-secrets.md >}}).

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: messagebus
spec:
  type: pubsub.pulsar
  version: v1
  metadata:
  - name: host
    value: "localhost:6650"
  - name: publicKey
    value: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1KDAM4L8RtJ+nLaXBrBh\nzVpvTemsKVZoAct8A+ShepOHT9lgHOCGLFGWNla6K6j+b3AV/P/fAAhwj82vwTDd\nruXSflvSdmYeFAw3Ypphc1A5oM53wSRWhg63potBNWqdDzj8ApYgqjpmjYSQdL5/\na3golb36GYFrY0MLFTv7wZ87pmMIPsOgGIcPbCHker2fRZ34WXYLb1hkeUpwx4eK\njpwcg35gccvR6o/UhbKAuc60V1J9Wof2sNgtlRaQej45wnpjWYzZrIyk5qUbn0Qi\nCdpIrXvYtANq0Id6gP8zJvUEdPIgNuYxEmVCl9jI+8eGI6peD0qIt8U80hf9axhJ\n3QIDAQAB\n-----END PUBLIC KEY-----\n"
  - name: keys
    value: myapp.key
```

#### Enabling consumer encryption from value

> Note: It is recommended to [reference the public and private keys from a secret]({{< ref component-secrets.md >}}).

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: messagebus
spec:
  type: pubsub.pulsar
  version: v1
  metadata:
  - name: host
    value: "localhost:6650"
  - name: publicKey
    value: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1KDAM4L8RtJ+nLaXBrBh\nzVpvTemsKVZoAct8A+ShepOHT9lgHOCGLFGWNla6K6j+b3AV/P/fAAhwj82vwTDd\nruXSflvSdmYeFAw3Ypphc1A5oM53wSRWhg63potBNWqdDzj8ApYgqjpmjYSQdL5/\na3golb36GYFrY0MLFTv7wZ87pmMIPsOgGIcPbCHker2fRZ34WXYLb1hkeUpwx4eK\njpwcg35gccvR6o/UhbKAuc60V1J9Wof2sNgtlRaQej45wnpjWYzZrIyk5qUbn0Qi\nCdpIrXvYtANq0Id6gP8zJvUEdPIgNuYxEmVCl9jI+8eGI6peD0qIt8U80hf9axhJ\n3QIDAQAB\n-----END PUBLIC KEY-----\n"
  - name: privateKey
    value: "-----BEGIN RSA PRIVATE KEY-----\nMIIEpAIBAAKCAQEA1KDAM4L8RtJ+nLaXBrBhzVpvTemsKVZoAct8A+ShepOHT9lg\nHOCGLFGWNla6K6j+b3AV/P/fAAhwj82vwTDdruXSflvSdmYeFAw3Ypphc1A5oM53\nwSRWhg63potBNWqdDzj8ApYgqjpmjYSQdL5/a3golb36GYFrY0MLFTv7wZ87pmMI\nPsOgGIcPbCHker2fRZ34WXYLb1hkeUpwx4eKjpwcg35gccvR6o/UhbKAuc60V1J9\nWof2sNgtlRaQej45wnpjWYzZrIyk5qUbn0QiCdpIrXvYtANq0Id6gP8zJvUEdPIg\nNuYxEmVCl9jI+8eGI6peD0qIt8U80hf9axhJ3QIDAQABAoIBAQCKuHnM4ac/eXM7\nQPDVX1vfgyHc3hgBPCtNCHnXfGFRvFBqavKGxIElBvGOcBS0CWQ+Rg1Ca5kMx3TQ\njSweSYhH5A7pe3Sa5FK5V6MGxJvRhMSkQi/lJZUBjzaIBJA9jln7pXzdHx8ekE16\nBMPONr6g2dr4nuI9o67xKrtfViwRDGaG6eh7jIMlEqMMc6WqyhvI67rlVDSTHFKX\njlMcozJ3IT8BtTzKg2Tpy7ReVuJEpehum8yn1ZVdAnotBDJxI07DC1cbOP4M2fHM\ngfgPYWmchauZuTeTFu4hrlY5jg0/WLs6by8r/81+vX3QTNvejX9UdTHMSIfQdX82\nAfkCKUVhAoGBAOvGv+YXeTlPRcYC642x5iOyLQm+BiSX4jKtnyJiTU2s/qvvKkIu\nxAOk3OtniT9NaUAHEZE9tI71dDN6IgTLQlAcPCzkVh6Sc5eG0MObqOO7WOMCWBkI\nlaAKKBbd6cGDJkwGCJKnx0pxC9f8R4dw3fmXWgWAr8ENiekMuvjSfjZ5AoGBAObd\ns2L5uiUPTtpyh8WZ7rEvrun3djBhzi+d7rgxEGdditeiLQGKyZbDPMSMBuus/5wH\nwfi0xUq50RtYDbzQQdC3T/C20oHmZbjWK5mDaLRVzWS89YG/NT2Q8eZLBstKqxkx\ngoT77zoUDfRy+CWs1xvXzgxagD5Yg8/OrCuXOqWFAoGAPIw3r6ELknoXEvihASxU\nS4pwInZYIYGXpygLG8teyrnIVOMAWSqlT8JAsXtPNaBtjPHDwyazfZrvEmEk51JD\nX0tA8M5ah1NYt+r5JaKNxp3P/8wUT6lyszyoeubWJsnFRfSusuq/NRC+1+KDg/aq\nKnSBu7QGbm9JoT2RrmBv5RECgYBRn8Lj1I1muvHTNDkiuRj2VniOSirkUkA2/6y+\nPMKi+SS0tqcY63v4rNCYYTW1L7Yz8V44U5mJoQb4lvpMbolGhPljjxAAU3hVkItb\nvGVRlSCIZHKczADD4rJUDOS7DYxO3P1bjUN4kkyYx+lKUMDBHFzCa2D6Kgt4dobS\n5qYajQKBgQC7u7MFPkkEMqNqNGu5erytQkBq1v1Ipmf9rCi3iIj4XJLopxMgw0fx\n6jwcwNInl72KzoUBLnGQ9PKGVeBcgEgdI+a+tq+1TJo6Ta+hZSx+4AYiKY18eRKG\neNuER9NOcSVJ7Eqkcw4viCGyYDm2vgNV9HJ0VlAo3RDh8x5spEN+mg==\n-----END RSA PRIVATE KEY-----\n"
```

## Create a Pulsar instance

{{< tabs "Self-Hosted" "Kubernetes">}}
@ -1,5 +1,5 @@
{{- $groups := dict
  " Using the Dapr cryptography engine" $.Site.Data.components.cryptography.daprcryptoengine
  "Microsoft Azure" $.Site.Data.components.cryptography.azure
}}
@ -5,26 +5,28 @@
<div class="col-6 col-sm-2 text-xs-center order-sm-2" style="margin-top: 1rem;">
  {{ with $links }}
    {{ with index . "user"}}
      {{ template "footer-links-block" . }}
    {{ end }}
  {{ end }}
</div>
<div class="col-6 col-sm-2 text-right text-xs-center order-sm-3" style="margin-top: 1rem;">
  {{ with $links }}
    {{ with index . "developer"}}
      {{ template "footer-links-block" . }}
    {{ end }}
  {{ end }}
</div>
<div class="col-12 col-sm-6 text-center py-2 order-sm-2">
  {{ with .Site.Params }}<small class="text-white">© {{ now.Year}} {{ .trademark | markdownify }}</small>{{ end }}
  {{ if not .Site.Params.ui.footer_about_disable }}
    {{ with .Site.GetPage "about" }}<p class="mt-2"><a href="{{ .RelPermalink }}">{{ .Title }}</a></p>{{ end }}
  {{ end }}
</div>
</div>
</div>
<img referrerpolicy="no-referrer-when-downgrade" src="https://static.scarf.sh/a.png?x-pxid=4848fb3b-3edb-4329-90a9-a9d79afff054" />
<script> (function (ss, ex) { window.ldfdr = window.ldfdr || function () { (ldfdr._q = ldfdr._q || []).push([].slice.call(arguments)); }; (function (d, s) { fs = d.getElementsByTagName(s)[0]; function ce(src) { var cs = d.createElement(s); cs.src = src; cs.async = 1; fs.parentNode.insertBefore(cs, fs); }; ce('https://sc.lfeeder.com/lftracker_v1_' + ss + (ex ? '_' + ex : '') + '.js'); })(document, 'script'); })('JMvZ8gblPjda2pOd'); </script>
<script async src="https://tag.clearbitscripts.com/v1/pk_3f4df076549ad932eda451778a42b09b/tags.js" referrerpolicy="strict-origin-when-cross-origin"></script>
</footer>
{{ define "footer-links-block" }}
<ul class="list-inline mb-0">
@ -1 +1 @@
{{- if .Get "short" }}1.10{{ else if .Get "long" }}1.10.7{{ else if .Get "cli" }}1.10.0{{ else }}1.10.7{{ end -}}
{{- if .Get "short" }}1.11{{ else if .Get "long" }}1.11.0{{ else if .Get "cli" }}1.11.0{{ else }}1.11.0{{ end -}}
Before Width: | Height: | Size: 36 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 113 KiB After Width: | Height: | Size: 89 KiB |
Before Width: | Height: | Size: 21 KiB After Width: | Height: | Size: 8.7 KiB |
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 12 KiB |
After Width: | Height: | Size: 181 KiB |
@ -1 +1 @@
Subproject commit f42b690f4c67e6bb4209932f660c46a96d0b0457
Subproject commit edb09a08b7a2ca63983f5237b307c40cae86d3bb

@ -1 +1 @@
Subproject commit d02f9524d96779350323128f88fefdd6fbd6787a
Subproject commit effc2f0d3c92ad76e11958e427c8d3b0900e1932

@ -1 +1 @@
Subproject commit 10d09619db5981ba45ec1268687c7a104cb338c1
Subproject commit d1c61cae40e7c5d933d92705198506d947960aaa

@ -1 +1 @@
Subproject commit ae34854397150715fe440dec831461d70e17d07a
Subproject commit 1e3b6eb859be175e12808c0ff345f40398f209d6

@ -1 +1 @@
Subproject commit 5190e79f4275ffcdaffe7efb59c21dffddefb5f8
Subproject commit 5051a9d5d92003924322a8ddbdf230fb8a872dd7