From d3f04bac75fd276bad9df322e42e865626a91f08 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Fri, 8 Dec 2023 15:09:20 -0500 Subject: [PATCH 01/20] set up for javascript Signed-off-by: Hannah Hunter --- .../workflow/howto-author-workflow.md | 56 ++++++++- .../workflow/howto-manage-workflow.md | 21 +++- .../workflow/workflow-features-concepts.md | 58 +++++++++- .../workflow/workflow-overview.md | 17 +-- .../workflow/workflow-patterns.md | 58 +++++++++- .../quickstarts/workflow-quickstart.md | 109 +++++++++++++++++- 6 files changed, 289 insertions(+), 30 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md index 98f0df760..d894c314c 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md @@ -34,7 +34,7 @@ The Dapr sidecar doesn’t load any workflow definitions. Rather, the sidecar si [Workflow activities]({{< ref "workflow-features-concepts.md#workflow-activites" >}}) are the basic unit of work in a workflow and are the tasks that get orchestrated in the business process. -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -52,6 +52,21 @@ def hello_act(ctx: WorkflowActivityContext, input): [See the `hello_act` workflow activity in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL40C1-L43C59) +{{% /codetab %}} + +{{% codetab %}} + + + +Define the workflow activities you'd like your workflow to perform. Activities are a function definition and can take inputs and outputs. The following example creates a counter (activity) called `hello_act` that notifies users of the current counter value. `hello_act` is a function derived from a class called `WorkflowActivityContext`. + +```javascript + +``` + +[See the workflow activity in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL40C1-L43C59) + + {{% /codetab %}} {{% codetab %}} @@ -172,7 +187,7 @@ public class DemoWorkflowActivity implements WorkflowActivity { Next, register and call the activites in a workflow. -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -193,6 +208,21 @@ def hello_world_wf(ctx: DaprWorkflowContext, input): [See the `hello_world_wf` workflow in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL32C1-L38C51) +{{% /codetab %}} + +{{% codetab %}} + + + +The `hello_world_wf` function is derived from a class called `DaprWorkflowContext` with input and output parameter types. It also includes a `yield` statement that does the heavy lifting of the workflow and calls the workflow activities. + +```javascript + +``` + +[See the `hello_world_wf` workflow in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL32C1-L38C51) + + {{% /codetab %}} {{% codetab %}} @@ -275,7 +305,7 @@ public class DemoWorkflowWorker { Finally, compose the application using the workflow. 
-{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -364,6 +394,25 @@ if __name__ == '__main__': ``` +{{% /codetab %}} + +{{% codetab %}} + + + +[In the following example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py), for a basic JavaScript hello world application using the Go SDK, your project code would include: + +- A JavaScript package called `todo` to receive the Go SDK capabilities. +- A builder with extensions called: + - `WorkflowRuntime`: Allows you to register workflows and workflow activities + - `DaprWorkflowContext`: Allows you to [create workflows]({{< ref "#write-the-workflow" >}}) + - `WorkflowActivityContext`: Allows you to [create workflow activities]({{< ref "#write-the-workflow-activities" >}}) +- API calls. In the example below, these calls start, pause, resume, purge, and terminate the workflow. + +```go + +``` + {{% /codetab %}} {{% codetab %}} @@ -504,5 +553,6 @@ Now that you've authored a workflow, learn how to manage it. - [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) + - [JavaScript example](todo) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md index fb7ad9d57..0412b2606 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md @@ -12,7 +12,7 @@ Dapr Workflow is currently in beta. [See known limitations for {{% dapr-latest-v Now that you've [authored the workflow and its activities in your application]({{< ref howto-author-workflow.md >}}), you can start, terminate, and get information about the workflow using HTTP API calls. For more information, read the [workflow API reference]({{< ref workflow_api.md >}}). -{{< tabs Python ".NET" Java HTTP >}} +{{< tabs Python JavaScript ".NET" Java HTTP >}} {{% codetab %}} @@ -63,6 +63,24 @@ d.terminate_workflow(instance_id=instanceId, workflow_component=workflowComponen {{% /codetab %}} + +{{% codetab %}} + +Manage your workflow within your code. 
In the workflow example from the [Author a workflow]({{< ref "howto-author-workflow.md#write-the-application" >}}) guide, the workflow is registered in the code using the following APIs: +- **start_workflow**: Start an instance of a workflow +- **get_workflow**: Get information on the status of the workflow +- **pause_workflow**: Pauses or suspends a workflow instance that can later be resumed +- **resume_workflow**: Resumes a paused workflow instance +- **raise_workflow_event**: Raise an event on a workflow +- **purge_workflow**: Removes all metadata related to a specific workflow instance +- **terminate_workflow**: Terminate or stop a particular instance of a workflow + +```javascript + +``` + +{{% /codetab %}} + {{% codetab %}} @@ -242,6 +260,7 @@ Learn more about these HTTP calls in the [workflow API reference guide]({{< ref - [Try out the Workflow quickstart]({{< ref workflow-quickstart.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py) + - [JavaScript example](todo) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md index ce39d4bac..9a1a58d0a 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md @@ -162,7 +162,15 @@ APIs that generate random numbers, random UUIDs, or the current date are _non-de For example, instead of this: -{{< tabs ".NET" Java >}} +{{< tabs JavaScript ".NET" Java >}} + +{{% codetab %}} + +```javascript +// DON'T DO THIS! +``` + +{{% /codetab %}} {{% codetab %}} @@ -190,7 +198,15 @@ string randomString = GetRandomString(); Do this: -{{< tabs ".NET" Java >}} +{{< tabs JavaScript ".NET" Java >}} + +{{% codetab %}} + +```javascript +// Do this!! +``` + +{{% /codetab %}} {{% codetab %}} @@ -224,7 +240,14 @@ Instead, workflows should interact with external state _indirectly_ using workfl For example, instead of this: -{{< tabs ".NET" Java >}} +{{< tabs JavaScript ".NET" Java >}} + +{{% codetab %}} + +```javascript +// DON'T DO THIS! +``` +{{% /codetab %}} {{% codetab %}} @@ -251,7 +274,15 @@ HttpResponse response = HttpClient.newBuilder().build().send(request, Ht Do this: -{{< tabs ".NET" Java >}} +{{< tabs JavaScript ".NET" Java >}} + +{{% codetab %}} + +```javascript +// Do this!! +``` + +{{% /codetab %}} {{% codetab %}} @@ -285,7 +316,14 @@ Failure to follow this rule could result in undefined behavior. Any background p For example, instead of this: -{{< tabs ".NET" Java >}} +{{< tabs JavaScript ".NET" Java >}} + +{{% codetab %}} + +```javascript +// DON'T DO THIS! +``` +{{% /codetab %}} {{% codetab %}} @@ -312,7 +350,15 @@ ctx.createTimer(Duration.ofSeconds(5)).await(); Do this: -{{< tabs ".NET" Java >}} +{{< tabs JavaScript ".NET" Java >}} + +{{% codetab %}} + +```javascript +// Do this!! 
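+// A minimal sketch: create a durable timer from the workflow context and yield it,
+// rather than sleeping or blocking inside the workflow function.
+yield ctx.createTimer(5); // resumes the workflow after 5 seconds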
+``` + +{{% /codetab %}} {{% codetab %}} diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index f5b6dae8b..e558c554f 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -73,16 +73,6 @@ Learn more about [different types of workflow patterns]({{< ref workflow-pattern The Dapr Workflow _authoring SDKs_ are language-specific SDKs that contain types and functions to implement workflow logic. The workflow logic lives in your application and is orchestrated by the Dapr Workflow engine running in the Dapr sidecar via a gRPC stream. -### Supported SDKs - -You can use the following SDKs to author a workflow. - -| Language stack | Package | -| - | - | -| Python | [dapr-ext-workflow](https://github.com/dapr/python-sdk/tree/master/ext/dapr-ext-workflow) | -| .NET | [Dapr.Workflow](https://www.nuget.org/profiles/dapr.io) | -| Java | [io.dapr.workflows](https://dapr.github.io/java-sdk/io/dapr/workflows/package-summary.html) | - ## Try out workflows ### Quickstarts and tutorials @@ -93,18 +83,16 @@ Want to put workflows to the test? Walk through the following quickstart and tut | ------------------- | ----------- | | [Workflow quickstart]({{< ref workflow-quickstart.md >}}) | Run a workflow application with four workflow activities to see Dapr Workflow in action | | [Workflow Python SDK example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) | Learn how to create a Dapr Workflow and invoke it using the Python `DaprClient` package. | +| [Workflow JavaScript SDK example](todo) | Learn how to create a Dapr Workflow and invoke it using the JavaScript `todo` package. | | [Workflow .NET SDK example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) | Learn how to create a Dapr Workflow and invoke it using ASP.NET Core web APIs. | | [Workflow Java SDK example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) | Learn how to create a Dapr Workflow and invoke it using the Java `io.dapr.workflows` package. | - ### Start using workflows directly in your app Want to skip the quickstarts? Not a problem. You can try out the workflow building block directly in your application. After [Dapr is installed]({{< ref install-dapr-cli.md >}}), you can begin using workflows, starting with [how to author a workflow]({{< ref howto-author-workflow.md >}}). ## Limitations -With Dapr Workflow in beta stage comes the following limitation(s): - - **State stores:** For the {{% dapr-latest-version cli="true" %}} beta release of Dapr Workflow, using the NoSQL databases as a state store results in limitations around storing internal states. For example, CosmosDB has a maximum single operation item limit of only 100 states in a single request. - **Horizontal scaling:** For the {{% dapr-latest-version cli="true" %}} beta release of Dapr Workflow, if you scale out Dapr sidecars or your application pods to more than 2, then the concurrency of the workflow execution drops. It is recommended to test with 1 or 2 instances, and no more than 2. 
@@ -123,6 +111,7 @@ Watch [this video for an overview on Dapr Workflow](https://youtu.be/s1p9MNl4VGo - [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the full SDK examples: - - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) + - [JavaScript example](todo) + - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index c7aebca4e..51356b354 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -25,7 +25,7 @@ While the pattern is simple, there are many complexities hidden in the implement Dapr Workflow solves these complexities by allowing you to implement the task chaining pattern concisely as a simple function in the programming language of your choice, as shown in the following example. -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -72,6 +72,15 @@ def error_handler(ctx, error): {{% /codetab %}} +{{% codetab %}} + + +```javascript + +``` + +{{% /codetab %}} + {{% codetab %}} @@ -186,7 +195,7 @@ In addition to the challenges mentioned in [the previous pattern]({{< ref "workf Dapr Workflows provides a way to express the fan-out/fan-in pattern as a simple function, as shown in the following example: -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -228,6 +237,15 @@ def process_results(ctx, final_result: int): {{% /codetab %}} +{{% codetab %}} + + +```javascript + +``` + +{{% /codetab %}} + {{% codetab %}} @@ -379,7 +397,7 @@ Depending on the business needs, there may be a single monitor or there may be m Dapr Workflow supports this pattern natively by allowing you to implement _eternal workflows_. Rather than writing infinite while-loops ([which is an anti-pattern]({{< ref "workflow-features-concepts.md#infinite-loops-and-eternal-workflows" >}})), Dapr Workflow exposes a _continue-as-new_ API that workflow authors can use to restart a workflow function from the beginning with a new input. -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -428,6 +446,17 @@ def send_alert(ctx, message: str): {{% /codetab %}} +{{% codetab %}} + + +```javascript + +``` + +> This example assumes you have a predefined `MyEntityState` class with a boolean `IsHealthy` property. + +{{% /codetab %}} + {{% codetab %}} @@ -540,7 +569,7 @@ The following diagram illustrates this flow. The following example code shows how this pattern can be implemented using Dapr Workflow. -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -601,6 +630,15 @@ def place_order(_, order: Order) -> None: {{% /codetab %}} +{{% codetab %}} + + +```javascript + +``` + +{{% /codetab %}} + {{% codetab %}} @@ -686,7 +724,7 @@ public class ExternalSystemInteractionWorkflow extends Workflow { The code that delivers the event to resume the workflow execution is external to the workflow. 
Workflow events can be delivered to a waiting workflow instance using the [raise event]({{< ref "howto-manage-workflow.md#raise-an-event" >}}) workflow management API, as shown in the following example: -{{< tabs Python ".NET" Java >}} +{{< tabs Python JavaScript ".NET" Java >}} {{% codetab %}} @@ -705,6 +743,15 @@ with DaprClient() as d: {{% /codetab %}} +{{% codetab %}} + + +```javascript +// Raise the workflow event to the waiting workflow +``` + +{{% /codetab %}} + {{% codetab %}} @@ -744,5 +791,6 @@ External events don't have to be directly triggered by humans. They can also be - [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) + - [JavaScript](todo) - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) \ No newline at end of file diff --git a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md index 0a1f2e779..8af699655 100644 --- a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md +++ b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md @@ -21,7 +21,7 @@ In this guide, you'll: -{{< tabs "Python" ".NET" "Java" >}} +{{< tabs "Python" "JavaScript" ".NET" "Java" >}} {{% codetab %}} @@ -265,6 +265,113 @@ In `workflow.py`, the workflow is defined as a class with all of its associated message=f'Order {order_id} has completed!')) return OrderResult(processed=True) ``` +{{% /codetab %}} + + +{{% codetab %}} + +The `order-processor` console app starts and manages the lifecycle of an order processing workflow that stores and retrieves data in a state store. The workflow consists of four workflow activities, or tasks: +- `NotifyActivity`: Utilizes a logger to print out messages throughout the workflow +- `ReserveInventoryActivity`: Checks the state store to ensure that there is enough inventory for the purchase +- `ProcessPaymentActivity`: Processes and authorizes the payment +- `UpdateInventoryActivity`: Removes the requested items from the state store and updates the store with the new remaining inventory value + + +### Step 1: Pre-requisites + +For this example, you will need: + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). +- [Latest Node.js installed](https://nodejs.org/download/). + +- [Docker Desktop](https://www.docker.com/products/docker-desktop) + + +### Step 2: Set up the environment + +Clone the [sample provided in the Quickstarts repo](https://github.com/dapr/quickstarts/tree/master/workflows). + +```bash +git clone https://github.com/dapr/quickstarts.git +``` + +In a new terminal window, navigate to the `order-processor` directory: + +```bash +cd workflows/javascript/sdk/order-processor +``` + +### Step 3: Run the order processor app + +In the terminal, start the order processor app alongside a Dapr sidecar: + +```bash +dapr run +``` + +This starts the `order-processor` app with unique workflow ID and runs the workflow activities. + +Expected output: + +``` + +``` + +### (Optional) Step 4: View in Zipkin + +Running `dapr init` launches the [openzipkin/zipkin](https://hub.docker.com/r/openzipkin/zipkin/) Docker container. 
If the container has stopped running, launch the Zipkin Docker container with the following command: + +``` +docker run -d -p 9411:9411 openzipkin/zipkin +``` + +View the workflow trace spans in the Zipkin web UI (typically at `http://localhost:9411/zipkin/`). + + + +### What happened? + +When you ran `dapr run `: + +1. A unique order ID for the workflow is generated (in the above example, `6d2abcc9`) and the workflow is scheduled. +1. The `NotifyActivity` workflow activity sends a notification saying an order for 10 cars has been received. +1. The `ReserveInventoryActivity` workflow activity checks the inventory data, determines if you can supply the ordered item, and responds with the number of cars in stock. +1. Your workflow starts and notifies you of its status. +1. The `ProcessPaymentActivity` workflow activity begins processing payment for order `6d2abcc9` and confirms if successful. +1. The `UpdateInventoryActivity` workflow activity updates the inventory with the current available cars after the order has been processed. +1. The `NotifyActivity` workflow activity sends a notification saying that order `6d2abcc9` has completed. +1. The workflow terminates as completed. + +#### `order-processor/index.js` + +In the application's program file: +- The unique workflow order ID is generated +- The workflow is scheduled +- The workflow status is retrieved +- The workflow and the workflow activities it invokes are registered + +```javascript + +``` + +#### `order-processor/Workflows/OrderProcessingWorkflow.js` + +In `OrderProcessingWorkflow.js`, the workflow is defined as a class with all of its associated tasks (determined by workflow activities). + +```javascript + +``` + +#### `order-processor/Activities` directory + +The `Activities` directory holds the four workflow activities used by the workflow, defined in the following files: + +## Watch the demo + +Watch [this video to walk through the Dapr Workflow .NET demo](https://youtu.be/BxiKpEmchgQ?t=2564): + + + {{% /codetab %}} From 5d3f8688cff12b8422f7a22ed681aff2c8462c9c Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Wed, 3 Jan 2024 09:59:49 -0500 Subject: [PATCH 02/20] cross link Signed-off-by: Hannah Hunter --- .../building-blocks/workflow/workflow-architecture.md | 1 + .../building-blocks/workflow/workflow-features-concepts.md | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md index 18ec9110b..c2398d772 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md @@ -195,5 +195,6 @@ See the [Reminder usage and execution guarantees section]({{< ref "workflow-arch - [Try out the Workflow quickstart]({{< ref workflow-quickstart.md >}}) - Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) + - [JavaScript example](todo) - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) \ No newline at end of file diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md 
b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md index 9a1a58d0a..3dfb0affb 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md @@ -409,4 +409,8 @@ To work around these constraints: - [Try out Dapr Workflow using the quickstart]({{< ref workflow-quickstart.md >}}) - [Workflow overview]({{< ref workflow-overview.md >}}) - [Workflow API reference]({{< ref workflow_api.md >}}) -- [Try out the .NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) +- Try out the following examples: + - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) + - [JavaScript example](todo) + - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) + - [Java](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) \ No newline at end of file From 3c9c255f88e1b80f3489befaa487f870c823f5ab Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Wed, 3 Jan 2024 12:04:59 -0500 Subject: [PATCH 03/20] add early js code examples Signed-off-by: Hannah Hunter --- .../workflow/howto-author-workflow.md | 224 +++++++++++++++- .../workflow/workflow-patterns.md | 242 ++++++++++++++++++ 2 files changed, 460 insertions(+), 6 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md index d894c314c..688ffecb9 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md @@ -58,13 +58,29 @@ def hello_act(ctx: WorkflowActivityContext, input): -Define the workflow activities you'd like your workflow to perform. Activities are a function definition and can take inputs and outputs. The following example creates a counter (activity) called `hello_act` that notifies users of the current counter value. `hello_act` is a function derived from a class called `WorkflowActivityContext`. +Define the workflow activities you'd like your workflow to perform. Activities are wrapped in the `WorkflowActivityContext` class, which implements the workflow activities. ```javascript +export default class WorkflowActivityContext { + private readonly _innerContext: ActivityContext; + constructor(innerContext: ActivityContext) { + if (!innerContext) { + throw new Error("ActivityContext cannot be undefined"); + } + this._innerContext = innerContext; + } + public getWorkflowInstanceId(): string { + return this._innerContext.orchestrationId; + } + + public getWorkflowActivityId(): number { + return this._innerContext.taskId; + } +} ``` -[See the workflow activity in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL40C1-L43C59) +[See the workflow activity in context.](todo) {{% /codetab %}} @@ -214,13 +230,43 @@ def hello_world_wf(ctx: DaprWorkflowContext, input): -The `hello_world_wf` function is derived from a class called `DaprWorkflowContext` with input and output parameter types. It also includes a `yield` statement that does the heavy lifting of the workflow and calls the workflow activities. +Next, register the workflow with the `WorkflowRuntime` class and start the workflow runtime. 
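+
+A minimal sketch of that registration in application code (assuming it runs inside an async function, and that `myWorkflow` and `myActivity` stand in for the workflow and activity functions you defined in the previous steps):
+
+```javascript
+const grpcEndpoint = "localhost:4001"; // gRPC address the workflow runtime connects to
+const workflowRuntime = new WorkflowRuntime(grpcEndpoint);
+workflowRuntime.registerWorkflow(myWorkflow).registerActivity(myActivity);
+await workflowRuntime.start();
+```
+
+The `WorkflowRuntime` class that provides these registration and start methods is shown below: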
```javascript +export default class WorkflowRuntime { + //.. + // Register workflow + public registerWorkflow(workflow: TWorkflow): WorkflowRuntime { + const name = getFunctionName(workflow); + const workflowWrapper = (ctx: OrchestrationContext, input: any): any => { + const workflowContext = new WorkflowContext(ctx); + return workflow(workflowContext, input); + }; + this.worker.addNamedOrchestrator(name, workflowWrapper); + return this; + } + + // Register workflow activities + public registerActivity(fn: TWorkflowActivity): WorkflowRuntime { + const name = getFunctionName(fn); + const activityWrapper = (ctx: ActivityContext, intput: TInput): TOutput => { + const wfActivityContext = new WorkflowActivityContext(ctx); + return fn(wfActivityContext, intput); + }; + this.worker.addNamedActivity(name, activityWrapper); + return this; + } + + // Start the workflow runtime processing items and block. + public async start() { + await this.worker.start(); + } + +} ``` -[See the `hello_world_wf` workflow in context.](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py#LL32C1-L38C51) +[See the `hello_world_wf` workflow in context.](todo) {{% /codetab %}} @@ -400,7 +446,7 @@ if __name__ == '__main__': -[In the following example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py), for a basic JavaScript hello world application using the Go SDK, your project code would include: +[In the following example](todo), for a basic JavaScript hello world application using the Go SDK, your project code would include: - A JavaScript package called `todo` to receive the Go SDK capabilities. - A builder with extensions called: @@ -409,8 +455,174 @@ if __name__ == '__main__': - `WorkflowActivityContext`: Allows you to [create workflow activities]({{< ref "#write-the-workflow-activities" >}}) - API calls. In the example below, these calls start, pause, resume, purge, and terminate the workflow. -```go +```javascript +import { TaskHubGrpcClient } from "kaibocai-durabletask-js"; +import * as grpc from "@grpc/grpc-js"; +import { WorkflowState } from "./WorkflowState"; +import { generateInterceptors } from "../internal/ApiTokenClientInterceptor"; +import { TWorkflow } from "../types/Workflow.type"; +import { getFunctionName } from "../internal"; +export default class WorkflowClient { + private readonly _innerClient: TaskHubGrpcClient; + + /** + * Initializes a new instance of the DaprWorkflowClient. + * @param {string | undefined} hostAddress - The address of the Dapr runtime hosting the workflow services. + * @param {grpc.ChannelOptions | undefined} options - Additional options for configuring the gRPC channel. + */ + constructor(hostAddress?: string, options?: grpc.ChannelOptions) { + this._innerClient = this._buildInnerClient(hostAddress, options); + } + + _buildInnerClient(hostAddress = "127.0.0.1:50001", options: grpc.ChannelOptions = {}): TaskHubGrpcClient { + const innerOptions = { + ...options, + interceptors: [generateInterceptors(), ...(options?.interceptors ?? [])], + }; + return new TaskHubGrpcClient(hostAddress, innerOptions); + } + + /** + * Schedules a new workflow using the DurableTask client. + * + * @param {TWorkflow | string} workflow - The Workflow or the name of the workflow to be scheduled. + * @return {Promise} A Promise resolving to the unique ID of the scheduled workflow instance. 
+ */ + public async scheduleNewWorkflow( + workflow: TWorkflow | string, + input?: any, + instanceId?: string, + startAt?: Date, + ): Promise { + if (typeof workflow === "string") { + return await this._innerClient.scheduleNewOrchestration(workflow, input, instanceId, startAt); + } + return await this._innerClient.scheduleNewOrchestration(getFunctionName(workflow), input, instanceId, startAt); + } + + /** + * Terminates the workflow associated with the provided instance id. + * + * @param {string} workflowInstanceId - Workflow instance id to terminate. + * @param {any} output - The optional output to set for the terminated workflow instance. + */ + public async terminateWorkflow(workflowInstanceId: string, output: any) { + await this._innerClient.terminateOrchestration(workflowInstanceId, output); + } + + /** + * Fetches workflow instance metadata from the configured durable store. + * + * @param {string} workflowInstanceId - The unique identifier of the workflow instance to fetch. + * @param {boolean} getInputsAndOutputs - Indicates whether to fetch the workflow instance's + * inputs, outputs, and custom status (true) or omit them (false). + * @returns {Promise} A Promise that resolves to a metadata record describing + * the workflow instance and its execution status, or undefined + * if the instance is not found. + */ + public async getWorkflowState( + workflowInstanceId: string, + getInputsAndOutputs: boolean, + ): Promise { + const state = await this._innerClient.getOrchestrationState(workflowInstanceId, getInputsAndOutputs); + if (state !== undefined) { + return new WorkflowState(state); + } + } + + /** + * Waits for a workflow to start running and returns a {@link WorkflowState} object + * containing metadata about the started instance, and optionally, its input, output, + * and custom status payloads. + * + * A "started" workflow instance refers to any instance not in the Pending state. + * + * If a workflow instance is already running when this method is called, it returns immediately. + * + * @param {string} workflowInstanceId - The unique identifier of the workflow instance to wait for. + * @param {boolean} fetchPayloads - Indicates whether to fetch the workflow instance's + * inputs, outputs (true) or omit them (false). + * @param {number} timeout - The amount of time, in seconds, to wait for the workflow instance to start. + * @returns {Promise} A Promise that resolves to the workflow instance metadata + * or undefined if no such instance is found. + */ + public async waitForWorkflowStart( + workflowInstanceId: string, + fetchPayloads?: boolean, + timeout?: number, + ): Promise { + const state = await this._innerClient.waitForOrchestrationStart(workflowInstanceId, fetchPayloads, timeout); + if (state !== undefined) { + return new WorkflowState(state); + } + } + + /** + * Waits for a workflow to complete running and returns a {@link WorkflowState} object + * containing metadata about the completed instance, and optionally, its input, output, + * and custom status payloads. + * + * A "completed" workflow instance refers to any instance in one of the terminal states. + * For example, the Completed, Failed, or Terminated states. + * + * If a workflow instance is already running when this method is called, it returns immediately. + * + * @param {string} workflowInstanceId - The unique identifier of the workflow instance to wait for. + * @param {boolean} fetchPayloads - Indicates whether to fetch the workflow instance's + * inputs, outputs (true) or omit them (false). 
+ * @param {number} timeout - The amount of time, in seconds, to wait for the workflow instance to start. + * @returns {Promise} A Promise that resolves to the workflow instance metadata + * or undefined if no such instance is found. + */ + public async waitForWorkflowCompletion( + workflowInstanceId: string, + fetchPayloads = true, + timeout: number, + ): Promise { + const state = await this._innerClient.waitForOrchestrationCompletion(workflowInstanceId, fetchPayloads, timeout); + if (state != undefined) { + return new WorkflowState(state); + } + } + + /** + * Sends an event notification message to an awaiting workflow instance. + * + * This method triggers the specified event in a running workflow instance, + * allowing the workflow to respond to the event if it has defined event handlers. + * + * @param {string} workflowInstanceId - The unique identifier of the workflow instance that will handle the event. + * @param {string} eventName - The name of the event. Event names are case-insensitive. + * @param {any} [eventPayload] - An optional serializable data payload to include with the event. + */ + public async raiseEvent(workflowInstanceId: string, eventName: string, eventPayload?: any) { + this._innerClient.raiseOrchestrationEvent(workflowInstanceId, eventName, eventPayload); + } + + /** + * Purges the workflow instance state from the workflow state store. + * + * This method removes the persisted state associated with a workflow instance from the state store. + * + * @param {string} workflowInstanceId - The unique identifier of the workflow instance to purge. + * @return {Promise} A Promise that resolves to true if the workflow state was found and purged successfully, otherwise false. + */ + public async purgeWorkflow(workflowInstanceId: string): Promise { + const purgeResult = await this._innerClient.purgeOrchestration(workflowInstanceId); + if (purgeResult !== undefined) { + return purgeResult.deletedInstanceCount > 0; + } + return false; + } + + /** + * Closes the inner DurableTask client and shutdown the GRPC channel. 
+ */ + public async stop() { + await this._innerClient.stop(); + } +} ``` {{% /codetab %}} diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index 51356b354..74dd1aa48 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -76,7 +76,60 @@ def error_handler(ctx, error): ```javascript +import WorkflowClient from "../client/WorkflowClient"; +import WorkflowActivityContext from "../runtime/WorkflowActivityContext"; +import WorkflowContext from "../runtime/WorkflowContext"; +import WorkflowRuntime from "../runtime/WorkflowRuntime"; +import { TWorkflow } from "../types/Workflow.type"; +(async () => { + const grpcEndpoint = "localhost:4001"; + const workflowClient = new WorkflowClient(grpcEndpoint); + const workflowRuntime = new WorkflowRuntime(grpcEndpoint); + + const hello = async (_: WorkflowActivityContext, name: string) => { + return `Hello ${name}!`; + }; + + const sequence: TWorkflow = async function* (ctx: WorkflowContext): any { + const cities: string[] = []; + + const result1 = yield ctx.callActivity(hello, "Tokyo"); + cities.push(result1); + const result2 = yield ctx.callActivity(hello, "Seattle"); // Correct the spelling of "Seattle" + cities.push(result2); + const result3 = yield ctx.callActivity(hello, "London"); + cities.push(result3); + + return cities; + }; + + workflowRuntime.registerWorkflow(sequence).registerActivity(hello); + + // Wrap the worker startup in a try-catch block to handle any errors during startup + try { + await workflowRuntime.start(); + console.log("Workflow runtime started successfully"); + } catch (error) { + console.error("Error starting workflow runtime:", error); + } + + // Schedule a new orchestration + try { + const id = await workflowClient.scheduleNewWorkflow(sequence); + console.log(`Orchestration scheduled with ID: ${id}`); + + // Wait for orchestration completion + const state = await workflowClient.waitForWorkflowCompletion(id, undefined, 30); + + console.log(`Orchestration completed! 
Result: ${state?.serializedOutput}`); + } catch (error) { + console.error("Error scheduling or waiting for orchestration:", error); + } + + await workflowRuntime.stop(); + await workflowClient.stop(); +})(); ``` {{% /codetab %}} @@ -241,7 +294,87 @@ def process_results(ctx, final_result: int): ```javascript +import { Task } from "kaibocai-durabletask-js/task/task"; +import WorkflowClient from "../client/WorkflowClient"; +import WorkflowActivityContext from "../runtime/WorkflowActivityContext"; +import WorkflowContext from "../runtime/WorkflowContext"; +import WorkflowRuntime from "../runtime/WorkflowRuntime"; +import { TWorkflow } from "../types/Workflow.type"; +// Wrap the entire code in an immediately-invoked async function +(async () => { + // Update the gRPC client and worker to use a local address and port + const grpcServerAddress = "localhost:4001"; + const workflowClient: WorkflowClient = new WorkflowClient(grpcServerAddress); + const workflowRuntime: WorkflowRuntime = new WorkflowRuntime(grpcServerAddress); + + function getRandomInt(min: number, max: number): number { + return Math.floor(Math.random() * (max - min + 1)) + min; + } + + async function getWorkItemsActivity(_: WorkflowActivityContext): Promise { + const count: number = getRandomInt(2, 10); + console.log(`generating ${count} work items...`); + + const workItems: string[] = Array.from({ length: count }, (_, i) => `work item ${i}`); + return workItems; + } + + function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); + } + + async function processWorkItemActivity(context: WorkflowActivityContext, item: string): Promise { + console.log(`processing work item: ${item}`); + + // Simulate some work that takes a variable amount of time + const sleepTime = Math.random() * 5000; + await sleep(sleepTime); + + // Return a result for the given work item, which is also a random number in this case + return Math.floor(Math.random() * 11); + } + + const workflow: TWorkflow = async function* (ctx: WorkflowContext): any { + const tasks: Task[] = []; + const workItems = yield ctx.callActivity(getWorkItemsActivity); + for (const workItem of workItems) { + tasks.push(ctx.callActivity(processWorkItemActivity, workItem)); + } + const results: number[] = yield ctx.whenAll(tasks); + const sum: number = results.reduce((accumulator, currentValue) => accumulator + currentValue, 0); + return sum; + }; + + workflowRuntime.registerWorkflow(workflow); + workflowRuntime.registerActivity(getWorkItemsActivity); + workflowRuntime.registerActivity(processWorkItemActivity); + + // Wrap the worker startup in a try-catch block to handle any errors during startup + try { + await workflowRuntime.start(); + console.log("Worker started successfully"); + } catch (error) { + console.error("Error starting worker:", error); + } + + // Schedule a new orchestration + try { + const id = await workflowClient.scheduleNewWorkflow(workflow); + console.log(`Orchestration scheduled with ID: ${id}`); + + // Wait for orchestration completion + const state = await workflowClient.waitForWorkflowCompletion(id, undefined, 30); + + console.log(`Orchestration completed! 
Result: ${state?.serializedOutput}`); + } catch (error) { + console.error("Error scheduling or waiting for orchestration:", error); + } + + // stop worker and client + await workflowRuntime.stop(); + await workflowClient.stop(); +})(); ``` {{% /codetab %}} @@ -634,7 +767,116 @@ def place_order(_, order: Order) -> None: ```javascript +import { Task } from "kaibocai-durabletask-js/task/task"; +import WorkflowClient from "../client/WorkflowClient"; +import WorkflowActivityContext from "../runtime/WorkflowActivityContext"; +import WorkflowContext from "../runtime/WorkflowContext"; +import WorkflowRuntime from "../runtime/WorkflowRuntime"; +import { TWorkflow } from "../types/Workflow.type"; +import * as readlineSync from "readline-sync"; +// Wrap the entire code in an immediately-invoked async function +(async () => { + class Order { + cost: number; + product: string; + quantity: number; + constructor(cost: number, product: string, quantity: number) { + this.cost = cost; + this.product = product; + this.quantity = quantity; + } + } + + function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); + } + + // Update the gRPC client and worker to use a local address and port + const grpcServerAddress = "localhost:4001"; + let workflowClient: WorkflowClient = new WorkflowClient(grpcServerAddress); + let workflowRuntime: WorkflowRuntime = new WorkflowRuntime(grpcServerAddress); + + //Activity function that sends an approval request to the manager + const sendApprovalRequest = async (_: WorkflowActivityContext, order: Order) => { + // Simulate some work that takes an amount of time + await sleep(3000); + console.log(`Sending approval request for order: ${order.product}`); + }; + + // Activity function that places an order + const placeOrder = async (_: WorkflowActivityContext, order: Order) => { + console.log(`Placing order: ${order.product}`); + }; + + // Orchestrator function that represents a purchase order workflow + const purchaseOrderWorkflow: TWorkflow = async function* (ctx: WorkflowContext, order: Order): any { + // Orders under $1000 are auto-approved + if (order.cost < 1000) { + return "Auto-approved"; + } + + // Orders of $1000 or more require manager approval + yield ctx.callActivity(sendApprovalRequest, order); + + // Approvals must be received within 24 hours or they will be cancled. 
+ const tasks: Task[] = []; + const approvalEvent = ctx.waitForExternalEvent("approval_received"); + const timeoutEvent = ctx.createTimer(24 * 60 * 60); + tasks.push(approvalEvent); + tasks.push(timeoutEvent); + const winner = ctx.whenAny(tasks); + + if (winner == timeoutEvent) { + return "Cancelled"; + } + + yield ctx.callActivity(placeOrder, order); + const approvalDetails = approvalEvent.getResult(); + return `Approved by ${approvalDetails.approver}`; + }; + + workflowRuntime + .registerWorkflow(purchaseOrderWorkflow) + .registerActivity(sendApprovalRequest) + .registerActivity(placeOrder); + + // Wrap the worker startup in a try-catch block to handle any errors during startup + try { + await workflowRuntime.start(); + console.log("Worker started successfully"); + } catch (error) { + console.error("Error starting worker:", error); + } + + // Schedule a new orchestration + try { + const cost = readlineSync.questionInt("Cost of your order:"); + const approver = readlineSync.question("Approver of your order:"); + const timeout = readlineSync.questionInt("Timeout for your order in seconds:"); + const order = new Order(cost, "MyProduct", 1); + const id = await workflowClient.scheduleNewWorkflow(purchaseOrderWorkflow, order); + console.log(`Orchestration scheduled with ID: ${id}`); + + if (readlineSync.keyInYN("Press [Y] to approve the order... Y/yes, N/no")) { + const approvalEvent = { approver: approver }; + await workflowClient.raiseEvent(id, "approval_received", approvalEvent); + } else { + return "Order rejected"; + } + + // Wait for orchestration completion + const state = await workflowClient.waitForWorkflowCompletion(id, undefined, timeout + 2); + + console.log(`Orchestration completed! Result: ${state?.serializedOutput}`); + } catch (error) { + console.error("Error scheduling or waiting for orchestration:", error); + } + + // stop worker and client + await workflowRuntime.stop(); + await workflowClient.stop(); +})(); ``` {{% /codetab %}} From 11ab77a82e62a2a308149be720cfbcae3188f8ab Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Thu, 25 Jan 2024 13:58:16 -0500 Subject: [PATCH 04/20] update howtos and patterns Signed-off-by: Hannah Hunter --- .../workflow/howto-author-workflow.md | 128 ++++++----------- .../workflow/howto-manage-workflow.md | 69 +++++++-- .../workflow/workflow-patterns.md | 133 ++++++++++++------ 3 files changed, 199 insertions(+), 131 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md index 688ffecb9..00330236b 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md @@ -80,7 +80,7 @@ export default class WorkflowActivityContext { } ``` -[See the workflow activity in context.](todo) +[See the workflow activity in context.](https://github.com/dapr/js-sdk/blob/main/src/workflow/runtime/WorkflowActivityContext.ts) {{% /codetab %}} @@ -236,7 +236,7 @@ Next, register the workflow with the `WorkflowRuntime` class and start the workf export default class WorkflowRuntime { //.. 
- // Register workflow + // Register workflow implementation for handling orchestrations public registerWorkflow(workflow: TWorkflow): WorkflowRuntime { const name = getFunctionName(workflow); const workflowWrapper = (ctx: OrchestrationContext, input: any): any => { @@ -266,7 +266,7 @@ export default class WorkflowRuntime { } ``` -[See the `hello_world_wf` workflow in context.](todo) +[See the `WorkflowRuntime` in context.](https://github.com/dapr/js-sdk/blob/main/src/workflow/runtime/WorkflowRuntime.ts) {{% /codetab %}} @@ -446,48 +446,48 @@ if __name__ == '__main__': -[In the following example](todo), for a basic JavaScript hello world application using the Go SDK, your project code would include: +[The following example](https://github.com/dapr/js-sdk/blob/main/src/workflow/client/DaprWorkflowClient.ts) is a basic JavaScript application using the JavaScript SDK. As in this example, your project code would include: -- A JavaScript package called `todo` to receive the Go SDK capabilities. - A builder with extensions called: - `WorkflowRuntime`: Allows you to register workflows and workflow activities - `DaprWorkflowContext`: Allows you to [create workflows]({{< ref "#write-the-workflow" >}}) - `WorkflowActivityContext`: Allows you to [create workflow activities]({{< ref "#write-the-workflow-activities" >}}) -- API calls. In the example below, these calls start, pause, resume, purge, and terminate the workflow. +- API calls. In the example below, these calls start, terminate, get status, pause, resume, raise event, and purge the workflow. ```javascript -import { TaskHubGrpcClient } from "kaibocai-durabletask-js"; -import * as grpc from "@grpc/grpc-js"; +import { TaskHubGrpcClient } from "@microsoft/durabletask-js"; import { WorkflowState } from "./WorkflowState"; -import { generateInterceptors } from "../internal/ApiTokenClientInterceptor"; -import { TWorkflow } from "../types/Workflow.type"; +import { generateApiTokenClientInterceptors, generateEndpoint, getDaprApiToken } from "../internal/index"; +import { TWorkflow } from "../../types/workflow/Workflow.type"; import { getFunctionName } from "../internal"; +import { WorkflowClientOptions } from "../../types/workflow/WorkflowClientOption"; -export default class WorkflowClient { +/** DaprWorkflowClient class defines client operations for managing workflow instances. */ + +export default class DaprWorkflowClient { private readonly _innerClient: TaskHubGrpcClient; - /** - * Initializes a new instance of the DaprWorkflowClient. - * @param {string | undefined} hostAddress - The address of the Dapr runtime hosting the workflow services. - * @param {grpc.ChannelOptions | undefined} options - Additional options for configuring the gRPC channel. + /** Initialize a new instance of the DaprWorkflowClient. */ - constructor(hostAddress?: string, options?: grpc.ChannelOptions) { - this._innerClient = this._buildInnerClient(hostAddress, options); + constructor(options: Partial = {}) { + const grpcEndpoint = generateEndpoint(options); + options.daprApiToken = getDaprApiToken(options); + this._innerClient = this.buildInnerClient(grpcEndpoint.endpoint, options); } - _buildInnerClient(hostAddress = "127.0.0.1:50001", options: grpc.ChannelOptions = {}): TaskHubGrpcClient { - const innerOptions = { - ...options, - interceptors: [generateInterceptors(), ...(options?.interceptors ?? 
[])], - }; + private buildInnerClient(hostAddress: string, options: Partial): TaskHubGrpcClient { + let innerOptions = options?.grpcOptions; + if (options.daprApiToken !== undefined && options.daprApiToken !== "") { + innerOptions = { + ...innerOptions, + interceptors: [generateApiTokenClientInterceptors(options), ...(innerOptions?.interceptors ?? [])], + }; + } return new TaskHubGrpcClient(hostAddress, innerOptions); } /** - * Schedules a new workflow using the DurableTask client. - * - * @param {TWorkflow | string} workflow - The Workflow or the name of the workflow to be scheduled. - * @return {Promise} A Promise resolving to the unique ID of the scheduled workflow instance. + * Schedule a new workflow using the DurableTask client. */ public async scheduleNewWorkflow( workflow: TWorkflow | string, @@ -502,7 +502,7 @@ export default class WorkflowClient { } /** - * Terminates the workflow associated with the provided instance id. + * Terminate the workflow associated with the provided instance id. * * @param {string} workflowInstanceId - Workflow instance id to terminate. * @param {any} output - The optional output to set for the terminated workflow instance. @@ -512,14 +512,7 @@ export default class WorkflowClient { } /** - * Fetches workflow instance metadata from the configured durable store. - * - * @param {string} workflowInstanceId - The unique identifier of the workflow instance to fetch. - * @param {boolean} getInputsAndOutputs - Indicates whether to fetch the workflow instance's - * inputs, outputs, and custom status (true) or omit them (false). - * @returns {Promise} A Promise that resolves to a metadata record describing - * the workflow instance and its execution status, or undefined - * if the instance is not found. + * Fetch workflow instance metadata from the configured durable store. */ public async getWorkflowState( workflowInstanceId: string, @@ -532,69 +525,43 @@ export default class WorkflowClient { } /** - * Waits for a workflow to start running and returns a {@link WorkflowState} object - * containing metadata about the started instance, and optionally, its input, output, - * and custom status payloads. - * - * A "started" workflow instance refers to any instance not in the Pending state. - * - * If a workflow instance is already running when this method is called, it returns immediately. - * - * @param {string} workflowInstanceId - The unique identifier of the workflow instance to wait for. - * @param {boolean} fetchPayloads - Indicates whether to fetch the workflow instance's - * inputs, outputs (true) or omit them (false). - * @param {number} timeout - The amount of time, in seconds, to wait for the workflow instance to start. - * @returns {Promise} A Promise that resolves to the workflow instance metadata - * or undefined if no such instance is found. 
+ * Waits for a workflow to start running */ public async waitForWorkflowStart( workflowInstanceId: string, - fetchPayloads?: boolean, - timeout?: number, + fetchPayloads = true, + timeoutInSeconds = 60, ): Promise { - const state = await this._innerClient.waitForOrchestrationStart(workflowInstanceId, fetchPayloads, timeout); + const state = await this._innerClient.waitForOrchestrationStart( + workflowInstanceId, + fetchPayloads, + timeoutInSeconds, + ); if (state !== undefined) { return new WorkflowState(state); } } /** - * Waits for a workflow to complete running and returns a {@link WorkflowState} object - * containing metadata about the completed instance, and optionally, its input, output, - * and custom status payloads. - * - * A "completed" workflow instance refers to any instance in one of the terminal states. - * For example, the Completed, Failed, or Terminated states. - * - * If a workflow instance is already running when this method is called, it returns immediately. - * - * @param {string} workflowInstanceId - The unique identifier of the workflow instance to wait for. - * @param {boolean} fetchPayloads - Indicates whether to fetch the workflow instance's - * inputs, outputs (true) or omit them (false). - * @param {number} timeout - The amount of time, in seconds, to wait for the workflow instance to start. - * @returns {Promise} A Promise that resolves to the workflow instance metadata - * or undefined if no such instance is found. + * Waits for a workflow to complete running */ public async waitForWorkflowCompletion( workflowInstanceId: string, fetchPayloads = true, - timeout: number, + timeoutInSeconds = 60, ): Promise { - const state = await this._innerClient.waitForOrchestrationCompletion(workflowInstanceId, fetchPayloads, timeout); + const state = await this._innerClient.waitForOrchestrationCompletion( + workflowInstanceId, + fetchPayloads, + timeoutInSeconds, + ); if (state != undefined) { return new WorkflowState(state); } } /** - * Sends an event notification message to an awaiting workflow instance. - * - * This method triggers the specified event in a running workflow instance, - * allowing the workflow to respond to the event if it has defined event handlers. - * - * @param {string} workflowInstanceId - The unique identifier of the workflow instance that will handle the event. - * @param {string} eventName - The name of the event. Event names are case-insensitive. - * @param {any} [eventPayload] - An optional serializable data payload to include with the event. + * Sends an event notification message to an awaiting workflow instance */ public async raiseEvent(workflowInstanceId: string, eventName: string, eventPayload?: any) { this._innerClient.raiseOrchestrationEvent(workflowInstanceId, eventName, eventPayload); @@ -602,11 +569,6 @@ export default class WorkflowClient { /** * Purges the workflow instance state from the workflow state store. - * - * This method removes the persisted state associated with a workflow instance from the state store. - * - * @param {string} workflowInstanceId - The unique identifier of the workflow instance to purge. - * @return {Promise} A Promise that resolves to true if the workflow state was found and purged successfully, otherwise false. */ public async purgeWorkflow(workflowInstanceId: string): Promise { const purgeResult = await this._innerClient.purgeOrchestration(workflowInstanceId); @@ -765,6 +727,6 @@ Now that you've authored a workflow, learn how to manage it. 
- [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - - [JavaScript example](todo) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/src/workflow) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md index 0412b2606..148cc258d 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md @@ -67,16 +67,69 @@ d.terminate_workflow(instance_id=instanceId, workflow_component=workflowComponen {{% codetab %}} Manage your workflow within your code. In the workflow example from the [Author a workflow]({{< ref "howto-author-workflow.md#write-the-application" >}}) guide, the workflow is registered in the code using the following APIs: -- **start_workflow**: Start an instance of a workflow -- **get_workflow**: Get information on the status of the workflow -- **pause_workflow**: Pauses or suspends a workflow instance that can later be resumed -- **resume_workflow**: Resumes a paused workflow instance -- **raise_workflow_event**: Raise an event on a workflow -- **purge_workflow**: Removes all metadata related to a specific workflow instance -- **terminate_workflow**: Terminate or stop a particular instance of a workflow +- **client.workflow.start**: Start an instance of a workflow +- **client.workflow.get**: Get information on the status of the workflow +- **client.workflow.pause**: Pauses or suspends a workflow instance that can later be resumed +- **client.workflow.resume**: Resumes a paused workflow instance +- **client.workflow.purge**: Removes all metadata related to a specific workflow instance +- **client.workflow.terminate**: Terminate or stop a particular instance of a workflow ```javascript +import { DaprClient } from "@dapr/dapr"; +async function printWorkflowStatus(client: DaprClient, instanceId: string) { + const workflow = await client.workflow.get(instanceId); + console.log( + `Workflow ${workflow.workflowName}, created at ${workflow.createdAt.toUTCString()}, has status ${ + workflow.runtimeStatus + }`, + ); + console.log(`Additional properties: ${JSON.stringify(workflow.properties)}`); + console.log("--------------------------------------------------\n\n"); +} + +async function start() { + const client = new DaprClient(); + + // Start a new workflow instance + const instanceId = await client.workflow.start("OrderProcessingWorkflow", { + Name: "Paperclips", + TotalCost: 99.95, + Quantity: 4, + }); + console.log(`Started workflow instance ${instanceId}`); + await printWorkflowStatus(client, instanceId); + + // Pause a workflow instance + await client.workflow.pause(instanceId); + console.log(`Paused workflow instance ${instanceId}`); + await printWorkflowStatus(client, instanceId); + + // Resume a workflow instance + await client.workflow.resume(instanceId); + console.log(`Resumed workflow instance ${instanceId}`); + await printWorkflowStatus(client, instanceId); + + // Terminate a workflow instance + await client.workflow.terminate(instanceId); + 
console.log(`Terminated workflow instance ${instanceId}`); + await printWorkflowStatus(client, instanceId); + + // Wait for the workflow to complete, 30 seconds! + await new Promise((resolve) => setTimeout(resolve, 30000)); + await printWorkflowStatus(client, instanceId); + + // Purge a workflow instance + await client.workflow.purge(instanceId); + console.log(`Purged workflow instance ${instanceId}`); + // This will throw an error because the workflow instance no longer exists. + await printWorkflowStatus(client, instanceId); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); ``` {{% /codetab %}} @@ -260,7 +313,7 @@ Learn more about these HTTP calls in the [workflow API reference guide]({{< ref - [Try out the Workflow quickstart]({{< ref workflow-quickstart.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py) - - [JavaScript example](todo) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/src/workflow) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index 74dd1aa48..a31d9588b 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -76,16 +76,20 @@ def error_handler(ctx, error): ```javascript -import WorkflowClient from "../client/WorkflowClient"; -import WorkflowActivityContext from "../runtime/WorkflowActivityContext"; -import WorkflowContext from "../runtime/WorkflowContext"; -import WorkflowRuntime from "../runtime/WorkflowRuntime"; -import { TWorkflow } from "../types/Workflow.type"; +import { DaprWorkflowClient, WorkflowActivityContext, WorkflowContext, WorkflowRuntime, TWorkflow } from "@dapr/dapr"; -(async () => { - const grpcEndpoint = "localhost:4001"; - const workflowClient = new WorkflowClient(grpcEndpoint); - const workflowRuntime = new WorkflowRuntime(grpcEndpoint); +async function start() { + // Update the gRPC client and worker to use a local address and port + const daprHost = "localhost"; + const daprPort = "50001"; + const workflowClient = new DaprWorkflowClient({ + daprHost, + daprPort, + }); + const workflowRuntime = new WorkflowRuntime({ + daprHost, + daprPort, + }); const hello = async (_: WorkflowActivityContext, name: string) => { return `Hello ${name}!`; @@ -96,7 +100,7 @@ import { TWorkflow } from "../types/Workflow.type"; const result1 = yield ctx.callActivity(hello, "Tokyo"); cities.push(result1); - const result2 = yield ctx.callActivity(hello, "Seattle"); // Correct the spelling of "Seattle" + const result2 = yield ctx.callActivity(hello, "Seattle"); cities.push(result2); const result3 = yield ctx.callActivity(hello, "London"); cities.push(result3); @@ -129,7 +133,15 @@ import { TWorkflow } from "../types/Workflow.type"; await workflowRuntime.stop(); await workflowClient.stop(); -})(); + + // stop the dapr side car + process.exit(0); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); ``` {{% /codetab %}} @@ -294,19 +306,28 @@ def process_results(ctx, final_result: int): ```javascript -import { Task } from 
"kaibocai-durabletask-js/task/task"; -import WorkflowClient from "../client/WorkflowClient"; -import WorkflowActivityContext from "../runtime/WorkflowActivityContext"; -import WorkflowContext from "../runtime/WorkflowContext"; -import WorkflowRuntime from "../runtime/WorkflowRuntime"; -import { TWorkflow } from "../types/Workflow.type"; +import { + Task, + DaprWorkflowClient, + WorkflowActivityContext, + WorkflowContext, + WorkflowRuntime, + TWorkflow, +} from "@dapr/dapr"; // Wrap the entire code in an immediately-invoked async function -(async () => { +async function start() { // Update the gRPC client and worker to use a local address and port - const grpcServerAddress = "localhost:4001"; - const workflowClient: WorkflowClient = new WorkflowClient(grpcServerAddress); - const workflowRuntime: WorkflowRuntime = new WorkflowRuntime(grpcServerAddress); + const daprHost = "localhost"; + const daprPort = "50001"; + const workflowClient = new DaprWorkflowClient({ + daprHost, + daprPort, + }); + const workflowRuntime = new WorkflowRuntime({ + daprHost, + daprPort, + }); function getRandomInt(min: number, max: number): number { return Math.floor(Math.random() * (max - min + 1)) + min; @@ -332,6 +353,8 @@ import { TWorkflow } from "../types/Workflow.type"; await sleep(sleepTime); // Return a result for the given work item, which is also a random number in this case + // For more information about random numbers in workflow please check + // https://learn.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-code-constraints?tabs=csharp#random-numbers return Math.floor(Math.random() * 11); } @@ -374,7 +397,15 @@ import { TWorkflow } from "../types/Workflow.type"; // stop worker and client await workflowRuntime.stop(); await workflowClient.stop(); -})(); + + // stop the dapr side car + process.exit(0); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); ``` {{% /codetab %}} @@ -767,16 +798,18 @@ def place_order(_, order: Order) -> None: ```javascript -import { Task } from "kaibocai-durabletask-js/task/task"; -import WorkflowClient from "../client/WorkflowClient"; -import WorkflowActivityContext from "../runtime/WorkflowActivityContext"; -import WorkflowContext from "../runtime/WorkflowContext"; -import WorkflowRuntime from "../runtime/WorkflowRuntime"; -import { TWorkflow } from "../types/Workflow.type"; +import { + Task, + DaprWorkflowClient, + WorkflowActivityContext, + WorkflowContext, + WorkflowRuntime, + TWorkflow, +} from "@dapr/dapr"; import * as readlineSync from "readline-sync"; // Wrap the entire code in an immediately-invoked async function -(async () => { +async function start() { class Order { cost: number; product: string; @@ -793,11 +826,18 @@ import * as readlineSync from "readline-sync"; } // Update the gRPC client and worker to use a local address and port - const grpcServerAddress = "localhost:4001"; - let workflowClient: WorkflowClient = new WorkflowClient(grpcServerAddress); - let workflowRuntime: WorkflowRuntime = new WorkflowRuntime(grpcServerAddress); + const daprHost = "localhost"; + const daprPort = "50001"; + const workflowClient = new DaprWorkflowClient({ + daprHost, + daprPort, + }); + const workflowRuntime = new WorkflowRuntime({ + daprHost, + daprPort, + }); - //Activity function that sends an approval request to the manager + // Activity function that sends an approval request to the manager const sendApprovalRequest = async (_: WorkflowActivityContext, order: Order) => { // Simulate some work that takes an amount of time await 
sleep(3000); @@ -858,12 +898,8 @@ import * as readlineSync from "readline-sync"; const id = await workflowClient.scheduleNewWorkflow(purchaseOrderWorkflow, order); console.log(`Orchestration scheduled with ID: ${id}`); - if (readlineSync.keyInYN("Press [Y] to approve the order... Y/yes, N/no")) { - const approvalEvent = { approver: approver }; - await workflowClient.raiseEvent(id, "approval_received", approvalEvent); - } else { - return "Order rejected"; - } + // prompt for approval asynchronously + promptForApproval(approver, workflowClient, id); // Wait for orchestration completion const state = await workflowClient.waitForWorkflowCompletion(id, undefined, timeout + 2); @@ -876,7 +912,24 @@ import * as readlineSync from "readline-sync"; // stop worker and client await workflowRuntime.stop(); await workflowClient.stop(); -})(); + + // stop the dapr side car + process.exit(0); +} + +async function promptForApproval(approver: string, workflowClient: DaprWorkflowClient, id: string) { + if (readlineSync.keyInYN("Press [Y] to approve the order... Y/yes, N/no")) { + const approvalEvent = { approver: approver }; + await workflowClient.raiseEvent(id, "approval_received", approvalEvent); + } else { + return "Order rejected"; + } +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); ``` {{% /codetab %}} From cb78f17ea475f1c7f69a800bea243200dc753e33 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Thu, 25 Jan 2024 14:02:01 -0500 Subject: [PATCH 05/20] add link to js sdk Signed-off-by: Hannah Hunter --- .../building-blocks/workflow/howto-author-workflow.md | 2 +- .../building-blocks/workflow/howto-manage-workflow.md | 2 +- .../building-blocks/workflow/workflow-architecture.md | 2 +- .../building-blocks/workflow/workflow-features-concepts.md | 2 +- .../building-blocks/workflow/workflow-overview.md | 4 ++-- .../building-blocks/workflow/workflow-patterns.md | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md index 00330236b..4dfb0fa56 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md @@ -727,6 +727,6 @@ Now that you've authored a workflow, learn how to manage it. 
- [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/src/workflow) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md index 148cc258d..5efd602fc 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md @@ -313,7 +313,7 @@ Learn more about these HTTP calls in the [workflow API reference guide]({{< ref - [Try out the Workflow quickstart]({{< ref workflow-quickstart.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/blob/master/examples/demo_workflow/app.py) - - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/src/workflow) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md index c2398d772..186723d13 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md @@ -195,6 +195,6 @@ See the [Reminder usage and execution guarantees section]({{< ref "workflow-arch - [Try out the Workflow quickstart]({{< ref workflow-quickstart.md >}}) - Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - - [JavaScript example](todo) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) \ No newline at end of file diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md index 3dfb0affb..5d74e1084 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md @@ -411,6 +411,6 @@ To work around these constraints: - [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - - [JavaScript example](todo) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - 
[.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) \ No newline at end of file diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index e558c554f..1ecc40df3 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -83,7 +83,7 @@ Want to put workflows to the test? Walk through the following quickstart and tut | ------------------- | ----------- | | [Workflow quickstart]({{< ref workflow-quickstart.md >}}) | Run a workflow application with four workflow activities to see Dapr Workflow in action | | [Workflow Python SDK example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) | Learn how to create a Dapr Workflow and invoke it using the Python `DaprClient` package. | -| [Workflow JavaScript SDK example](todo) | Learn how to create a Dapr Workflow and invoke it using the JavaScript `todo` package. | +| [Workflow JavaScript SDK example](https://github.com/dapr/js-sdk/tree/main/examples/workflow) | Learn how to create a Dapr Workflow and invoke it using the JavaScript SDK. | | [Workflow .NET SDK example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) | Learn how to create a Dapr Workflow and invoke it using ASP.NET Core web APIs. | | [Workflow Java SDK example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) | Learn how to create a Dapr Workflow and invoke it using the Java `io.dapr.workflows` package. | @@ -112,6 +112,6 @@ Watch [this video for an overview on Dapr Workflow](https://youtu.be/s1p9MNl4VGo - [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the full SDK examples: - [Python example](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - - [JavaScript example](todo) + - [JavaScript example](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET example](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index a31d9588b..4bd73a792 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -1086,6 +1086,6 @@ External events don't have to be directly triggered by humans. 
They can also be - [Workflow API reference]({{< ref workflow_api.md >}}) - Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - - [JavaScript](todo) + - [JavaScript](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) - [Java](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows) \ No newline at end of file From fd5807499011bba749f6702b8745d5672fcc58c5 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Thu, 25 Jan 2024 14:05:49 -0500 Subject: [PATCH 06/20] fix localized link Signed-off-by: Hannah Hunter --- .../building-blocks/workflow/workflow-patterns.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index 4bd73a792..9749e2142 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -354,7 +354,7 @@ async function start() { // Return a result for the given work item, which is also a random number in this case // For more information about random numbers in workflow please check - // https://learn.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-code-constraints?tabs=csharp#random-numbers + // https://learn.microsoft.com/azure/azure-functions/durable/durable-functions-code-constraints?tabs=csharp#random-numbers return Math.floor(Math.random() * 11); } From 5027a746fafc96ed042021bc6523192969ce31df Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Fri, 26 Jan 2024 11:11:30 -0500 Subject: [PATCH 07/20] tweak for limitations Signed-off-by: Hannah Hunter --- .../building-blocks/workflow/workflow-overview.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index 1ecc40df3..509085708 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -7,7 +7,7 @@ description: "Overview of Dapr Workflow" --- {{% alert title="Note" color="primary" %}} -Dapr Workflow is currently in beta. [See known limitations for {{% dapr-latest-version cli="true" %}}]({{< ref "#limitations" >}}). +Dapr Workflow is currently in beta. [See known limitations]({{< ref "#limitations" >}}). {{% /alert %}} Dapr workflow makes it easy for developers to write business logic and integrations in a reliable way. Since Dapr workflows are stateful, they support long-running and fault-tolerant applications, ideal for orchestrating microservices. Dapr workflow works seamlessly with other Dapr building blocks, such as service invocation, pub/sub, state management, and bindings. @@ -93,9 +93,9 @@ Want to skip the quickstarts? Not a problem. You can try out the workflow buildi ## Limitations -- **State stores:** For the {{% dapr-latest-version cli="true" %}} beta release of Dapr Workflow, using the NoSQL databases as a state store results in limitations around storing internal states. 
For example, CosmosDB has a maximum single operation item limit of only 100 states in a single request.
+- **State stores:** As of the 1.12.0 beta release of Dapr Workflow, using NoSQL databases as a state store results in limitations around storing internal states. For example, CosmosDB has a maximum single operation item limit of only 100 states in a single request.
 
-- **Horizontal scaling:** For the {{% dapr-latest-version cli="true" %}} beta release of Dapr Workflow, if you scale out Dapr sidecars or your application pods to more than 2, then the concurrency of the workflow execution drops. It is recommended to test with 1 or 2 instances, and no more than 2.
+- **Horizontal scaling:** As of the 1.12.0 beta release of Dapr Workflow, if you scale out Dapr sidecars or your application pods to more than 2, then the concurrency of the workflow execution drops. It is recommended to test with 1 or 2 instances, and no more than 2.
 
 ## Watch the demo
 
From af916d61f95df1d16397d02f972f5475122ec36e Mon Sep 17 00:00:00 2001
From: Hannah Hunter 
Date: Mon, 29 Jan 2024 10:47:03 -0500
Subject: [PATCH 08/20] add monitor pattern

Signed-off-by: Hannah Hunter 
---
 .../workflow/workflow-patterns.md | 21 +++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md
index 9749e2142..b91b81009 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md
@@ -614,11 +614,28 @@ def send_alert(ctx, message: str):
 
 ```javascript
 
+const statusMonitorWorkflow: TWorkflow = async function* (ctx: WorkflowContext): any {
+  let duration;
+  const status = yield ctx.callActivity(checkStatusActivity);
+  if (status === "healthy") {
+    // Check less frequently when in a healthy state
+    // set duration to 1 hour
+    duration = 60 * 60;
+  } else {
+    yield ctx.callActivity(alertActivity, "job unhealthy");
+    // Check more frequently when in an unhealthy state
+    // set duration to 5 minutes
+    duration = 5 * 60;
+  }
+  // Put the workflow to sleep until the determined time
+  yield ctx.createTimer(duration);
+
+  // Restart from the beginning with the updated state
+  ctx.continueAsNew();
+};
 
 ```
 
-> This example assumes you have a predefined `MyEntityState` class with a boolean `IsHealthy` property. 
- {{% /codetab %}} {{% codetab %}} From c462418dbcc68a5b665a3d603fefe5788b7e247f Mon Sep 17 00:00:00 2001 From: ItalyPaleAle <43508+ItalyPaleAle@users.noreply.github.com> Date: Wed, 31 Jan 2024 17:06:47 -0800 Subject: [PATCH 09/20] [1.13] Azure Blob Storage v2 docs Fixes #3203 Signed-off-by: ItalyPaleAle <43508+ItalyPaleAle@users.noreply.github.com> --- .../setup-azure-blobstorage.md | 37 +++++++++++++------ .../data/components/state_stores/azure.yaml | 4 +- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md index 61846c3be..c8ba985b2 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md @@ -18,7 +18,9 @@ metadata: name: spec: type: state.azure.blobstorage - version: v1 + # Supports v1 and v2. Users should always use v2 by default. There is no + # migration path from v1 to v2, see `versioning` below. + version: v2 metadata: - name: accountName value: "[your_account_name]" @@ -32,21 +34,32 @@ spec: The above example uses secrets as plain strings. It is recommended to use a secret store for the secrets as described [here]({{< ref component-secrets.md >}}). {{% /alert %}} +## Versioning + +Dapr has 2 versions of the Azure Blob Storage state store component: `v1` and `v2`. It is recommended to use `v2`, as `v1` is deprecated. + +In `v1`, we identified a longstanding implementation issue where the [key prefix]({{< ref howto-share-state.md >}}) was incorrectly stripped by the component, essentially behaving as if `keyPrefix` was always set to `none`. +The updated `v2` of the component fixes the incorrect behavior and makes the state store correctly respect the `keyPrefix` property. + +While `v1` and `v2` have the same metadata fields, they are otherwise incompatible, with no automatic data migration path for `v1` to `v2`. + +If you are using `v1` of this component, you should continue to use `v1` until you create a new state store. + ## Spec metadata fields -| Field | Required | Details | Example | +| Field | Required | Details | Example | |--------------------|:--------:|---------|---------| -| `accountName` | Y | The storage account name | `"mystorageaccount"`. -| `accountKey` | Y (unless using Microsoft Entra ID) | Primary or secondary storage key | `"key"` -| `containerName` | Y | The name of the container to be used for Dapr state. The container will be created for you if it doesn't exist | `"container"` -| `azureEnvironment` | N | Optional name for the Azure environment if using a different Azure cloud | `"AZUREPUBLICCLOUD"` (default value), `"AZURECHINACLOUD"`, `"AZUREUSGOVERNMENTCLOUD"`, `"AZUREGERMANCLOUD"` +| `accountName` | Y | The storage account name | `"mystorageaccount"`. | +| `accountKey` | Y (unless using Microsoft Entra ID) | Primary or secondary storage key | `"key"` | +| `containerName` | Y | The name of the container to be used for Dapr state. The container will be created for you if it doesn't exist | `"container"` | +| `azureEnvironment` | N | Optional name for the Azure environment if using a different Azure cloud | `"AZUREPUBLICCLOUD"` (default value), `"AZURECHINACLOUD"`, `"AZUREUSGOVERNMENTCLOUD"` | | `endpoint` | N | Optional custom endpoint URL. 
This is useful when using the [Azurite emulator](https://github.com/Azure/azurite) or when using custom domains for Azure Storage (although this is not officially supported). The endpoint must be the full base URL, including the protocol (`http://` or `https://`), the IP or FQDN, and optional port. | `"http://127.0.0.1:10000"` -| `ContentType` | N | The blob's content type | `"text/plain"` -| `ContentMD5` | N | The blob's MD5 hash | `"vZGKbMRDAnMs4BIwlXaRvQ=="` -| `ContentEncoding` | N | The blob's content encoding | `"UTF-8"` -| `ContentLanguage` | N | The blob's content language | `"en-us"` -| `ContentDisposition` | N | The blob's content disposition. Conveys additional information about how to process the response payload | `"attachment"` -| `CacheControl` | N | The blob's cache control | `"no-cache"` +| `ContentType` | N | The blob's content type | `"text/plain"` | +| `ContentMD5` | N | The blob's MD5 hash | `"vZGKbMRDAnMs4BIwlXaRvQ=="` | +| `ContentEncoding` | N | The blob's content encoding | `"UTF-8"` | +| `ContentLanguage` | N | The blob's content language | `"en-us"` | +| `ContentDisposition` | N | The blob's content disposition. Conveys additional information about how to process the response payload | `"attachment"` | +| `CacheControl`| N | The blob's cache control | `"no-cache"` | ## Setup Azure Blob Storage diff --git a/daprdocs/data/components/state_stores/azure.yaml b/daprdocs/data/components/state_stores/azure.yaml index 6f37e4493..6f490f842 100644 --- a/daprdocs/data/components/state_stores/azure.yaml +++ b/daprdocs/data/components/state_stores/azure.yaml @@ -1,8 +1,8 @@ - component: Azure Blob Storage link: setup-azure-blobstorage state: Stable - version: v1 - since: "1.0" + version: v2 + since: "1.13" features: crud: true transactions: false From a63d3287fed4bf3a434b54f2bf274e8e5390e967 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Fri, 2 Feb 2024 15:50:25 -0500 Subject: [PATCH 10/20] add name resolution to config overview and schema Signed-off-by: Hannah Hunter --- .../configuration/configuration-overview.md | 22 +++++++++++++++++++ .../resource-specs/configuration-schema.md | 5 +++++ 2 files changed, 27 insertions(+) diff --git a/daprdocs/content/en/operations/configuration/configuration-overview.md b/daprdocs/content/en/operations/configuration/configuration-overview.md index ca9601671..62b9806da 100644 --- a/daprdocs/content/en/operations/configuration/configuration-overview.md +++ b/daprdocs/content/en/operations/configuration/configuration-overview.md @@ -50,6 +50,7 @@ The following configuration settings can be applied to Dapr application sidecars - [Metrics](#metrics) - [Logging](#logging) - [Middleware](#middleware) +- [Name resolution](#name-resolution) - [Scope secret store access](#scope-secret-store-access) - [Access Control allow lists for building block APIs](#access-control-allow-lists-for-building-block-apis) - [Access Control allow lists for service invocation API](#access-control-allow-lists-for-service-invocation-api) @@ -189,6 +190,27 @@ The following table lists the properties for HTTP handlers: See [Middleware pipelines]({{< ref "middleware.md" >}}) for more information +#### Name resolution + +You can set name resolution within the Configuration YAML. Set the `spec.nameResolution.component` property to `"sqlite"`, then pass configuration options in the `spec.nameResolution.configuration` dictionary. 
+ +This is the basic example of a Configuration resource: + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Configuration +metadata: + name: appconfig +spec: + nameResolution: + component: "sqlite" + version: "v1" + configuration: + connectionString: "/home/user/.dapr/nr.db" +``` + +See [the Name Resolution spec documentation]({{< ref supported-name-resolution >}}) to learn more about how to configure name resolution per component. + #### Scope secret store access See the [Scoping secrets]({{< ref "secret-scope.md" >}}) guide for information and examples on how to scope secrets to an application. diff --git a/daprdocs/content/en/reference/resource-specs/configuration-schema.md b/daprdocs/content/en/reference/resource-specs/configuration-schema.md index 14a8bb2b8..b10115fbc 100644 --- a/daprdocs/content/en/reference/resource-specs/configuration-schema.md +++ b/daprdocs/content/en/reference/resource-specs/configuration-schema.md @@ -46,6 +46,11 @@ spec: handlers: - name: type: + nameResolution: + component: "sqlite" + version: "v1" + configuration: + connectionString: "/home/user/.dapr/nr.db" secrets: scopes: - storeName: From 330be0a82b0d78ccd9dae5f647d0e610499bf456 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Fri, 2 Feb 2024 16:03:45 -0500 Subject: [PATCH 11/20] update titles, add links, update service invo overview Signed-off-by: Hannah Hunter --- .../service-invocation/service-invocation-overview.md | 6 +++++- .../en/operations/configuration/configuration-overview.md | 2 +- .../supported-name-resolution/_index.md | 2 ++ .../supported-name-resolution/nr-kubernetes.md | 2 +- .../supported-name-resolution/nr-mdns.md | 2 +- .../supported-name-resolution/nr-sqlite.md | 2 +- .../supported-name-resolution/setup-nr-consul.md | 2 +- 7 files changed, 12 insertions(+), 6 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md b/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md index 42d6b304d..2f212e23d 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md @@ -92,7 +92,11 @@ The diagram below shows an example of how this works. If you have 1 instance of ### Pluggable service discovery -Dapr can run on a variety of [hosting platforms]({{< ref hosting >}}). To enable service discovery and service invocation, Dapr uses pluggable [name resolution components]({{< ref supported-name-resolution >}}). For example, the Kubernetes name resolution component uses the Kubernetes DNS service to resolve the location of other applications running in the cluster. Self-hosted machines can use the mDNS name resolution component. The Consul name resolution component can be used in any hosting environment, including Kubernetes or self-hosted. +Dapr can run on a variety of [hosting platforms]({{< ref hosting >}}). To enable service discovery and service invocation, Dapr uses pluggable [name resolution components]({{< ref supported-name-resolution >}}). For example, the Kubernetes name resolution component uses the Kubernetes DNS service to resolve the location of other applications running in the cluster. + +Self-hosted machines can use the mDNS name resolution component. 
As an alternative, you can use the SQLite name resolution component to run Dapr on single-node environments and for local development scenarios. Dapr sidecars that are part of the cluster store their information in a SQLite database on the local machine. + +The Consul name resolution component can be used in any hosting environment, including Kubernetes or self-hosted. ### Streaming for HTTP service invocation diff --git a/daprdocs/content/en/operations/configuration/configuration-overview.md b/daprdocs/content/en/operations/configuration/configuration-overview.md index 62b9806da..e14067684 100644 --- a/daprdocs/content/en/operations/configuration/configuration-overview.md +++ b/daprdocs/content/en/operations/configuration/configuration-overview.md @@ -209,7 +209,7 @@ spec: connectionString: "/home/user/.dapr/nr.db" ``` -See [the Name Resolution spec documentation]({{< ref supported-name-resolution >}}) to learn more about how to configure name resolution per component. +See [the Name Resolution spec documentation]({{< ref supported-name-resolution >}}) and the [Configuration YAML documentation]({{< ref configuration-schema.md >}}) to learn more about how to configure name resolution per component. #### Scope secret store access diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md index 59a956ce5..fc072eab7 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md @@ -7,6 +7,8 @@ description: The supported name resolution providers to enable Dapr service invo no_list: true --- +Name resolution is configured via the [Dapr Configuration]({{< ref configuration-overview.md >}}). + The following components provide name resolution for the service invocation building block. 
{{< partial "components/description.html" >}} diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-kubernetes.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-kubernetes.md index 0f36aaacf..f370464bb 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-kubernetes.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-kubernetes.md @@ -1,6 +1,6 @@ --- type: docs -title: "Kubernetes DNS name resolution provider spec" +title: "Kubernetes DNS" linkTitle: "Kubernetes DNS" description: Detailed information on the Kubernetes DNS name resolution component --- diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-mdns.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-mdns.md index 2c9d0ea64..1c2e1c9a3 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-mdns.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-mdns.md @@ -1,6 +1,6 @@ --- type: docs -title: "mDNS name resolution provider spec" +title: "mDNS" linkTitle: "mDNS" description: Detailed information on the mDNS name resolution component --- diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-sqlite.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-sqlite.md index 0be4e1860..b596a5b14 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-sqlite.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/nr-sqlite.md @@ -1,6 +1,6 @@ --- type: docs -title: "SQLite name resolution provider" +title: "SQLite" linkTitle: "SQLite" description: Detailed information on the SQLite name resolution component --- diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/setup-nr-consul.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/setup-nr-consul.md index b5c8140e7..fb7945572 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/setup-nr-consul.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/setup-nr-consul.md @@ -1,6 +1,6 @@ --- type: docs -title: "HashiCorp Consul name resolution provider spec" +title: "HashiCorp Consul" linkTitle: "HashiCorp Consul" description: Detailed information on the HashiCorp Consul name resolution component --- From 525baca80a3ac1d5313d4e0fa88620b2a1c37d6c Mon Sep 17 00:00:00 2001 From: "Alessandro (Ale) Segala" <43508+ItalyPaleAle@users.noreply.github.com> Date: Mon, 5 Feb 2024 08:56:35 -0800 Subject: [PATCH 12/20] Update daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> --- .../supported-state-stores/setup-azure-blobstorage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md index c8ba985b2..0681daf1b 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md +++ 
b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md @@ -38,7 +38,7 @@ The above example uses secrets as plain strings. It is recommended to use a secr Dapr has 2 versions of the Azure Blob Storage state store component: `v1` and `v2`. It is recommended to use `v2`, as `v1` is deprecated. -In `v1`, we identified a longstanding implementation issue where the [key prefix]({{< ref howto-share-state.md >}}) was incorrectly stripped by the component, essentially behaving as if `keyPrefix` was always set to `none`. +In `v1`, a longstanding implementation issue was identified, where the [key prefix]({{< ref howto-share-state.md >}}) was incorrectly stripped by the component, essentially behaving as if `keyPrefix` was always set to `none`. The updated `v2` of the component fixes the incorrect behavior and makes the state store correctly respect the `keyPrefix` property. While `v1` and `v2` have the same metadata fields, they are otherwise incompatible, with no automatic data migration path for `v1` to `v2`. From 98dca03b33c82ede340c6a2dcc2a9139aa83f533 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Mon, 5 Feb 2024 12:04:48 -0500 Subject: [PATCH 13/20] mark edit Signed-off-by: Hannah Hunter --- .../building-blocks/workflow/workflow-overview.md | 11 +++++++++++ .../quickstarts/workflow-quickstart.md | 6 ------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index 509085708..d96cf7bf2 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -73,6 +73,17 @@ Learn more about [different types of workflow patterns]({{< ref workflow-pattern The Dapr Workflow _authoring SDKs_ are language-specific SDKs that contain types and functions to implement workflow logic. The workflow logic lives in your application and is orchestrated by the Dapr Workflow engine running in the Dapr sidecar via a gRPC stream. +### Supported SDKs + +You can use the following SDKs to author a workflow. 
+ +| Language stack | Package | +| - | - | +| Python | [dapr-ext-workflow](https://github.com/dapr/python-sdk/tree/master/ext/dapr-ext-workflow) | +| JavaScript | [DaprWorkflowClient](https://github.com/dapr/js-sdk/blob/main/src/workflow/client/DaprWorkflowClient.ts) | +| .NET | [Dapr.Workflow](https://www.nuget.org/profiles/dapr.io) | +| Java | [io.dapr.workflows](https://dapr.github.io/java-sdk/io/dapr/workflows/package-summary.html) | + ## Try out workflows ### Quickstarts and tutorials diff --git a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md index 8af699655..1e72a1d45 100644 --- a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md +++ b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md @@ -366,12 +366,6 @@ In `OrderProcessingWorkflow.js`, the workflow is defined as a class with all of The `Activities` directory holds the four workflow activities used by the workflow, defined in the following files: -## Watch the demo - -Watch [this video to walk through the Dapr Workflow .NET demo](https://youtu.be/BxiKpEmchgQ?t=2564): - - - {{% /codetab %}} From 6efa7091b9633d5e778fa8173092e047fedcdec2 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Mon, 5 Feb 2024 15:25:36 -0500 Subject: [PATCH 14/20] remove older versions Signed-off-by: Hannah Hunter --- daprdocs/config.toml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/daprdocs/config.toml b/daprdocs/config.toml index 415e86f5f..dde35e8ae 100644 --- a/daprdocs/config.toml +++ b/daprdocs/config.toml @@ -209,18 +209,6 @@ url_latest_version = "https://docs.dapr.io" [[params.versions]] version = "v1.7" url = "https://v1-7.docs.dapr.io" -[[params.versions]] - version = "v1.6" - url = "https://v1-6.docs.dapr.io" -[[params.versions]] - version = "v1.5" - url = "https://v1-5.docs.dapr.io" -[[params.versions]] - version = "v1.4" - url = "https://v1-4.docs.dapr.io" -[[params.versions]] - version = "v1.3" - url = "https://v1-3.docs.dapr.io" # UI Customization [params.ui] From 90d7be84addc6796a4749fbe8fdb04ff2ce25a2e Mon Sep 17 00:00:00 2001 From: "Alessandro (Ale) Segala" <43508+ItalyPaleAle@users.noreply.github.com> Date: Mon, 5 Feb 2024 13:40:59 -0800 Subject: [PATCH 15/20] Update daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md Signed-off-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> --- .../supported-state-stores/setup-azure-blobstorage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md index 0681daf1b..6f99ba27f 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-blobstorage.md @@ -36,7 +36,7 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Versioning -Dapr has 2 versions of the Azure Blob Storage state store component: `v1` and `v2`. It is recommended to use `v2`, as `v1` is deprecated. +Dapr has 2 versions of the Azure Blob Storage state store component: `v1` and `v2`. It is recommended to use `v2` for all new applications. 
`v1` is considered legacy and is preserved for compatibility with existing applications only. In `v1`, a longstanding implementation issue was identified, where the [key prefix]({{< ref howto-share-state.md >}}) was incorrectly stripped by the component, essentially behaving as if `keyPrefix` was always set to `none`. The updated `v2` of the component fixes the incorrect behavior and makes the state store correctly respect the `keyPrefix` property. From 3018badb59697b0b2935be60263b3815b0bf2283 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Mon, 5 Feb 2024 16:52:48 -0500 Subject: [PATCH 16/20] add js limitation Signed-off-by: Hannah Hunter --- .../workflow/workflow-features-concepts.md | 31 +++++++++---------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md index 553651c42..2fab89e64 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md @@ -334,14 +334,7 @@ Failure to follow this rule could result in undefined behavior. Any background p For example, instead of this: -{{< tabs JavaScript ".NET" Java >}} - -{{% codetab %}} - -```javascript -// DON'T DO THIS! -``` -{{% /codetab %}} +{{< tabs ".NET" Java JavaScript >}} {{% codetab %}} @@ -364,19 +357,17 @@ ctx.createTimer(Duration.ofSeconds(5)).await(); {{% /codetab %}} +{{% codetab %}} + +Don't declare JavaScript workflow as `async`. The Node.js runtime doesn't guarantee that asynchronous functions are deterministic. + +{{% /codetab %}} + {{< /tabs >}} Do this: -{{< tabs JavaScript ".NET" Java >}} - -{{% codetab %}} - -```javascript -// Do this!! -``` - -{{% /codetab %}} +{{< tabs ".NET" Java JavaScript >}} {{% codetab %}} @@ -398,6 +389,12 @@ ctx.createTimer(Duration.ofSeconds(5)).await(); {{% /codetab %}} +{{% codetab %}} + +Since the Node.js runtime doesn't guarantee that asynchronous functions are deterministic, always declare JavaScript workflow as synchronous generator functions. 
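+
+For example, here is a minimal sketch of a workflow declared as a synchronous generator function, using the `TWorkflow` and `WorkflowContext` types from `@dapr/dapr` (the workflow name is illustrative only):
+
+```javascript
+import { WorkflowContext, TWorkflow } from "@dapr/dapr";
+
+// Declared as a synchronous generator function (function*), not an async function.
+// Asynchronous work is still scheduled through the workflow context and resumed with `yield`.
+const exampleWorkflow: TWorkflow = function* (ctx: WorkflowContext): any {
+  // Wait deterministically using a durable timer instead of setTimeout
+  yield ctx.createTimer(5);
+};
+```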
+ +{{% /codetab %}} + {{< /tabs >}} From 5202af18d1123b2b95dff8871cafa4e9975c982b Mon Sep 17 00:00:00 2001 From: "Alessandro (Ale) Segala" <43508+ItalyPaleAle@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:29:29 -0800 Subject: [PATCH 17/20] [1.13] Docs for PostgreSQL v2 state store (#3996) * [1.13] Docs for PostgreSQL v2 state store Fixes #3911 Signed-off-by: ItalyPaleAle <43508+ItalyPaleAle@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> * Update daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v1.md Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> Signed-off-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> * Update daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v2.md Signed-off-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> --------- Signed-off-by: ItalyPaleAle <43508+ItalyPaleAle@users.noreply.github.com> Signed-off-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com> Co-authored-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com> --- ...p-postgresql.md => setup-postgresql-v1.md} | 42 +++-- .../setup-postgresql-v2.md | 165 ++++++++++++++++++ .../data/components/state_stores/generic.yaml | 15 +- 3 files changed, 204 insertions(+), 18 deletions(-) rename daprdocs/content/en/reference/components-reference/supported-state-stores/{setup-postgresql.md => setup-postgresql-v1.md} (76%) create mode 100644 daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v2.md diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v1.md similarity index 76% rename from daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql.md rename to daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v1.md index 5035d8fae..7026dcc92 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v1.md @@ -1,13 +1,23 @@ --- type: docs -title: "PostgreSQL" -linkTitle: "PostgreSQL" -description: Detailed information on the PostgreSQL state store component +title: "PostgreSQL v1" +linkTitle: "PostgreSQL v1" +description: Detailed information on the PostgreSQL v1 state store component aliases: - "/operations/components/setup-state-store/supported-state-stores/setup-postgresql/" + - "/operations/components/setup-state-store/supported-state-stores/setup-postgres/" + - "/operations/components/setup-state-store/supported-state-stores/setup-postgresql-v1/" + - "/operations/components/setup-state-store/supported-state-stores/setup-postgres-v1/" --- -This component allows using PostgreSQL (Postgres) as state store for Dapr. See [this guide]({{< ref "howto-get-save-state.md#step-1-setup-a-state-store" >}}) on how to create and apply a state store configuration. 
+{{% alert title="Note" color="primary" %}} +Starting with Dapr 1.13, you can leverage the [PostgreSQL v2]({{< ref setup-postgresql-v2.md >}}) state store component, which contains some improvements to performance and reliability. +The v2 component is not compatible with v1, and data cannot be migrated between the two components. The v2 component does not offer support for state store query APIs. + +There are no plans to deprecate the v1 component. +{{% /alert %}} + +This component allows using PostgreSQL (Postgres) as state store for Dapr, using the "v1" component. See [this guide]({{< ref "howto-get-save-state.md#step-1-setup-a-state-store" >}}) on how to create and apply a state store configuration. ```yaml apiVersion: dapr.io/v1alpha1 @@ -21,8 +31,8 @@ spec: # Connection string - name: connectionString value: "" - # Timeout for database operations, in seconds (optional) - #- name: timeoutInSeconds + # Timeout for database operations, as a Go duration or number of seconds (optional) + #- name: timeout # value: 20 # Name of the table where to store the state (optional) #- name: tableName @@ -31,8 +41,8 @@ spec: #- name: metadataTableName # value: "dapr_metadata" # Cleanup interval in seconds, to remove expired rows (optional) - #- name: cleanupIntervalInSeconds - # value: 3600 + #- name: cleanupInterval + # value: "1h" # Maximum number of connections pooled by this component (optional) #- name: maxConns # value: 0 @@ -59,7 +69,7 @@ The following metadata options are **required** to authenticate using a PostgreS | Field | Required | Details | Example | |--------|:--------:|---------|---------| -| `connectionString` | Y | The connection string for the PostgreSQL database. See the PostgreSQL [documentation on database connections](https://www.postgresql.org/docs/current/libpq-connect.html) for information on how to define a connection string. | `"host=localhost user=postgres password=example port=5432 connect_timeout=10 database=my_db"` +| `connectionString` | Y | The connection string for the PostgreSQL database. See the PostgreSQL [documentation on database connections](https://www.postgresql.org/docs/current/libpq-connect.html) for information on how to define a connection string. | `"host=localhost user=postgres password=example port=5432 connect_timeout=10 database=my_db"` | ### Authenticate using Microsoft Entra ID @@ -77,10 +87,10 @@ Authenticating with Microsoft Entra ID is supported with Azure Database for Post | Field | Required | Details | Example | |--------------------|:--------:|---------|---------| -| `timeoutInSeconds` | N | Timeout, in seconds, for all database operations. Defaults to `20` | `30` | `tableName` | N | Name of the table where the data is stored. Defaults to `state`. Can optionally have the schema name as prefix, such as `public.state` | `"state"`, `"public.state"` | `metadataTableName` | N | Name of the table Dapr uses to store a few metadata properties. Defaults to `dapr_metadata`. Can optionally have the schema name as prefix, such as `public.dapr_metadata` | `"dapr_metadata"`, `"public.dapr_metadata"` -| `cleanupIntervalInSeconds` | N | Interval, in seconds, to clean up rows with an expired TTL. Default: `3600` (i.e. 1 hour). Setting this to values <=0 disables the periodic cleanup. | `1800`, `-1` +| `timeout` | N | Timeout for operations on the database, as a [Go duration](https://pkg.go.dev/time#ParseDuration). Integers are interpreted as number of seconds. 
Defaults to `20s` | `"30s"`, `30` | +| `cleanupInterval` | N | Interval, as a Go duration or number of seconds, to clean up rows with an expired TTL. Default: `1h` (1 hour). Setting this to values <=0 disables the periodic cleanup. | `"30m"`, `1800`, `-1` | `maxConns` | N | Maximum number of connections pooled by this component. Set to 0 or lower to use the default value, which is the greater of 4 or the number of CPUs. | `"4"` | `connectionMaxIdleTime` | N | Max idle time before unused connections are automatically closed in the connection pool. By default, there's no value and this is left to the database driver to choose. | `"5m"` | `queryExecMode` | N | Controls the default mode for executing queries. By default Dapr uses the extended protocol and automatically prepares and caches prepared statements. However, this may be incompatible with proxies such as PGBouncer. In this case it may be preferrable to use `exec` or `simple_protocol`. | `"simple_protocol"` @@ -100,8 +110,8 @@ Authenticating with Microsoft Entra ID is supported with Azure Database for Post > This example does not describe a production configuration because it sets the password in plain text and the user name is left as the PostgreSQL default of "postgres". -2. Create a database for state data. -Either the default "postgres" database can be used, or create a new database for storing state data. +1. Create a database for state data. + Either the default "postgres" database can be used, or create a new database for storing state data. To create a new database in PostgreSQL, run the following SQL command: @@ -121,10 +131,10 @@ This state store supports [Time-To-Live (TTL)]({{< ref state-store-ttl.md >}}) f Because PostgreSQL doesn't have built-in support for TTLs, this is implemented in Dapr by adding a column in the state table indicating when the data is to be considered "expired". Records that are "expired" are not returned to the caller, even if they're still physically stored in the database. A background "garbage collector" periodically scans the state table for expired rows and deletes them. -The interval at which the deletion of expired records happens is set with the `cleanupIntervalInSeconds` metadata property, which defaults to 3600 seconds (that is, 1 hour). +You can set the deletion interval of expired records with the `cleanupInterval` metadata property, which defaults to 3600 seconds (that is, 1 hour). -- Longer intervals require less frequent scans for expired rows, but can require storing expired records for longer, potentially requiring more storage space. If you plan to store many records in your state table, with short TTLs, consider setting `cleanupIntervalInSeconds` to a smaller value, for example `300` (300 seconds, or 5 minutes). -- If you do not plan to use TTLs with Dapr and the PostgreSQL state store, you should consider setting `cleanupIntervalInSeconds` to a value <= 0 (e.g. `0` or `-1`) to disable the periodic cleanup and reduce the load on the database. +- Longer intervals require less frequent scans for expired rows, but can require storing expired records for longer, potentially requiring more storage space. If you plan to store many records in your state table, with short TTLs, consider setting `cleanupInterval` to a smaller value; for example, `5m` (5 minutes). 
+- If you do not plan to use TTLs with Dapr and the PostgreSQL state store, you should consider setting `cleanupInterval` to a value <= 0 (for example, `0` or `-1`) to disable the periodic cleanup and reduce the load on the database. The column in the state table where the expiration date for records is stored in, `expiredate`, **does not have an index by default**, so each periodic cleanup must perform a full-table scan. If you have a table with a very large number of records, and only some of them use a TTL, you may find it useful to create an index on that column. Assuming that your state table name is `state` (the default), you can use this query: diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v2.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v2.md new file mode 100644 index 000000000..bcda2558b --- /dev/null +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-postgresql-v2.md @@ -0,0 +1,165 @@ +--- +type: docs +title: "PostgreSQL" +linkTitle: "PostgreSQL" +description: Detailed information on the PostgreSQL state store component +aliases: + - "/operations/components/setup-state-store/supported-state-stores/setup-postgresql-v2/" + - "/operations/components/setup-state-store/supported-state-stores/setup-postgres-v2/" +--- + +{{% alert title="Note" color="primary" %}} +This is the v2 of the PostgreSQL state store component, which contains some improvements to performance and reliability. New applications are encouraged to use v2. + +The PostgreSQL v2 state store component is not compatible with the [v1 component]({{< ref setup-postgresql-v1.md >}}), and data cannot be migrated between the two components. The v2 component does not offer support for state store query APIs. + +There are no plans to deprecate the v1 component. +{{% /alert %}} + +This component allows using PostgreSQL (Postgres) as state store for Dapr, using the "v2" component. See [this guide]({{< ref "howto-get-save-state.md#step-1-setup-a-state-store" >}}) on how to create and apply a state store configuration. + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: +spec: + type: state.postgresql + # Note: setting "version" to "v2" is required to use the v2 of the component + version: v2 + metadata: + # Connection string + - name: connectionString + value: "" + # Timeout for database operations, as a Go duration or number of seconds (optional) + #- name: timeout + # value: 20 + # Prefix for the table where the data is stored (optional) + #- name: tablePrefix + # value: "" + # Name of the table where to store metadata used by Dapr (optional) + #- name: metadataTableName + # value: "dapr_metadata" + # Cleanup interval in seconds, to remove expired rows (optional) + #- name: cleanupInterval + # value: "1h" + # Maximum number of connections pooled by this component (optional) + #- name: maxConns + # value: 0 + # Max idle time for connections before they're closed (optional) + #- name: connectionMaxIdleTime + # value: 0 + # Controls the default mode for executing queries. (optional) + #- name: queryExecMode + # value: "" + # Uncomment this if you wish to use PostgreSQL as a state store for actors (optional) + #- name: actorStateStore + # value: "true" +``` + +{{% alert title="Warning" color="warning" %}} +The above example uses secrets as plain strings. It is recommended to use a secret store for the secrets as described [here]({{< ref component-secrets.md >}}). 
+{{% /alert %}}
+
+## Spec metadata fields
+
+### Authenticate using a connection string
+
+The following metadata options are **required** to authenticate using a PostgreSQL connection string.
+
+| Field | Required | Details | Example |
+|--------|:--------:|---------|---------|
+| `connectionString` | Y | The connection string for the PostgreSQL database. See the PostgreSQL [documentation on database connections](https://www.postgresql.org/docs/current/libpq-connect.html) for information on how to define a connection string. | `"host=localhost user=postgres password=example port=5432 connect_timeout=10 database=my_db"` |
+
+### Authenticate using Microsoft Entra ID
+
+Authenticating with Microsoft Entra ID is supported with Azure Database for PostgreSQL. All authentication methods supported by Dapr can be used, including client credentials ("service principal") and Managed Identity.
+
+| Field | Required | Details | Example |
+|--------|:--------:|---------|---------|
+| `useAzureAD` | Y | Must be set to `true` to enable the component to retrieve access tokens from Microsoft Entra ID. | `"true"` |
+| `connectionString` | Y | The connection string for the PostgreSQL database. This must contain the user, which corresponds to the name of the user created inside PostgreSQL that maps to the Microsoft Entra ID identity. This is often the name of the corresponding principal (for example, the name of the Microsoft Entra ID application). This connection string should not contain any password. | `"host=mydb.postgres.database.azure.com user=myapplication port=5432 database=my_db sslmode=require"` |
+| `azureTenantId` | N | ID of the Microsoft Entra ID tenant | `"cd4b2887-304c-…"` |
+| `azureClientId` | N | Client ID (application ID) | `"c7dd251f-811f-…"` |
+| `azureClientSecret` | N | Client secret (application password) | `"Ecy3X…"` |
+
+### Other metadata options
+
+| Field | Required | Details | Example |
+|--------------------|:--------:|---------|---------|
+| `tablePrefix` | N | Prefix for the table where the data is stored. Can optionally have the schema name as prefix, such as `public.prefix_` | `"prefix_"`, `"public.prefix_"` |
+| `metadataTableName` | N | Name of the table Dapr uses to store a few metadata properties. Defaults to `dapr_metadata`. Can optionally have the schema name as prefix, such as `public.dapr_metadata` | `"dapr_metadata"`, `"public.dapr_metadata"` |
+| `timeout` | N | Timeout for operations on the database, as a [Go duration](https://pkg.go.dev/time#ParseDuration). Integers are interpreted as number of seconds. Defaults to `20s` | `"30s"`, `30` |
+| `cleanupInterval` | N | Interval, as a Go duration or number of seconds, to clean up rows with an expired TTL. Default: `1h` (1 hour). Setting this to values <=0 disables the periodic cleanup. | `"30m"`, `1800`, `-1` |
+| `maxConns` | N | Maximum number of connections pooled by this component. Set to 0 or lower to use the default value, which is the greater of 4 or the number of CPUs. | `"4"` |
+| `connectionMaxIdleTime` | N | Max idle time before unused connections are automatically closed in the connection pool. By default, there's no value and this is left to the database driver to choose. | `"5m"` |
+| `queryExecMode` | N | Controls the default mode for executing queries. By default Dapr uses the extended protocol and automatically prepares and caches prepared statements. However, this may be incompatible with proxies such as PGBouncer. In this case, it may be preferable to use `exec` or `simple_protocol`. | `"simple_protocol"` |
+| `actorStateStore` | N | Consider this state store for actors. Defaults to `"false"` | `"true"`, `"false"` |
+
+## Setup PostgreSQL
+
+{{< tabs "Self-Hosted" >}}
+
+{{% codetab %}}
+
+1. Run an instance of PostgreSQL. You can run a local instance of PostgreSQL in Docker with the following command:
+
+   ```bash
+   docker run -p 5432:5432 -e POSTGRES_PASSWORD=example postgres
+   ```
+
+   > This example does not describe a production configuration because it sets the password in plain text and the user name is left as the PostgreSQL default of "postgres".
+
+2. Create a database for state data.
+   Either the default "postgres" database can be used, or create a new database for storing state data.
+
+   To create a new database in PostgreSQL, run the following SQL command:
+
+   ```sql
+   CREATE DATABASE my_dapr;
+   ```
+
+{{% /codetab %}}
+
+{{% /tabs %}}
+
+## Advanced
+
+### Differences between v1 and v2
+
+The PostgreSQL state store v2 was introduced in Dapr 1.13. The [pre-existing v1]({{< ref setup-postgresql-v1.md >}}) remains available and is not deprecated.
+
+In the v2 component, the table schema has been changed significantly, with the goal of increasing performance and reliability. Most notably, the value stored by Dapr is now of type _BYTEA_, which allows faster queries and, in some cases, is more space-efficient than the previously-used _JSONB_ column.
+However, due to this change, the v2 component does not support the [Dapr state store query APIs]({{< ref howto-state-query-api.md >}}).
+
+Also, in the v2 component, ETags are now random UUIDs, which ensures better compatibility with other PostgreSQL-compatible databases, such as CockroachDB.
+
+Because of these changes, v1 and v2 components are not able to read or write data from the same table. At this stage, it's also impossible to migrate data between the two versions of the component.
+
+### Displaying the data in human-readable format
+
+The PostgreSQL v2 component stores the state's value in the `value` column, which is of type _BYTEA_. Most PostgreSQL tools, including pgAdmin, consider the value as binary and do not display it in human-readable form by default.
+
+If you want to inspect the value in the state store, and you know it's not binary (for example, JSON data), you can have the value displayed in human-readable form using a query like the following:
+
+```sql
+-- Replace "state" with the name of the state table in your environment
+SELECT *, convert_from(value, 'utf-8') FROM state;
+```
+
+### TTLs and cleanups
+
+This state store supports [Time-To-Live (TTL)]({{< ref state-store-ttl.md >}}) for records stored with Dapr. When storing data using Dapr, you can set the `ttlInSeconds` metadata property to indicate after how many seconds the data should be considered "expired".
+
+Because PostgreSQL doesn't have built-in support for TTLs, this is implemented in Dapr by adding a column in the state table indicating when the data is to be considered "expired". Records that are "expired" are not returned to the caller, even if they're still physically stored in the database. A background "garbage collector" periodically scans the state table for expired rows and deletes them.
+
+You can set the deletion interval of expired records with the `cleanupInterval` metadata property, which defaults to 3600 seconds (that is, 1 hour).
+
+- Longer intervals require less frequent scans for expired rows, but can require storing expired records for longer, potentially requiring more storage space. If you plan to store many records in your state table, with short TTLs, consider setting `cleanupInterval` to a smaller value; for example, `5m` (5 minutes).
+ +## Related links + +- [Basic schema for a Dapr component]({{< ref component-schema >}}) +- Read [this guide]({{< ref "howto-get-save-state.md#step-2-save-and-retrieve-a-single-state" >}}) for instructions on configuring state store components +- [State management building block]({{< ref state-management >}}) diff --git a/daprdocs/data/components/state_stores/generic.yaml b/daprdocs/data/components/state_stores/generic.yaml index e0b685b64..ee5ca782c 100644 --- a/daprdocs/data/components/state_stores/generic.yaml +++ b/daprdocs/data/components/state_stores/generic.yaml @@ -141,8 +141,8 @@ etag: true ttl: true query: false -- component: PostgreSQL - link: setup-postgresql +- component: PostgreSQL v1 + link: setup-postgresql-v1 state: Stable version: v1 since: "1.0" @@ -152,6 +152,17 @@ etag: true ttl: true query: true +- component: PostgreSQL v2 + link: setup-postgresql-v2 + state: Stable + version: v2 + since: "1.13" + features: + crud: true + transactions: true + etag: true + ttl: true + query: false - component: Redis link: setup-redis state: Stable From 2961bc43582b357ef1c160e7750a1ae38bb18865 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Tue, 6 Feb 2024 12:44:20 -0500 Subject: [PATCH 18/20] mark review Signed-off-by: Hannah Hunter --- .../service-invocation/service-invocation-overview.md | 6 +++--- .../operations/configuration/configuration-overview.md | 10 ++++++---- .../supported-name-resolution/_index.md | 4 ++-- .../reference/resource-specs/configuration-schema.md | 4 ++-- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md b/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md index 2f212e23d..a8c4ff039 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/service-invocation/service-invocation-overview.md @@ -90,13 +90,13 @@ The diagram below shows an example of how this works. If you have 1 instance of **Note**: App ID is unique per _application_, not application instance. Regardless how many instances of that application exist (due to scaling), all of them will share the same app ID. -### Pluggable service discovery +### Swappable service discovery -Dapr can run on a variety of [hosting platforms]({{< ref hosting >}}). To enable service discovery and service invocation, Dapr uses pluggable [name resolution components]({{< ref supported-name-resolution >}}). For example, the Kubernetes name resolution component uses the Kubernetes DNS service to resolve the location of other applications running in the cluster. +Dapr can run on a variety of [hosting platforms]({{< ref hosting >}}). To enable swappable service discovery with service invocation, Dapr uses [name resolution components]({{< ref supported-name-resolution >}}). For example, the Kubernetes name resolution component uses the Kubernetes DNS service to resolve the location of other applications running in the cluster. Self-hosted machines can use the mDNS name resolution component. As an alternative, you can use the SQLite name resolution component to run Dapr on single-node environments and for local development scenarios. Dapr sidecars that are part of the cluster store their information in a SQLite database on the local machine. 
-The Consul name resolution component can be used in any hosting environment, including Kubernetes or self-hosted. +The Consul name resolution component is particularly suited to multi-machine deployments and can be used in any hosting environment, including Kubernetes, multiple VMs, or self-hosted. ### Streaming for HTTP service invocation diff --git a/daprdocs/content/en/operations/configuration/configuration-overview.md b/daprdocs/content/en/operations/configuration/configuration-overview.md index e14067684..601f178ee 100644 --- a/daprdocs/content/en/operations/configuration/configuration-overview.md +++ b/daprdocs/content/en/operations/configuration/configuration-overview.md @@ -190,11 +190,11 @@ The following table lists the properties for HTTP handlers: See [Middleware pipelines]({{< ref "middleware.md" >}}) for more information -#### Name resolution +#### Name resolution component -You can set name resolution within the Configuration YAML. Set the `spec.nameResolution.component` property to `"sqlite"`, then pass configuration options in the `spec.nameResolution.configuration` dictionary. +You can set name resolution component to use within the configuration YAML. For example, to set the `spec.nameResolution.component` property to `"sqlite"`, pass configuration options in the `spec.nameResolution.configuration` dictionary as shown below. -This is the basic example of a Configuration resource: +This is the basic example of a configuration resource: ```yaml apiVersion: dapr.io/v1alpha1 @@ -209,7 +209,9 @@ spec: connectionString: "/home/user/.dapr/nr.db" ``` -See [the Name Resolution spec documentation]({{< ref supported-name-resolution >}}) and the [Configuration YAML documentation]({{< ref configuration-schema.md >}}) to learn more about how to configure name resolution per component. +For more information, see: +- [The name resolution component documentation]({{< ref supported-name-resolution >}}) for more examples. +- - [The Configuration YAML documentation]({{< ref configuration-schema.md >}}) to learn more about how to configure name resolution per component. #### Scope secret store access diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md index fc072eab7..7ebd05eb9 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md @@ -7,10 +7,10 @@ description: The supported name resolution providers to enable Dapr service invo no_list: true --- -Name resolution is configured via the [Dapr Configuration]({{< ref configuration-overview.md >}}). - The following components provide name resolution for the service invocation building block. +Name resolution components are configured via the [configuration]({{< ref configuration-overview.md >}}). 
+ {{< partial "components/description.html" >}} {{< partial "components/name-resolution.html" >}} diff --git a/daprdocs/content/en/reference/resource-specs/configuration-schema.md b/daprdocs/content/en/reference/resource-specs/configuration-schema.md index b10115fbc..6587863dc 100644 --- a/daprdocs/content/en/reference/resource-specs/configuration-schema.md +++ b/daprdocs/content/en/reference/resource-specs/configuration-schema.md @@ -47,8 +47,8 @@ spec: - name: type: nameResolution: - component: "sqlite" - version: "v1" + component: + version: configuration: connectionString: "/home/user/.dapr/nr.db" secrets: From 87bfa451653b87706f83b0a53f4bd9615710add2 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Tue, 6 Feb 2024 13:36:27 -0500 Subject: [PATCH 19/20] forgot one Signed-off-by: Hannah Hunter --- .../content/en/reference/resource-specs/configuration-schema.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daprdocs/content/en/reference/resource-specs/configuration-schema.md b/daprdocs/content/en/reference/resource-specs/configuration-schema.md index 6587863dc..746d4b5c3 100644 --- a/daprdocs/content/en/reference/resource-specs/configuration-schema.md +++ b/daprdocs/content/en/reference/resource-specs/configuration-schema.md @@ -50,7 +50,7 @@ spec: component: version: configuration: - connectionString: "/home/user/.dapr/nr.db" + secrets: scopes: - storeName: From cc5095b19e29a0d79ee67687124cad56dc1f0327 Mon Sep 17 00:00:00 2001 From: Hannah Hunter Date: Tue, 6 Feb 2024 14:25:58 -0500 Subject: [PATCH 20/20] remove nats streaming Signed-off-by: Hannah Hunter --- .../supported-pubsub/setup-nats-streaming.md | 133 ------------------ daprdocs/data/components/pubsub/generic.yaml | 8 -- 2 files changed, 141 deletions(-) delete mode 100644 daprdocs/content/en/reference/components-reference/supported-pubsub/setup-nats-streaming.md diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-nats-streaming.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-nats-streaming.md deleted file mode 100644 index 6f997f2b3..000000000 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-nats-streaming.md +++ /dev/null @@ -1,133 +0,0 @@ ---- -type: docs -title: "NATS Streaming" -linkTitle: "NATS Streaming" -description: "Detailed documentation on the NATS Streaming pubsub component" -aliases: - - "/operations/components/setup-pubsub/supported-pubsub/setup-nats-streaming/" ---- - -## ⚠️ Deprecation notice - -{{% alert title="Warning" color="warning" %}} -This component is **deprecated** because the [NATS Streaming Server](https://nats-io.gitbook.io/legacy-nats-docs/nats-streaming-server-aka-stan/developing-with-stan) was deprecated in June 2023 and no longer receives updates. Users are encouraged to switch to using [JetStream]({{< ref setup-jetstream >}}) as an alternative. - -This component will be **removed in the Dapr v1.13 release**. -{{% /alert %}} - -## Component format - -To set up NATS Streaming pub/sub, create a component of type `pubsub.natsstreaming`. See the [pub/sub broker component file]({{< ref setup-pubsub.md >}}) to learn how ConsumerID is automatically generated. Read the [How-to: Publish and Subscribe guide]({{< ref "howto-publish-subscribe.md#step-1-setup-the-pubsub-component" >}}) on how to create and apply a pub/sub configuration. 
- -```yaml -apiVersion: dapr.io/v1alpha1 -kind: Component -metadata: - name: natsstreaming-pubsub -spec: - type: pubsub.natsstreaming - version: v1 - metadata: - - name: natsURL - value: "nats://localhost:4222" - - name: natsStreamingClusterID - value: "clusterId" - - name: concurrencyMode - value: parallel - - name: consumerID # Optional. If not supplied, runtime will create one. - value: "channel1" - # below are subscription configuration. - - name: subscriptionType - value: # Required. Allowed values: topic, queue. - - name: ackWaitTime - value: "" # Optional. - - name: maxInFlight - value: "" # Optional. - - name: durableSubscriptionName - value: "" # Optional. - # following subscription options - only one can be used - - name: deliverNew - value: - - name: startAtSequence - value: 1 - - name: startWithLastReceived - value: false - - name: deliverAll - value: false - - name: startAtTimeDelta - value: "" - - name: startAtTime - value: "" - - name: startAtTimeFormat - value: "" -``` - -{{% alert title="Warning" color="warning" %}} -The above example uses secrets as plain strings. It is recommended to [use a secret store for the secrets]({{< ref component-secrets.md >}}). -{{% /alert %}} - -## Spec metadata fields - -| Field | Required | Details | Example | -|--------------------|:--------:|---------|---------| -| natsURL | Y | NATS server address URL | "`nats://localhost:4222`"| -| natsStreamingClusterID | Y | NATS cluster ID |`"clusterId"`| -| subscriptionType | Y | Subscription type. Allowed values `"topic"`, `"queue"` | `"topic"` | -| consumerID | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | `"channel1"` -| ackWaitTime | N | See [here](https://docs.nats.io/developing-with-nats-streaming/acks#acknowledgements) | `"300ms"`| -| maxInFlight | N | See [here](https://docs.nats.io/developing-with-nats-streaming/acks#acknowledgements) | `"25"` | -| durableSubscriptionName | N | [Durable subscriptions](https://docs.nats.io/developing-with-nats-streaming/durables) identification name. | `"my-durable"`| -| deliverNew | N | Subscription Options. Only one can be used. Deliver new messages only | `"true"`, `"false"` | -| startAtSequence | N | Subscription Options. Only one can be used. Sets the desired start sequence position and state | `"100000"`, `"230420"` | -| startWithLastReceived | N | Subscription Options. Only one can be used. Sets the start position to last received. | `"true"`, `"false"` | -| deliverAll | N | Subscription Options. Only one can be used. Deliver all available messages | `"true"`, `"false"` | -| startAtTimeDelta | N | Subscription Options. Only one can be used. Sets the desired start time position and state using the delta | `"10m"`, `"23s"` | -| startAtTime | N | Subscription Options. Only one can be used. Sets the desired start time position and state | `"Feb 3, 2013 at 7:54pm (PST)"` | -| startAtTimeFormat | N | Must be used with `startAtTime`. Sets the format for the time | `"Jan 2, 2006 at 3:04pm (MST)"` | -| concurrencyMode | N | Call the subscriber sequentially (“single” message at a time), or concurrently (in “parallel”). 
Default: `"parallel"` | `"single"`, `"parallel"` - -## Create a NATS server - -{{< tabs "Self-Hosted" "Kubernetes">}} - -{{% codetab %}} -Run a NATS server locally using Docker: - -```bash -docker run -d --name nats-streaming -p 4222:4222 -p 8222:8222 nats-streaming -``` - -Interact with the server using the client port: `localhost:4222`. -{{% /codetab %}} - -{{% codetab %}} -Install NATS on Kubernetes by using the [kubectl](https://docs.nats.io/running-a-nats-service/introduction/running/nats-kubernetes/): - -```bash -# Single server NATS - -kubectl apply -f https://raw.githubusercontent.com/nats-io/k8s/master/nats-server/single-server-nats.yml - -kubectl apply -f https://raw.githubusercontent.com/nats-io/k8s/master/nats-streaming-server/single-server-stan.yml -``` - -This installs a single NATS-Streaming and NATS into the `default` namespace. To interact with NATS, find the service with: - -```bash -kubectl get svc stan -``` - -For example, if installing using the example above, the NATS Streaming address would be: - -`:4222` - -{{% /codetab %}} - -{{< /tabs >}} - -## Related links - -- [Basic schema for a Dapr component]({{< ref component-schema >}}). -- Read [this guide]({{< ref "howto-publish-subscribe.md#step-2-publish-a-topic" >}}) for instructions on configuring pub/sub components. -- [Pub/Sub building block]({{< ref pubsub >}}). -- [NATS Streaming Deprecation Notice](https://github.com/nats-io/nats-streaming-server/#warning--deprecation-notice-warning). diff --git a/daprdocs/data/components/pubsub/generic.yaml b/daprdocs/data/components/pubsub/generic.yaml index 99fa5cd20..b953aeab5 100644 --- a/daprdocs/data/components/pubsub/generic.yaml +++ b/daprdocs/data/components/pubsub/generic.yaml @@ -46,14 +46,6 @@ features: bulkPublish: false bulkSubscribe: false -- component: NATS Streaming - link: setup-nats-streaming - state: Deprecated - version: v1 - since: "1.11" - features: - bulkPublish: false - bulkSubscribe: false - component: RabbitMQ link: setup-rabbitmq state: Stable