mirror of https://github.com/dapr/quickstarts.git
Merge pull request #1191 from marcduiker/add-workflow-tutorials-python
Workflow tutorials for Python
This commit is contained in: commit a7ee51c88c
@@ -13,7 +13,7 @@ For more information on workflow management, see the [Dapr docs](https://docs.da

 ## Inspect the code

-Open the `Program.cs` file in the `tutorials/workflow/csharp/child-workflows/WorkflowManagement` folder. This file contains the endpoint definitions that use the workflow management API. The workflow that is being managed is named `NeverEndingWorkflow` and is a counter that will keep running once it's started.
+Open the `Program.cs` file in the `tutorials/workflow/csharp/workflow-management/WorkflowManagement` folder. This file contains the endpoint definitions that use the workflow management API. The workflow that is being managed is named `NeverEndingWorkflow` and is a counter that will keep running once it's started.

 ## Run the tutorial
@@ -0,0 +1,25 @@
# Using the Dapr Workflow API with Python

This folder contains tutorials on using the Dapr Workflow API with Python. All examples can be run locally on your machine.

Before you start, it's recommended to read through the Dapr docs to get familiar with the many [Workflow features, concepts, and patterns](https://docs.dapr.io/developing-applications/building-blocks/workflow/).

## Prerequisites

- [Docker Desktop](https://www.docker.com/products/docker-desktop/)
- [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/) & [Initialization](https://docs.dapr.io/getting-started/install-dapr-selfhost/)
- [Python 3](https://www.python.org/downloads/)
- Optional: An IDE such as [VSCode](https://code.visualstudio.com/download) with a [REST client](https://marketplace.visualstudio.com/items?itemName=humao.rest-client).

## Tutorials

- [Workflow Basics](./fundamentals/README.md)
- [Task Chaining](./task-chaining/README.md)
- [Fan-out/Fan-in](./fan-out-fan-in/README.md)
- [Monitor](./monitor-pattern/README.md)
- [External Events](./external-system-interaction/README.md)
- [Child Workflows](./child-workflows/README.md)
- [Resiliency & Compensation](./resiliency-and-compensation/README.md)
- [Combined Patterns](./combined-patterns/README.md)
- [Workflow Management](./workflow-management/README.md)
- [Challenges & Tips](./challenges-tips/README.md)
@@ -0,0 +1,10 @@
# Workflow Challenges & Tips

Workflow systems are very powerful tools but also have their challenges & limitations, as described in the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-features-concepts/#limitations).

This section provides some tips, with code snippets, to understand the limitations and get the most out of the Dapr Workflow API. Read through the following examples to learn best practices for developing Dapr workflows.

- [Deterministic workflows](deterministic_workflow.py)
- [Idempotent activities](idempotent_activity.py)
- [Versioning workflows](versioning_workflow.py)
- [Workflow & activity payload size](payload_size_workflow.py)
@@ -0,0 +1,36 @@
import dapr.ext.workflow as wf
from datetime import datetime
import uuid

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='non_deterministic_workflow')
def non_deterministic_workflow(ctx: wf.DaprWorkflowContext, wf_input: str):
    """
    Do not use non-deterministic operations in a workflow!
    These operations will create a new value every time the
    workflow is replayed.
    """
    order_id = str(uuid.uuid4())
    order_date = datetime.now()
    yield ctx.call_activity(submit_id, input=order_id)
    yield ctx.call_activity(submit_date, input=order_date)

    return order_id

@wf_runtime.workflow(name='deterministic_workflow')
def deterministic_workflow(ctx: wf.DaprWorkflowContext, wf_input: str):
    """
    Either wrap non-deterministic operations in an activity, or use deterministic
    alternatives on the DaprWorkflowContext instead. These operations create the
    same value when the workflow is replayed.
    """
    order_id = yield ctx.call_activity(create_order_id, input=wf_input)
    order_date = ctx.current_utc_datetime
    yield ctx.call_activity(submit_id, input=order_id)
    yield ctx.call_activity(submit_date, input=order_date)

    return order_id
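# The activities below are not part of the original snippet; they are minimal,
# hypothetical stubs added so the example is self-contained.
@wf_runtime.activity(name='create_order_id')
def create_order_id(ctx: wf.WorkflowActivityContext, wf_input: str) -> str:
    # Non-deterministic work belongs in activities: the result is persisted
    # once and reused on replay instead of being recomputed.
    return str(uuid.uuid4())

@wf_runtime.activity(name='submit_id')
def submit_id(ctx: wf.WorkflowActivityContext, order_id: str) -> None:
    print(f'submit_id: {order_id}', flush=True)

@wf_runtime.activity(name='submit_date')
def submit_date(ctx: wf.WorkflowActivityContext, order_date) -> None:
    print(f'submit_date: {order_date}', flush=True)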
@@ -0,0 +1,14 @@
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

# `Order` is assumed to be defined elsewhere in the application.
@wf_runtime.activity(name='idempotent_activity')
def idempotent_activity(ctx: wf.WorkflowActivityContext, order_item: Order) -> bool:
    """
    Beware of non-idempotent operations in an activity.
    Dapr Workflow guarantees at-least-once execution of activities, so activities might be executed more than once
    in case an activity is not run to completion successfully.
    For instance, can you insert a record into a database twice without side effects?

    insert_sql = f"INSERT INTO Orders (Id, Description, UnitPrice, Quantity) VALUES ('{order_item.id}', '{order_item.description}', {order_item.unit_price}, {order_item.quantity})"

    It's best to check whether a record already exists before inserting it.
    """
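# A minimal, hypothetical sketch (not part of the original snippet) of the
# check-before-insert approach the docstring recommends, assuming a DB-API
# compatible `connection` and an order item with the fields used above.
def insert_order_once(connection, order_item) -> None:
    cursor = connection.cursor()
    # Check for an existing record first, so a retried activity does not
    # insert the same order twice.
    cursor.execute("SELECT 1 FROM Orders WHERE Id = ?", (order_item.id,))
    if cursor.fetchone() is None:
        cursor.execute(
            "INSERT INTO Orders (Id, Description, UnitPrice, Quantity) VALUES (?, ?, ?, ?)",
            (order_item.id, order_item.description, order_item.unit_price, order_item.quantity),
        )
    connection.commit()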
@@ -0,0 +1,29 @@
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='large_payload_size_workflow')
def large_payload_size_workflow(ctx: wf.DaprWorkflowContext, doc_id: str):
    """
    Do not pass large payloads between activities.
    They are stored in the Dapr state store twice: once as the output argument
    of get_document, and once as the input argument of update_document.
    """
    document = yield ctx.call_activity(get_document, input=doc_id)
    updated_document = yield ctx.call_activity(update_document, input=document)

    # More activities to process the updated document

    return updated_document

@wf_runtime.workflow(name='small_payload_size_workflow')
def small_payload_size_workflow(ctx: wf.DaprWorkflowContext, doc_id: str):
    """
    Do pass small payloads between activities, preferably IDs only, or objects that are quick to (de)serialize in large volumes.
    Combine multiple actions, such as document retrieval and update, into a single activity, or use the Dapr State Store API to store more data.
    """
    updated_doc_id = yield ctx.call_activity(get_and_update_document, input=doc_id)

    # More activities to process the updated document

    return updated_doc_id
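# Hypothetical activity stubs (not part of the original snippet) added so the
# example is self-contained; a real implementation would fetch and persist
# documents in an external store.
@wf_runtime.activity(name='get_document')
def get_document(ctx: wf.WorkflowActivityContext, doc_id: str) -> str:
    return f'content of {doc_id}'

@wf_runtime.activity(name='update_document')
def update_document(ctx: wf.WorkflowActivityContext, document: str) -> str:
    return f'{document} (updated)'

@wf_runtime.activity(name='get_and_update_document')
def get_and_update_document(ctx: wf.WorkflowActivityContext, doc_id: str) -> str:
    # Retrieval and update are combined in one activity, so only the small
    # doc_id crosses the workflow/activity boundary.
    return doc_id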
@@ -0,0 +1,64 @@
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

"""
This is the initial version of the workflow.
Note that the input argument for both activities is the order_item (string).
"""
@wf_runtime.workflow(name='versioning_workflow_1')
def versioning_workflow_1(ctx: wf.DaprWorkflowContext, order_item: str):
    result_a = yield ctx.call_activity(activity_a, input=order_item)
    result_b = yield ctx.call_activity(activity_b, input=order_item)

    return result_a + result_b

@wf_runtime.activity(name='activity_a')
def activity_a(ctx: wf.WorkflowActivityContext, order_item: str) -> int:
    """
    This activity processes the order item and returns an integer result.
    """
    print(f'activity_a: Received input: {order_item}.', flush=True)
    return 10

@wf_runtime.activity(name='activity_b')
def activity_b(ctx: wf.WorkflowActivityContext, order_item: str) -> int:
    """
    This activity processes the order item and returns another integer result.
    """
    print(f'activity_b: Received input: {order_item}.', flush=True)
    return 20

"""
This is the updated version of the workflow.
The input for activity_b has changed from order_item (string) to result_a (int).
If there are in-flight workflow instances that were started with the previous version
of this workflow, these will fail when the new version of the workflow is deployed
and the workflow name remains the same, since the runtime parameters do not match with the persisted state.
It is recommended to version workflows by creating a new workflow class with a new name:
{workflowname}_1 -> {workflowname}_2
Try to avoid making breaking changes in perpetual workflows (that use the `continue_as_new` method),
since these are difficult to replace with a new version.
"""
# Note: the old and new activity definitions are shown side by side here for
# illustration; they share registration names, so in practice only the new
# definitions below would remain registered with the runtime.
@wf_runtime.workflow(name='versioning_workflow_2')
def versioning_workflow_2(ctx: wf.DaprWorkflowContext, order_item: str):
    result_a = yield ctx.call_activity(activity_a, input=order_item)
    result_b = yield ctx.call_activity(activity_b, input=result_a)

    return result_a + result_b

@wf_runtime.activity(name='activity_a')
def activity_a(ctx: wf.WorkflowActivityContext, order_item: str) -> int:
    """
    This activity processes the order item and returns an integer result.
    """
    print(f'activity_a: Received input: {order_item}.', flush=True)
    return 10

@wf_runtime.activity(name='activity_b')
def activity_b(ctx: wf.WorkflowActivityContext, number: int) -> int:
    """
    This activity processes a number and returns another integer result.
    """
    print(f'activity_b: Received input: {number}.', flush=True)
    return number + 10
@@ -0,0 +1,112 @@
# Child Workflows

This tutorial demonstrates how a workflow can call child workflows that are part of the same application. Child workflows can be used to break up large workflows into smaller, reusable parts. For more information about child workflows, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-features-concepts/#child-workflows).

## Inspect the code

Open the `parent_child_workflow.py` file in the `tutorials/workflow/python/child-workflows/child_workflows` folder. This file contains the definition for the workflows and activities.

The parent workflow iterates over the input array and schedules an instance of the `child_workflow` for each of the input elements, as shown in the excerpt below. The `child_workflow` is a basic task-chaining workflow that contains a sequence of two activities. When all instances of the `child_workflow` complete, the `parent_workflow` finishes.
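The fan-out over child workflows is a list comprehension that creates one child-workflow task per input item and then waits for all of them (excerpt from `parent_child_workflow.py`):

```python
child_wf_tasks = [
    ctx.call_child_workflow(child_workflow, input=item) for item in items
]
wf_result = yield wf.when_all(child_wf_tasks)
```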
### Parent workflow

```mermaid
graph LR
   SW((Start
   Workflow))
   subgraph for each word in the input
   GWL[Call child workflow]
   end
   ALL[Wait until all tasks
   are completed]
   EW((End
   Workflow))
   SW --> GWL
   GWL --> ALL
   ALL --> EW
```

### Child workflow

```mermaid
graph LR
   SW((Start
   Workflow))
   A1[activity1]
   A2[activity2]
   EW((End
   Workflow))
   SW --> A1
   A1 --> A2
   A2 --> EW
```

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/child-workflows/child_workflows` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate one level back to the `child-workflows` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
   - 'Started Dapr with app id "childworkflows"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->
   ```bash
   dapr run -f .
   ```
   <!-- END_STEP -->

4. Use the POST request in the [`childworkflows.http`](./childworkflows.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST \
     --url http://localhost:5259/start \
     --header 'content-type: application/json' \
     --data '["Item 1","Item 2"]'
   ```

   The input of the workflow is an array with two strings:

   ```json
   [
     "Item 1",
     "Item 2"
   ]
   ```

   The app logs should show both items in the input array being processed by each activity of the child workflow as follows:

   ```text
   == APP - childworkflows == activity1: Received input: Item 1.
   == APP - childworkflows == activity2: Received input: Item 1 is processed.
   == APP - childworkflows == activity1: Received input: Item 2.
   == APP - childworkflows == activity2: Received input: Item 2 is processed.
   ```

5. Use the GET request in the [`childworkflows.http`](./childworkflows.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3559/v1.0/workflows/dapr/<INSTANCEID>
   ```

   Where `<INSTANCEID>` is the workflow instance ID you received in the `instance_id` property in the previous step.

   The expected serialized output of the workflow is an array with two strings:

   ```txt
   "[\"Item 1 is processed as a child workflow.\",\"Item 2 is processed as a child workflow.\"]"
   ```

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.
@@ -0,0 +1,26 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from typing import List
from parent_child_workflow import wf_runtime, parent_workflow
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(items: List[str]):
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=parent_workflow,
        input=items
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5259)
@@ -0,0 +1,30 @@
from typing import List
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='parent_workflow')
def parent_workflow(ctx: wf.DaprWorkflowContext, items: List[str]):
    child_wf_tasks = [
        ctx.call_child_workflow(child_workflow, input=item) for item in items
    ]
    wf_result = yield wf.when_all(child_wf_tasks)

    return wf_result

@wf_runtime.workflow(name='child_workflow')
def child_workflow(ctx: wf.DaprWorkflowContext, wf_input: str):
    result1 = yield ctx.call_activity(activity1, input=wf_input)
    wf_result = yield ctx.call_activity(activity2, input=result1)
    return wf_result

@wf_runtime.activity(name='activity1')
def activity1(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity1: Received input: {act_input}.', flush=True)
    return f"{act_input} is processed"

@wf_runtime.activity(name='activity2')
def activity2(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity2: Received input: {act_input}.', flush=True)
    return f"{act_input} as a child workflow."
@@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2
@@ -0,0 +1,16 @@
@apphost=http://localhost:5259

### Start the parent_workflow
# @name startWorkflowRequest
POST {{ apphost }}/start
Content-Type: application/json

[
  "Item 1",
  "Item 2"
]

### Get the workflow status
@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3559
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}
@@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
- appID: childworkflows
  appDirPath: child_workflows
  appPort: 5259
  daprHTTPPort: 3559
  command: ["python3", "app.py"]
  appLogDestination: console
  daprdLogDestination: console
@@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk
@@ -0,0 +1,150 @@
# Combined Workflow Patterns

This tutorial demonstrates how several workflow patterns can be combined in a single, more realistic workflow. Some of the workflow activities use other Dapr APIs, such as state management, service invocation, and Pub/Sub.

## Inspect the code

The demo consists of two applications:

- `workflow_app` is the main application that orchestrates an order process in the `order_workflow`.
- `shipping_app` is a supporting service that is called by the `order_workflow`.

The `order_workflow` combines the task chaining, fan-out/fan-in, and wait-for-external-event patterns. The workflow contains a number of order-processing activities, including checking inventory, registering the shipment, and processing the payment, with a final order status being returned with the results of the order. It uses compensating logic in case the shipment fails to get registered and the customer needs to be reimbursed for the payment; the excerpt below shows this part of the workflow.
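This excerpt from `order_workflow.py` shows the wait for the external confirmation event with a timeout, and the compensation step that reimburses the customer when shipment registration fails:

```python
shipment_registered_task = ctx.wait_for_external_event(name=SHIPMENT_REGISTERED_EVENT)
timeout_task = ctx.create_timer(fire_at=timedelta(seconds=300))
winner = yield wf.when_any([shipment_registered_task, timeout_task])
if winner == timeout_task:
    # Timeout occurred, the shipment-registered-event was not received.
    message = f"Shipment registration status for {order.id} timed out."
    return OrderStatus(is_success=False, message=message).model_dump()

shipment_registration_status = ShipmentRegistrationStatus.model_validate(shipment_registered_task.get_result())
if not shipment_registration_status.is_success:
    # This is the compensation step in case the shipment registration event was not successful.
    yield ctx.call_activity(reimburse_customer, input=order.model_dump())
```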
```mermaid
graph LR
   SW((Start
   Workflow))
   EW((End
   Workflow))
   subgraph order_workflow
   direction LR
   CHKI[Check inventory]
   CHKD[Check shipping
   destination]
   IF1{Success?}
   PAY[Process
   payment]
   UPD[Update
   inventory]
   REG[Register
   shipment]
   WAIT[Wait for
   confirmation]
   IF2{Success?}
   RI[Reimburse
   customer]
   end
   subgraph Shipping
   direction LR
   REG2[register_shipment]
   CHKD2[check_destination]
   end
   SW --> CHKI
   SW --> CHKD <--> CHKD2
   CHKI --> IF1
   CHKD --> IF1
   IF1 --> PAY
   PAY --> UPD
   UPD --> REG -.->|pub/sub| REG2
   REG2 -.->|pub/sub| WAIT
   REG --> WAIT
   WAIT --> IF2
   IF2 -->|Yes| EW
   IF2 -->|No| RI
   RI --> EW
```

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/combined-patterns` folder.
2. Install the dependencies using pip:

   ```bash
   cd workflow_app
   pip3 install -r requirements.txt
   cd ..
   cd shipping_app
   pip3 install -r requirements.txt
   cd ..
   ```

3. Use the Dapr CLI to run the Dapr Multi-App run file. This starts both applications, `order-workflow` and `shipping`, with the Dapr components in the [resources](./resources) folder.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
   - 'Started Dapr with app id "order-workflow"'
   - 'Started Dapr with app id "shipping"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->
   ```bash
   dapr run -f .
   ```
   <!-- END_STEP -->

4. Use the POST request in the [`order-workflow.http`](./order-workflow.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST \
     --url http://localhost:5260/start \
     --header 'content-type: application/json' \
     --data '{"id": "b0d38481-5547-411e-ae7b-255761cce17a","order_item" : {"product_id": "RBD001","product_name": "Rubber Duck","quantity": 10,"total_price": 15.00},"customer_info" : {"id" : "Customer1","country" : "The Netherlands"}}'
   ```

   The input for the workflow is an `Order` object:

   ```json
   {
     "id": "b0d38481-5547-411e-ae7b-255761cce17a",
     "order_item" : {
       "product_id": "RBD001",
       "product_name": "Rubber Duck",
       "quantity": 10,
       "total_price": 15.00
     },
     "customer_info" : {
       "id" : "Customer1",
       "country" : "The Netherlands"
     }
   }
   ```

   The app logs should show both services executing all activities as follows:

   ```text
   == APP - order-workflow == start: Received input: id='b0d38481-5547-411e-ae7b-255761cce17a' order_item=OrderItem(product_id='RBD001', product_name='Rubber Duck', quantity=10, total_price=15.0) customer_info=CustomerInfo(id='Customer1', country='The Netherlands')
   == APP - order-workflow == order_workflow: Received order id: b0d38481-5547-411e-ae7b-255761cce17a.
   == APP - order-workflow == check_shipping_destination: Received input: id='Customer1' country='The Netherlands'.
   == APP - order-workflow == check_inventory: Received input: product_id='RBD001' product_name='Rubber Duck' quantity=10 total_price=15.0.
   == APP - order-workflow == get_inventory_item: product_id='RBD001' product_name='Rubber Duck' quantity=50
   == APP - shipping == checkDestination: Received input: id='Customer1' country='The Netherlands'.
   == APP - order-workflow == process_payment: Received input: id='b0d38481-5547-411e-ae7b-255761cce17a' order_item=OrderItem(product_id='RBD001', product_name='Rubber Duck', quantity=10, total_price=15.0) customer_info=CustomerInfo(id='Customer1', country='The Netherlands').
   == APP - order-workflow == order_workflow: Payment result: is_success=True.
   == APP - order-workflow == update_inventory: Received input: product_id='RBD001' product_name='Rubber Duck' quantity=10 total_price=15.0.
   == APP - order-workflow == get_inventory_item: product_id='RBD001' product_name='Rubber Duck' quantity=50
   == APP - order-workflow == register_shipment: Received input: id='b0d38481-5547-411e-ae7b-255761cce17a' order_item=OrderItem(product_id='RBD001', product_name='Rubber Duck', quantity=10, total_price=15.0) customer_info=CustomerInfo(id='Customer1', country='The Netherlands').
   == APP - shipping == registerShipment: Received input: id='b0d38481-5547-411e-ae7b-255761cce17a' order_item=OrderItem(product_id='RBD001', product_name='Rubber Duck', quantity=10, total_price=15.0) customer_info=CustomerInfo(id='Customer1', country='The Netherlands').
   == APP - order-workflow == shipmentRegistered: Received input: order_id='b0d38481-5547-411e-ae7b-255761cce17a' is_success=True message=None.
   ```

5. Use the GET request in the [`order-workflow.http`](./order-workflow.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3560/v1.0/workflows/dapr/b0d38481-5547-411e-ae7b-255761cce17a
   ```

   The expected serialized output of the workflow is:

   ```txt
   "{\"is_success\":true,\"message\":\"Order b0d38481-5547-411e-ae7b-255761cce17a processed successfully.\"}"
   ```

   *If the order-workflow.http file is used, the order id is generated when making the request and is different each time.*

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.
@@ -0,0 +1,18 @@
version: 1
common:
  resourcesPath: resources
apps:
- appID: order-workflow
  appDirPath: workflow_app
  appPort: 5260
  daprHTTPPort: 3560
  command: ["python3", "app.py"]
  appLogDestination: fileAndConsole
  daprdLogDestination: fileAndConsole
- appID: shipping
  appDirPath: shipping_app
  appPort: 5261
  daprHTTPPort: 3561
  command: ["python3", "app.py"]
  appLogDestination: console
  daprdLogDestination: console
@@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk
@@ -0,0 +1,26 @@
@apphost=http://localhost:5260

### Start the order_workflow
# @name startWorkflowRequest
@orderId={{$guid}}
POST {{ apphost }}/start
Content-Type: application/json

{
  "id": "{{orderId}}",
  "order_item" : {
    "product_id": "RBD001",
    "product_name": "Rubber Duck",
    "quantity": 10,
    "total_price": 15.00
  },
  "customer_info" : {
    "id" : "Customer1",
    "country" : "The Netherlands"
  }
}

### Get the workflow status
@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3560
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}
@@ -0,0 +1,12 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: shippingpubsub
spec:
  type: pubsub.redis
  version: v1
  metadata:
  - name: redisHost
    value: localhost:6379
  - name: redisPassword
    value: ""
@@ -0,0 +1,8 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: inventory
spec:
  type: state.in-memory
  version: v1
  metadata:
@@ -0,0 +1,15 @@
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: statestore
spec:
  type: state.redis
  version: v1
  initTimeout: 1m
  metadata:
  - name: redisHost
    value: localhost:6379
  - name: redisPassword
    value: ""
  - name: actorStateStore
    value: "true"
@@ -0,0 +1,11 @@
apiVersion: dapr.io/v2alpha1
kind: Subscription
metadata:
  name: shipment-registration
spec:
  topic: shipment-registration-events
  routes:
    default: /registerShipment
  pubsubname: shippingpubsub
scopes:
- shipping
@@ -0,0 +1,11 @@
apiVersion: dapr.io/v2alpha1
kind: Subscription
metadata:
  name: shipment-confirmed
spec:
  topic: shipment-registration-confirmed-events
  routes:
    default: /shipmentRegistered
  pubsubname: shippingpubsub
scopes:
- order-workflow
@@ -0,0 +1,35 @@
from fastapi import FastAPI, status
from dapr.clients import DaprClient
from models import Order, CustomerInfo, ShipmentRegistrationStatus, ShippingDestinationResult
from fastapi_cloudevents import CloudEvent
import uvicorn

app = FastAPI()

DAPR_PUBSUB_COMPONENT = "shippingpubsub"
DAPR_PUBSUB_CONFIRMED_TOPIC = "shipment-registration-confirmed-events"

@app.post("/checkDestination", status_code=status.HTTP_200_OK)
async def check_destination(customer_info: CustomerInfo):
    customer_info = CustomerInfo.model_validate(customer_info)
    print(f"checkDestination: Received input: {customer_info}.", flush=True)
    return ShippingDestinationResult(is_success=True)

@app.post("/registerShipment", status_code=status.HTTP_201_CREATED)
async def register_shipment(cloud_event: CloudEvent) -> None:
    order = Order.model_validate(cloud_event.data)
    print(f"registerShipment: Received input: {order}.", flush=True)

    # Named so it does not shadow the `status` module imported from fastapi.
    registration_status = ShipmentRegistrationStatus(order_id=order.id, is_success=True)

    with DaprClient() as dapr_client:
        dapr_client.publish_event(
            pubsub_name=DAPR_PUBSUB_COMPONENT,
            topic_name=DAPR_PUBSUB_CONFIRMED_TOPIC,
            data=registration_status.model_dump_json(),
            data_content_type='application/json'
        )
    return

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5261)
@@ -0,0 +1,26 @@
from pydantic import BaseModel
from typing import Optional

class ShipmentRegistrationStatus(BaseModel):
    order_id: str
    is_success: bool
    message: Optional[str] = None

class ShippingDestinationResult(BaseModel):
    is_success: bool
    message: Optional[str] = None

class CustomerInfo(BaseModel):
    id: str
    country: str

class OrderItem(BaseModel):
    product_id: str
    product_name: str
    quantity: int
    total_price: float

class Order(BaseModel):
    id: str
    order_item: OrderItem
    customer_info: CustomerInfo
@@ -0,0 +1,6 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
fastapi-cloudevents>=2.0.2
pydantic>=2.11.4
uvicorn>=0.34.2
@@ -0,0 +1,54 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from order_workflow import wf_runtime, order_workflow, SHIPMENT_REGISTERED_EVENT
from models import Order, ShipmentRegistrationStatus
from fastapi_cloudevents import CloudEvent
import inventory_management as im
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(order: Order):
    """
    create_default_inventory is used to ensure there is enough inventory for the order.
    """
    im.create_default_inventory()

    print(f"start: Received input: {order}.", flush=True)

    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=order_workflow,
        input=order.model_dump(),
        instance_id=order.id
    )
    return {"instance_id": instance_id}

"""
This endpoint handles messages that are published to the shipment-registration-confirmed-events topic.
It uses the workflow management API to raise an event to the workflow instance to indicate that the
shipment has been registered by the shipping app.
"""
@app.post("/shipmentRegistered", status_code=status.HTTP_202_ACCEPTED)
async def shipment_registered(cloud_event: CloudEvent) -> None:
    registration_status = ShipmentRegistrationStatus.model_validate(cloud_event.data)
    print(f"shipmentRegistered: Received input: {registration_status}.", flush=True)

    wf_client = wf.DaprWorkflowClient()
    wf_client.raise_workflow_event(
        instance_id=registration_status.order_id,
        event_name=SHIPMENT_REGISTERED_EVENT,
        data=registration_status.model_dump()
    )
    return

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5260, log_level="debug")
@@ -0,0 +1,62 @@
from dapr.clients import DaprClient
from models import ProductInventory, ProductInventoryItem, OrderItem, UpdateInventoryResult
import pickle

DAPR_INVENTORY_COMPONENT = "inventory"

def create_default_inventory() -> None:
    product_inventory_item = ProductInventoryItem(
        product_id="RBD001",
        product_name="Rubber Duck",
        quantity=50
    )

    with DaprClient() as dapr_client:
        dapr_client.save_state(
            store_name=DAPR_INVENTORY_COMPONENT,
            key=product_inventory_item.product_id,
            value=pickle.dumps(product_inventory_item)
        )

def get_inventory_item(product_id: str) -> ProductInventoryItem | None:
    with DaprClient() as dapr_client:
        state_response = dapr_client.get_state(
            store_name=DAPR_INVENTORY_COMPONENT,
            key=product_id
        )

    if not state_response.data:
        print('get_inventory_item: no state response', flush=True)
        return None
    product_inventory_item = pickle.loads(state_response.data)
    print(f'get_inventory_item: {product_inventory_item}', flush=True)
    return product_inventory_item

def check_inventory(order_item: OrderItem) -> bool:
    product_inventory_item = get_inventory_item(order_item.product_id)
    if product_inventory_item is None:
        return False
    else:
        is_available = product_inventory_item.quantity >= order_item.quantity
        return is_available

def update_inventory(order_item: OrderItem) -> UpdateInventoryResult:
    product_inventory_item = get_inventory_item(order_item.product_id)

    if product_inventory_item is None:
        return UpdateInventoryResult(is_success=False, message=f"Product not in inventory: {order_item.product_name}")

    if product_inventory_item.quantity < order_item.quantity:
        return UpdateInventoryResult(is_success=False, message=f"Inventory not sufficient for: {order_item.product_name}")

    updated_inventory = ProductInventory(
        product_id=product_inventory_item.product_id,
        quantity=product_inventory_item.quantity - order_item.quantity
    )
    with DaprClient() as dapr_client:
        dapr_client.save_state(
            store_name=DAPR_INVENTORY_COMPONENT,
            key=updated_inventory.product_id,
            value=pickle.dumps(updated_inventory)
        )
    return UpdateInventoryResult(is_success=True, message=f"Inventory updated for {order_item.product_name}")
@@ -0,0 +1,72 @@
from pydantic import BaseModel
from typing import Optional

class ProductInventory(BaseModel):
    product_id: str
    quantity: int

class ProductInventoryItem(BaseModel):
    product_id: str
    product_name: str
    quantity: int

class ShippingDestinationResult(BaseModel):
    is_success: bool
    message: Optional[str] = None

    @staticmethod
    def from_dict(dict):
        return ShippingDestinationResult(**dict)

class ShipmentRegistrationStatus(BaseModel):
    order_id: str
    is_success: bool
    message: Optional[str] = None

    @staticmethod
    def from_dict(dict):
        return ShipmentRegistrationStatus(**dict)

class CustomerInfo(BaseModel):
    id: str
    country: str

    @staticmethod
    def from_dict(dict):
        return CustomerInfo(**dict)

class OrderItem(BaseModel):
    product_id: str
    product_name: str
    quantity: int
    total_price: float

    @staticmethod
    def from_dict(dict):
        return OrderItem(**dict)

class Order(BaseModel):
    id: str
    order_item: OrderItem
    customer_info: CustomerInfo

class OrderStatus(BaseModel):
    is_success: bool
    message: Optional[str] = None

class ActivityResult(BaseModel):
    is_success: bool
    message: Optional[str] = None

class PaymentResult(BaseModel):
    is_success: bool

class RegisterShipmentResult(BaseModel):
    is_success: bool

class ReimburseCustomerResult(BaseModel):
    is_success: bool

class UpdateInventoryResult(BaseModel):
    is_success: bool
    message: Optional[str] = None
@@ -0,0 +1,114 @@
from dapr.clients import DaprClient
from datetime import timedelta
from models import Order, OrderItem, CustomerInfo, OrderStatus, ActivityResult, ShippingDestinationResult, ShipmentRegistrationStatus, PaymentResult, RegisterShipmentResult, ReimburseCustomerResult
import dapr.ext.workflow as wf
import inventory_management as im

SHIPMENT_REGISTERED_EVENT = "shipment-registered-events"
DAPR_PUBSUB_COMPONENT = "shippingpubsub"
DAPR_PUBSUB_REGISTRATION_TOPIC = "shipment-registration-events"

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='order_workflow')
def order_workflow(ctx: wf.DaprWorkflowContext, order: Order):
    order = Order.model_validate(order)
    if not ctx.is_replaying:
        print(f'order_workflow: Received order id: {order.id}.', flush=True)

    # First, two independent activities are called in parallel (fan-out/fan-in pattern):
    tasks = [
        ctx.call_activity(check_inventory, input=order.order_item.model_dump()),
        ctx.call_activity(check_shipping_destination, input=order.customer_info.model_dump())
    ]
    task_results = yield wf.when_all(tasks)
    task_results = [ActivityResult.model_validate(task_result) for task_result in task_results]

    if any(not task_result.is_success for task_result in task_results):
        message = f"Order processing failed. Reason: {next(task_result for task_result in task_results if not task_result.is_success).message}"
        return OrderStatus(is_success=False, message=message).model_dump()

    # Two activities are called in sequence (chaining pattern) where the update_inventory
    # activity is dependent on the result of the process_payment activity:
    payment_result = yield ctx.call_activity(process_payment, input=order.model_dump())
    payment_result = PaymentResult.model_validate(payment_result)
    if not ctx.is_replaying:
        print(f'order_workflow: Payment result: {payment_result}.', flush=True)
    if payment_result.is_success:
        yield ctx.call_activity(update_inventory, input=order.order_item.model_dump())

    # The register_shipment activity uses pub/sub messaging to communicate with the shipping_app.
    yield ctx.call_activity(register_shipment, input=order.model_dump())

    # The shipping_app will also use pub/sub messaging back to the workflow_app and raise an event.
    # The workflow will wait for the event to be received or until the timeout occurs.
    shipment_registered_task = ctx.wait_for_external_event(name=SHIPMENT_REGISTERED_EVENT)
    timeout_task = ctx.create_timer(fire_at=timedelta(seconds=300))
    winner = yield wf.when_any([shipment_registered_task, timeout_task])
    if winner == timeout_task:
        # Timeout occurred, the shipment-registered-event was not received.
        message = f"Shipment registration status for {order.id} timed out."
        return OrderStatus(is_success=False, message=message).model_dump()

    shipment_registration_status = ShipmentRegistrationStatus.model_validate(shipment_registered_task.get_result())
    if not shipment_registration_status.is_success:
        # This is the compensation step in case the shipment registration event was not successful.
        yield ctx.call_activity(reimburse_customer, input=order.model_dump())
        message = f"Shipment registration status for {order.id} failed. Customer is reimbursed."
        return OrderStatus(is_success=False, message=message).model_dump()
    return OrderStatus(is_success=True, message=f"Order {order.id} processed successfully.").model_dump()

@wf_runtime.activity(name='check_inventory')
def check_inventory(ctx: wf.WorkflowActivityContext, order_item: OrderItem) -> dict:
    order_item = OrderItem.model_validate(order_item)
    print(f'check_inventory: Received input: {order_item}.', flush=True)
    is_available = im.check_inventory(order_item)
    return ActivityResult(is_success=is_available).model_dump()

@wf_runtime.activity(name='check_shipping_destination')
def check_shipping_destination(ctx: wf.WorkflowActivityContext, customer_info: CustomerInfo) -> dict:
    customer_info = CustomerInfo.model_validate(customer_info)
    print(f'check_shipping_destination: Received input: {customer_info}.', flush=True)
    with DaprClient() as dapr_client:
        response = dapr_client.invoke_method(
            app_id="shipping",
            method_name="checkDestination",
            http_verb="POST",
            data=customer_info.model_dump_json())
        if response.status_code != 200:
            print(f'Failed to register shipment. Reason: {response.text}.', flush=True)
            raise Exception(f"Failed to register shipment. Reason: {response.text}.")
        result = ShippingDestinationResult.model_validate(response.json())
        return ActivityResult(is_success=result.is_success).model_dump()

@wf_runtime.activity(name='process_payment')
def process_payment(ctx: wf.WorkflowActivityContext, order: Order) -> dict:
    order = Order.model_validate(order)
    print(f'process_payment: Received input: {order}.', flush=True)
    return PaymentResult(is_success=True).model_dump()

@wf_runtime.activity(name='update_inventory')
def update_inventory(ctx: wf.WorkflowActivityContext, order_item: OrderItem) -> dict:
    order_item = OrderItem.model_validate(order_item)
    print(f'update_inventory: Received input: {order_item}.', flush=True)
    update_inventory_result = im.update_inventory(order_item)
    return update_inventory_result.model_dump()

@wf_runtime.activity(name='reimburse_customer')
def reimburse_customer(ctx: wf.WorkflowActivityContext, order: Order) -> dict:
    order = Order.model_validate(order)
    print(f'reimburse_customer: Received input: {order}.', flush=True)
    return ReimburseCustomerResult(is_success=True).model_dump()

@wf_runtime.activity(name='register_shipment')
def register_shipment(ctx: wf.WorkflowActivityContext, order: Order) -> dict:
    order = Order.model_validate(order)
    print(f'register_shipment: Received input: {order}.', flush=True)
    with DaprClient() as dapr_client:
        dapr_client.publish_event(
            pubsub_name=DAPR_PUBSUB_COMPONENT,
            topic_name=DAPR_PUBSUB_REGISTRATION_TOPIC,
            data=order.model_dump_json(),
            data_content_type='application/json'
        )
    return RegisterShipmentResult(is_success=True).model_dump()
@@ -0,0 +1,6 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
fastapi-cloudevents>=2.0.2
pydantic>=2.11.4
uvicorn>=0.34.2
@@ -0,0 +1,126 @@
# External Events

This tutorial demonstrates how to author a workflow that waits until it receives an external event. This pattern is often applied in workflows that require an approval step. For more information about the external system interaction pattern, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-patterns/#external-system-interaction).

## Inspect the code

Open the `external_events_workflow.py` file in the `tutorials/workflow/python/external-system-interaction/external_events` folder. This file contains the definition for the workflow. It is an order workflow that requests an external approval if the order has a total price greater than 250 dollars.
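The waiting is implemented by combining an external event task with a durable timer, and letting the workflow continue with whichever completes first (excerpt from `external_events_workflow.py`):

```python
approval_status_task = ctx.wait_for_external_event(name='approval-event')
timeout_task = ctx.create_timer(fire_at=timedelta(minutes=2))
winner = yield wf.when_any([approval_status_task, timeout_task])
```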
```mermaid
graph LR
   SW((Start
   Workflow))
   IF1{Is TotalPrice
   > 250?}
   IF2{Is Order Approved
   or TotalPrice < 250?}
   WAIT[Wait for
   approval event]
   EX{Event
   received?}
   PO[Process Order]
   SN[Send Notification]
   EW((End
   Workflow))
   SW --> IF1
   IF1 -->|Yes| WAIT
   IF1 -->|No| IF2
   EX -->|Yes| IF2
   EX -->|No| SN
   WAIT --> EX
   IF2 -->|Yes| PO
   PO --> SN
   IF2 -->|No| SN
   SN --> EW
```

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/external-system-interaction/external_events` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate one level back to the `external-system-interaction` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
   - 'Started Dapr with app id "externalevents"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->
   ```bash
   dapr run -f .
   ```
   <!-- END_STEP -->

4. Use the POST request in the [`externalevents.http`](./externalevents.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST \
     --url http://localhost:5258/start \
     --header 'content-type: application/json' \
     --data '{"id": "b7dd836b-e913-4446-9912-d400befebec5","description": "Rubber ducks","quantity": 100,"total_price": 500}'
   ```

   The input for the workflow is an `Order` object:

   ```json
   {
     "id": "b7dd836b-e913-4446-9912-d400befebec5",
     "description": "Rubber ducks",
     "quantity": 100,
     "total_price": 500
   }
   ```

5. Use the GET request in the [`externalevents.http`](./externalevents.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3558/v1.0/workflows/dapr/b7dd836b-e913-4446-9912-d400befebec5
   ```

   The workflow should still be running, since it is waiting for an external event.

   The app logs should look similar to the following:

   ```text
   == APP - externalevents == Received order: Order(id='b7dd836b-e913-4446-9912-d400befebec5', description='Rubber ducks', quantity=100, total_price=500.0)
   ```

6. Use the POST request in the [`externalevents.http`](./externalevents.http) file to send an `approval-event` to the workflow, or use this cURL command:

   ```bash
   curl -i --request POST \
     --url http://localhost:3558/v1.0/workflows/dapr/b7dd836b-e913-4446-9912-d400befebec5/raiseEvent/approval-event \
     --header 'content-type: application/json' \
     --data '{"order_id": "b7dd836b-e913-4446-9912-d400befebec5","is_approved": true}'
   ```

   The payload for the event is an `ApprovalStatus` object:

   ```json
   {
     "order_id": "b7dd836b-e913-4446-9912-d400befebec5",
     "is_approved": true
   }
   ```

   *The workflow will only wait for the external approval event for 2 minutes before timing out. In that case you will need to start a new order workflow instance.*

7. Again use the GET request in the [`externalevents.http`](./externalevents.http) file to get the status of the workflow. The workflow should now be completed.

   The expected serialized output of the workflow is:

   ```txt
   "\"Order b7dd836b-e913-4446-9912-d400befebec5 has been approved.\""
   ```

8. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.
@@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
- appID: externalevents
  appDirPath: external_events
  appPort: 5258
  daprHTTPPort: 3558
  command: ["python3", "app.py"]
  appLogDestination: console
  daprdLogDestination: console
@@ -0,0 +1,32 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from external_events_workflow import wf_runtime, external_events_workflow, Order
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(order: Order):
    print(f"Received order: {order}")

    """
    The DaprWorkflowClient is the API to manage workflows.
    Here it is used to schedule a new workflow instance.
    """
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=external_events_workflow,
        input=order,
        instance_id=order.id
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5258)
@@ -0,0 +1,61 @@
from dataclasses import dataclass
from datetime import timedelta
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@dataclass
class ApprovalStatus:
    order_id: str
    is_approved: bool

    @staticmethod
    def from_dict(dict):
        return ApprovalStatus(**dict)

@dataclass
class Order:
    id: str
    description: str
    quantity: int
    total_price: float

@wf_runtime.workflow(name='external_events_workflow')
def external_events_workflow(ctx: wf.DaprWorkflowContext, order: Order):
    approval_status = ApprovalStatus(order.id, True)

    if order.total_price > 250:
        yield ctx.call_activity(request_approval, input=order)
        approval_status_task = ctx.wait_for_external_event(name='approval-event')
        timeout_task = ctx.create_timer(fire_at=timedelta(minutes=2))
        winner = yield wf.when_any([approval_status_task, timeout_task])

        if winner == timeout_task:
            notification_message = f"Approval request for order {order.id} timed out."
            yield ctx.call_activity(send_notification, input=order)
            return notification_message

        approval_status = ApprovalStatus.from_dict(approval_status_task.get_result())

    if approval_status.is_approved:
        yield ctx.call_activity(process_order, input=order)

    notification_message = f"Order {order.id} has been approved." if approval_status.is_approved else f"Order {order.id} has been rejected."
    yield ctx.call_activity(send_notification, input=notification_message)

    return notification_message

@wf_runtime.activity(name='request_approval')
def request_approval(ctx: wf.WorkflowActivityContext, order: Order) -> None:
    print(f'request_approval: Request approval for order: {order.id}.', flush=True)
    # Imagine an approval request being sent to another system

@wf_runtime.activity(name='send_notification')
def send_notification(ctx: wf.WorkflowActivityContext, message: str) -> None:
    print(f'send_notification: {message}.', flush=True)
    # Imagine a notification being sent to the user

@wf_runtime.activity(name='process_order')
def process_order(ctx: wf.WorkflowActivityContext, order: Order) -> None:
    print(f'process_order: Processed order: {order.id}.', flush=True)
    # Imagine the order being processed by another system
@@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2
@@ -0,0 +1,30 @@
@apphost=http://localhost:5258

### Start the external_events_workflow
# @name startWorkflowRequest
@orderId={{$guid}}
POST {{ apphost }}/start
Content-Type: application/json

{
  "id": "{{orderId}}",
  "description": "Rubber ducks",
  "quantity": 100,
  "total_price": 500
}

### Get the workflow status
@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3558
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

### Send an approval event
@eventName=approval-event
POST {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}/raiseEvent/{{ eventName }}
Content-Type: application/json

{
  "order_id": "{{instanceId}}",
  "is_approved": true
}
@@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk
@@ -0,0 +1,102 @@
# Fan-out/Fan-in

This tutorial demonstrates how to author a workflow where multiple independent tasks can be scheduled and executed simultaneously. The workflow can either wait until all tasks are completed to proceed, or continue when the fastest task is completed. For more information about the fan-out/fan-in pattern, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-patterns/#fan-outfan-in).

## Inspect the code

Open the `fanoutfanin_workflow.py` file in the `tutorials/workflow/python/fan-out-fan-in/fan_out_fan_in` folder. This file contains the definition for the workflow.
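The workflow fans out one `get_word_length` task per word, waits for all of them with `when_all`, and then picks the shortest result (excerpt from `fanoutfanin_workflow.py`):

```python
tasks = [
    ctx.call_activity(get_word_length, input=word) for word in words
]
all_word_lengths = yield wf.when_all(tasks)
shortest_word = min(all_word_lengths, key=lambda x: x.length)
```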
```mermaid
graph LR
   SW((Start
   Workflow))
   subgraph for each word in the input
   GWL[get_word_length]
   end
   SHORT[Select the
   shortest word]
   ALL[Wait until all tasks
   are completed]
   EW((End
   Workflow))
   SW --> GWL
   GWL --> ALL
   ALL --> SHORT
   SHORT --> EW
```

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/fan-out-fan-in/fan_out_fan_in` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate back one level to the `fan-out-fan-in` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
   - 'Started Dapr with app id "fanoutfanin"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->
   ```bash
   dapr run -f .
   ```
   <!-- END_STEP -->

4. Use the POST request in the [`fanoutfanin.http`](./fanoutfanin.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST \
     --url http://localhost:5256/start \
     --header 'content-type: application/json' \
     --data '["which","word","is","the","shortest"]'
   ```

   The input for the workflow is an array of strings:

   ```json
   [
     "which",
     "word",
     "is",
     "the",
     "shortest"
   ]
   ```

   The expected app logs are as follows:

   ```text
   == APP - fanoutfanin == get_word_length: Received input: word.
   == APP - fanoutfanin == get_word_length: Received input: is.
   == APP - fanoutfanin == get_word_length: Received input: the.
   == APP - fanoutfanin == get_word_length: Received input: shortest.
   == APP - fanoutfanin == get_word_length: Received input: which.
   ```

   > Note that the order of the logs may vary.

5. Use the GET request in the [`fanoutfanin.http`](./fanoutfanin.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3556/v1.0/workflows/dapr/<INSTANCEID>
   ```

   Where `<INSTANCEID>` is the workflow instance ID you received in the `instance_id` property in the previous step.

   The expected serialized output of the workflow is:

   ```txt
   "\"is\""
   ```

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.
@@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
- appID: fanoutfanin
  appDirPath: fan_out_fan_in
  appPort: 5256
  daprHTTPPort: 3556
  command: ["python3", "app.py"]
  appLogDestination: console
  daprdLogDestination: console
@@ -0,0 +1,27 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from fanoutfanin_workflow import wf_runtime, fanoutfanin_workflow
from typing import List
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(words: List[str]):
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=fanoutfanin_workflow,
        input=words
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5256)

@ -0,0 +1,25 @@
from dataclasses import dataclass
from typing import List
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@dataclass
class WordLength:
    word: str
    length: int

@wf_runtime.workflow(name='fanoutfanin_workflow')
def fanoutfanin_workflow(ctx: wf.DaprWorkflowContext, words: List[str]):
    tasks = [
        ctx.call_activity(get_word_length, input=word) for word in words
    ]
    all_word_lengths = yield wf.when_all(tasks)
    shortest_word = min(all_word_lengths, key=lambda x: x.length)
    return shortest_word.word

@wf_runtime.activity(name='get_word_length')
def get_word_length(ctx: wf.WorkflowActivityContext, word: str) -> WordLength:
    print(f'get_word_length: Received input: {word}.', flush=True)
    return WordLength(word=word, length=len(word))

@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2

@ -0,0 +1,19 @@
@apphost=http://localhost:5256

### Start the fanoutfanin_workflow
# @name startWorkflowRequest
POST {{ apphost }}/start
Content-Type: application/json

[
  "which",
  "word",
  "is",
  "the",
  "shortest"
]

### Get the workflow status
@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3556
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk

@ -0,0 +1,80 @@
# Workflow Basics

This tutorial covers the fundamentals of authoring Dapr Workflows. For more information, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-features-concepts/).

## Inspect the code

Open the `basic_workflow.py` file in the `tutorials/workflow/python/fundamentals/basic` folder. This file contains the definition for the workflow.

The workflow consists of two activities: `activity1` and `activity2`, which are called in sequence, where the result of `activity1` is used as an input for `activity2`. You can find the activity definitions below the workflow definition.

```mermaid
graph LR
    SW((Start
    Workflow))
    A1[activity1]
    A2[activity2]
    EW((End
    Workflow))
    SW --> A1
    A1 --> A2
    A2 --> EW
```

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/fundamentals/basic` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate one level back to the `fundamentals` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
     - 'Started Dapr with app id "basic"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->

   ```bash
   dapr run -f .
   ```

   <!-- END_STEP -->

4. Use the POST request in the [`fundamentals.http`](./fundamentals.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST http://localhost:5254/start/One
   ```

   Note the `instance_id` property in the response. This property contains the workflow instance ID. You can use this ID to get the status of the workflow instance you just started.
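
   For example, the response body looks similar to this (the instance ID value shown here is only an illustration; a new ID is generated for every workflow instance):

   ```json
   {
     "instance_id": "c16cd9d92d8a4f63b254a020a7e0f7ca"
   }
   ```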

   The input for the workflow is a string with the value `One`. The expected app logs are as follows:

   ```text
   == APP - basic == activity1: Received input: One.
   == APP - basic == activity2: Received input: One Two.
   ```

5. Use the GET request in the [`fundamentals.http`](./fundamentals.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3554/v1.0/workflows/dapr/<INSTANCEID>
   ```

   Where `<INSTANCEID>` is the workflow instance ID you received in the `instance_id` property in the previous step.
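
   The response from the workflow status endpoint is a JSON object. As a rough sketch (the field names here are based on Dapr 1.15 and may differ between versions), the workflow result appears in the `properties` map:

   ```json
   {
     "instanceID": "<INSTANCEID>",
     "workflowName": "basic_workflow",
     "runtimeStatus": "COMPLETED",
     "properties": {
       "dapr.workflow.input": "\"One\"",
       "dapr.workflow.output": "\"One Two Three\""
     }
   }
   ```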

   The expected serialized output of the workflow is:

   ```txt
   "\"One Two Three\""
   ```

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.

@ -0,0 +1,29 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from basic_workflow import wf_runtime, basic_workflow
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start/{input}", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(input: str):
    """
    The DaprWorkflowClient is the API to manage workflows.
    Here it is used to schedule a new workflow instance.
    """
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=basic_workflow,
        input=input
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5254)

@ -0,0 +1,39 @@
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

"""
Workflows orchestrate activities and other (child) workflows, and include business logic (if/else or switch statements).
Workflow code must be deterministic. Any non-deterministic behavior should be written inside activities.

Workflow definitions use the `workflow` decorator to define a workflow.
The first argument (`ctx`) is the `DaprWorkflowContext`, which contains properties
about the workflow instance, and methods to call activities.
The second argument (`wf_input`) is the input to the workflow. It can be a simple or complex type.
"""
@wf_runtime.workflow(name='basic_workflow')
def basic_workflow(ctx: wf.DaprWorkflowContext, wf_input: str):
    result1 = yield ctx.call_activity(activity1, input=wf_input)
    result2 = yield ctx.call_activity(activity2, input=result1)
    return result2

"""
Activity code typically performs one specific task, like calling an API to store or retrieve data.
You can use other Dapr APIs inside an activity.

Activity definitions use the `activity` decorator to define an activity.
The first argument (`ctx`) is the `WorkflowActivityContext` and provides
the name of the activity and the workflow instance.
The second argument (`act_input`) is the input parameter for the activity.
There can only be one input parameter. Use a class if multiple input values are required.
"""
@wf_runtime.activity(name='activity1')
def activity1(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity1: Received input: {act_input}.', flush=True)
    return f"{act_input} Two"

@wf_runtime.activity(name='activity2')
def activity2(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity2: Received input: {act_input}.', flush=True)
    return f"{act_input} Three"

@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2

@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
  - appID: basic
    appDirPath: basic
    appPort: 5254
    daprHTTPPort: 3554
    command: ["python3", "app.py"]
    appLogDestination: console
    daprdLogDestination: console

@ -0,0 +1,12 @@
@apphost=http://localhost:5254

### Start the basic workflow
# @name startWorkflowRequest
@input=One
POST {{ apphost }}/start/{{ input }}


@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3554
### Get the workflow status
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk

@ -0,0 +1,89 @@
# Monitor Pattern

This tutorial demonstrates how to run a workflow in a loop. This can be used for recurring tasks that need to be executed at a certain frequency (e.g. a clean-up job that runs every hour). For more information on the monitor pattern, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-patterns/#monitor).

## Inspect the code

Open the `monitor_workflow.py` file in the `tutorials/workflow/python/monitor-pattern/monitor` folder. This file contains the definition for the workflow, which calls the `check_status` activity to check whether a fictional resource is ready. The `check_status` activity uses a random number generator to simulate the status of the resource. If the status is not ready, the workflow waits for two seconds and is continued as a new instance.

```mermaid
graph LR
    SW((Start
    Workflow))
    CHECK[check_status]
    IF{Is Ready}
    TIMER[Wait for a period of time]
    NEW[Continue as a new instance]
    EW((End
    Workflow))
    SW --> CHECK
    CHECK --> IF
    IF -->|Yes| EW
    IF -->|No| TIMER
    TIMER --> NEW
    NEW --> SW
```
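
The heart of the pattern is the combination of a durable timer and `continue_as_new`, which restarts the workflow with a fresh history and an updated input. Abbreviated from the `monitor_workflow.py` file shown later in this diff:

```python
status = yield ctx.call_activity(check_status, input=counter)

if not status.is_ready:
    yield ctx.create_timer(fire_at=timedelta(seconds=2))  # wait before probing again
    yield ctx.continue_as_new(counter + 1)                # restart with an incremented counter

return f"Status is healthy after checking {counter} times."
```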

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/monitor-pattern/monitor` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate one level up to the `monitor-pattern` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
     - 'Started Dapr with app id "monitor"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->

   ```bash
   dapr run -f .
   ```

   <!-- END_STEP -->

4. Use the POST request in the [`monitor.http`](./monitor.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST http://localhost:5257/start/0
   ```

   The input for the workflow is an integer with the value `0`.

   The expected app logs are as follows:

   ```text
   == APP - monitor == check_status: Received input: 0.
   == APP - monitor == check_status: Received input: 1.
   == APP - monitor == check_status: Received input: 2.
   == APP - monitor == check_status: Received input: 3.
   ```

   *Note that the number of app log statements can vary due to the randomization in the `check_status` activity.*

5. Use the GET request in the [`monitor.http`](./monitor.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3557/v1.0/workflows/dapr/<INSTANCEID>
   ```

   Where `<INSTANCEID>` is the workflow instance ID you received in the `instance_id` property in the previous step.

   The expected serialized output of the workflow is:

   ```txt
   "\"Status is healthy after checking 3 times.\""
   ```

   *The actual number of checks can vary since some randomization is used in the `check_status` activity.*

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.

@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
  - appID: monitor
    appDirPath: monitor
    appPort: 5257
    daprHTTPPort: 3557
    command: ["python3", "app.py"]
    appLogDestination: console
    daprdLogDestination: console

@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk

@ -0,0 +1,12 @@
@apphost=http://localhost:5257

### Start the monitor workflow
# @name startWorkflowRequest
@counter=0
POST {{ apphost }}/start/{{counter}}


### Get the workflow status
@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3557
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

@ -0,0 +1,25 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from monitor_workflow import wf_runtime, monitor_workflow
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start/{counter}", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(counter: int):
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=monitor_workflow,
        input=counter
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5257)

@ -0,0 +1,26 @@
from dataclasses import dataclass
from datetime import timedelta
import random
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@dataclass
class Status:
    is_ready: bool

@wf_runtime.workflow(name='monitor_workflow')
def monitor_workflow(ctx: wf.DaprWorkflowContext, counter: int):
    status = yield ctx.call_activity(check_status, input=counter)

    if not status.is_ready:
        yield ctx.create_timer(fire_at=timedelta(seconds=2))
        yield ctx.continue_as_new(counter + 1)

    return f"Status is healthy after checking {counter} times."

@wf_runtime.activity(name='check_status')
def check_status(ctx: wf.WorkflowActivityContext, act_input: int) -> Status:
    print(f'check_status: Received input: {act_input}.', flush=True)
    is_ready = random.randint(0, act_input) > 1
    return Status(is_ready=is_ready)

@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2

@ -0,0 +1,94 @@
# Resiliency & Compensation

This tutorial demonstrates how to improve resiliency when activities are executed and how to include compensation actions when activities return an error. For more on workflow resiliency, read the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-features-concepts/#retry-policies).

## Inspect the code

Open the `resiliency_and_compensation_workflow.py` file in the `tutorials/workflow/python/resiliency-and-compensation/resiliency_and_compensation` folder. This file contains the definition for the workflow. This workflow applies an activity retry policy to all of its activities and runs compensating logic if an activity raises an exception.

```mermaid
graph LR
    SW((Start
    Workflow))
    A1[minus_one]
    A2[division]
    EX{Division
    Exception?}
    A3[plus_one]
    EW((End
    Workflow))
    SW --> A1
    A1 --> A2
    A2 --> EX
    EX -- yes --> A3
    EX -- no --> EW
    A3 --> EW
```
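
Two mechanisms do the work here: a `RetryPolicy` passed to `call_activity`, and a `try`/`except` around the failing activity that schedules a compensating activity. Abbreviated from the `resiliency_and_compensation_workflow.py` file shown later in this diff:

```python
default_retry_policy = wf.RetryPolicy(max_number_of_attempts=3,
                                      first_retry_interval=timedelta(seconds=2))
try:
    wf_result = yield ctx.call_activity(division, input=result1, retry_policy=default_retry_policy)
except Exception as e:
    if type(e).__name__ == 'TaskFailedError':
        # Compensate the earlier minus_one activity.
        wf_result = yield ctx.call_activity(plus_one, input=result1)
```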

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/resiliency-and-compensation/resiliency_and_compensation` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate one folder back to `resiliency-and-compensation` and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
     - 'Started Dapr with app id "resiliency"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->

   ```bash
   dapr run -f .
   ```

   <!-- END_STEP -->

4. Use the POST request in the [`resiliency-compensation.http`](./resiliency-compensation.http) file to start the workflow with a workflow input value of `1`, or use this cURL command:

   ```bash
   curl -i --request POST \
     --url http://localhost:5264/start/1
   ```

   When the workflow input is `1`, the `minus_one` activity subtracts `1`, resulting in `0`. This value is passed to the `division` activity, which throws an error because the divisor is `0`. The `division` activity is retried up to the maximum of three attempts, but every attempt fails the same way since the divisor does not change. Finally, the compensation action `plus_one` is executed, increasing the value back to `1` before returning the result.

   The app logs should output the following:

   ```txt
   == APP - resiliency == minus_one: Received input: 1.
   == APP - resiliency == division: Received divisor: 0.
   == APP - resiliency == division: Received divisor: 0.
   == APP - resiliency == division: Received divisor: 0.
   == APP - resiliency == plus_one: Received input: 0.
   ```

5. Use the GET request in the [`resiliency-compensation.http`](./resiliency-compensation.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3564/v1.0/workflows/dapr/<INSTANCEID>
   ```

   Where `<INSTANCEID>` is the workflow instance ID you received in the `instance_id` property in the previous step.

   Since `1` is used as the input, the expected serialized output of the workflow is:

   ```txt
   "1"
   ```

   The expected serialized custom status field of the workflow output is:

   ```txt
   "\"Compensated minus_one activity with plus_one activity.\""
   ```

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.

@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
  - appID: resiliency
    appDirPath: resiliency_and_compensation
    appPort: 5264
    daprHTTPPort: 3564
    command: ["python3", "app.py"]
    appLogDestination: console
    daprdLogDestination: console

@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk

@ -0,0 +1,12 @@
@apphost=http://localhost:5264

### Start the resiliency_and_compensation workflow
# @name startWorkflowRequest
@input=1
POST {{ apphost }}/start/{{input}}


@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3564
### Get the workflow status
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

@ -0,0 +1,25 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from resiliency_and_compensation_workflow import wf_runtime, resiliency_and_compensation_workflow
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start/{wf_input}", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(wf_input: int):
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=resiliency_and_compensation_workflow,
        input=wf_input
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5264)

@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2

@ -0,0 +1,52 @@
from datetime import timedelta
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='resiliency_and_compensation_workflow')
def resiliency_and_compensation_workflow(ctx: wf.DaprWorkflowContext, wf_input: int):
    default_retry_policy = wf.RetryPolicy(max_number_of_attempts=3, first_retry_interval=timedelta(seconds=2))

    result1 = yield ctx.call_activity(minus_one, input=wf_input, retry_policy=default_retry_policy)
    wf_result = None

    try:
        wf_result = yield ctx.call_activity(division, input=result1, retry_policy=default_retry_policy)
    except Exception as e:
        """
        Something went wrong in the division activity which is not recoverable.
        Perform a compensation action for the minus_one activity to revert any
        changes made in that activity.
        """
        if type(e).__name__ == 'TaskFailedError':
            wf_result = yield ctx.call_activity(plus_one, input=result1)

            """
            A custom status message can be set on the workflow instance.
            This can be used to clarify anything that happened during the workflow execution.
            """
            ctx.set_custom_status("Compensated minus_one activity with plus_one activity.")

    return wf_result

@wf_runtime.activity(name='minus_one')
def minus_one(ctx: wf.WorkflowActivityContext, act_input: int) -> int:
    print(f'minus_one: Received input: {act_input}.', flush=True)
    result = act_input - 1
    return result

@wf_runtime.activity(name='division')
def division(ctx: wf.WorkflowActivityContext, act_input: int) -> int:
    """
    In case the divisor is 0 an exception is thrown.
    This exception is handled in the workflow.
    """
    print(f'division: Received divisor: {act_input}.', flush=True)
    result = int(100/act_input)
    return result

@wf_runtime.activity(name='plus_one')
def plus_one(ctx: wf.WorkflowActivityContext, act_input: int) -> int:
    print(f'plus_one: Received input: {act_input}.', flush=True)
    result = act_input + 1
    return result

@ -0,0 +1,80 @@
# Task Chaining Pattern

This tutorial demonstrates how to chain multiple tasks together as a sequence in a workflow, where the output of one task is used as the input for the next one. For more information about the task chaining pattern, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-patterns/#task-chaining).

## Inspect the code

Open the `chaining_workflow.py` file in the `tutorials/workflow/python/task-chaining/task_chaining` folder. This file contains the definition for the workflow.

```mermaid
graph LR
    SW((Start
    Workflow))
    A1[activity1]
    A2[activity2]
    A3[activity3]
    EW((End
    Workflow))
    SW --> A1
    A1 --> A2
    A2 --> A3
    A3 --> EW
```
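
The workflow itself is just sequential code: each `yield ctx.call_activity(...)` waits for the activity result before the next call runs, and the result of one call becomes the input of the next. Abbreviated from the `chaining_workflow.py` file shown later in this diff:

```python
result1 = yield ctx.call_activity(activity1, input=wf_input)
result2 = yield ctx.call_activity(activity2, input=result1)   # output of activity1 feeds activity2
wf_result = yield ctx.call_activity(activity3, input=result2)
return wf_result
```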

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/task-chaining/task_chaining` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate back one level to the `task-chaining` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
     - 'Started Dapr with app id "chaining"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->

   ```bash
   dapr run -f .
   ```

   <!-- END_STEP -->

4. Use the POST request in the [`chaining.http`](./chaining.http) file to start the workflow, or use this cURL command:

   ```bash
   curl -i --request POST http://localhost:5255/start
   ```

   The input for the workflow is a string with the value `This`. The expected app logs are as follows:

   ```text
   == APP - chaining == activity1: Received input: This.
   == APP - chaining == activity2: Received input: This is.
   == APP - chaining == activity3: Received input: This is task.
   ```

5. Use the GET request in the [`chaining.http`](./chaining.http) file to get the status of the workflow, or use this cURL command:

   ```bash
   curl --request GET --url http://localhost:3555/v1.0/workflows/dapr/<INSTANCEID>
   ```

   Where `<INSTANCEID>` is the workflow instance ID you received in the `instance_id` property in the previous step.

   The expected serialized output of the workflow is:

   ```txt
   "\"This is task chaining\""
   ```

6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.

@ -0,0 +1,11 @@
@apphost=http://localhost:5255

### Start the task_chaining workflow
# @name startWorkflowRequest
POST {{ apphost }}/start


@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3555
### Get the workflow status
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
  - appID: chaining
    appDirPath: task_chaining
    appPort: 5255
    daprHTTPPort: 3555
    command: ["python3", "app.py"]
    appLogDestination: console
    daprdLogDestination: console

@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk

@ -0,0 +1,25 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from chaining_workflow import wf_runtime, chaining_workflow
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)

@app.post("/start", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow():
    wf_client = wf.DaprWorkflowClient()
    instance_id = wf_client.schedule_new_workflow(
        workflow=chaining_workflow,
        input="This"
    )
    return {"instance_id": instance_id}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5255)

@ -0,0 +1,25 @@
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='chaining_workflow')
def chaining_workflow(ctx: wf.DaprWorkflowContext, wf_input: str):
    result1 = yield ctx.call_activity(activity1, input=wf_input)
    result2 = yield ctx.call_activity(activity2, input=result1)
    wf_result = yield ctx.call_activity(activity3, input=result2)
    return wf_result

@wf_runtime.activity(name='activity1')
def activity1(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity1: Received input: {act_input}.', flush=True)
    return f"{act_input} is"

@wf_runtime.activity(name='activity2')
def activity2(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity2: Received input: {act_input}.', flush=True)
    return f"{act_input} task"

@wf_runtime.activity(name='activity3')
def activity3(ctx: wf.WorkflowActivityContext, act_input: str) -> str:
    print(f'activity3: Received input: {act_input}.', flush=True)
    return f"{act_input} chaining"

@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2

@ -0,0 +1,51 @@
# Workflow Management

This tutorial demonstrates the various APIs to manage a workflow instance. These operations include:

- Scheduling a workflow instance
- Getting a workflow instance state
- Suspending a workflow instance
- Resuming a workflow instance
- Terminating a workflow instance
- Purging a workflow instance

For more information on workflow management, see the [Dapr docs](https://docs.dapr.io/developing-applications/building-blocks/workflow/howto-manage-workflow/).

## Inspect the code

Open the `app.py` file in the `tutorials/workflow/python/workflow-management/workflow_management` folder. This file contains the endpoint definitions that use the workflow management API. The workflow that is being managed is named `never_ending_workflow` and is a counter that will keep running once it's started.
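
All of these operations go through the `DaprWorkflowClient`. Condensed from the endpoint handlers in the `app.py` file shown later in this diff:

```python
wf_client = wf.DaprWorkflowClient()

instance_id = wf_client.schedule_new_workflow(workflow=never_ending_workflow, input=0)
state = wf_client.get_workflow_state(instance_id)   # inspect status, input, and output
wf_client.pause_workflow(instance_id)               # suspend a running instance
wf_client.resume_workflow(instance_id)              # continue a suspended instance
wf_client.terminate_workflow(instance_id)           # stop the instance for good
wf_client.purge_workflow(instance_id)               # remove its state from the state store
```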

## Run the tutorial

1. Use a terminal to navigate to the `tutorials/workflow/python/workflow-management/workflow_management` folder.
2. Install the dependencies using pip:

   ```bash
   pip3 install -r requirements.txt
   ```

3. Navigate one level back to the `workflow-management` folder and use the Dapr CLI to run the Dapr Multi-App run file.

   <!-- STEP
   name: Run multi app run template
   expected_stdout_lines:
     - 'Started Dapr with app id "neverendingworkflow"'
   expected_stderr_lines:
   working_dir: .
   output_match_mode: substring
   background: true
   sleep: 15
   timeout_seconds: 30
   -->

   ```bash
   dapr run -f .
   ```

   <!-- END_STEP -->

4. Use the first POST request in the [`workflowmanagement.http`](./workflowmanagement.http) file to start the workflow.
5. Use the other requests in the [`workflowmanagement.http`](./workflowmanagement.http) file to perform other actions on the workflow, such as:
   - Getting the workflow instance state.
   - Suspending & resuming the workflow instance.
   - Terminating the workflow instance.
   - Purging the workflow instance.
6. Stop the Dapr Multi-App run process by pressing `Ctrl+C`.

@ -0,0 +1,11 @@
version: 1
common:
  resourcesPath: ../../resources
apps:
  - appID: neverendingworkflow
    appDirPath: workflow_management
    appPort: 5262
    daprHTTPPort: 3562
    command: ["python3", "app.py"]
    appLogDestination: console
    daprdLogDestination: console

@ -0,0 +1,2 @@
include ../../../../docker.mk
include ../../../../validate.mk

@ -0,0 +1,46 @@
from fastapi import FastAPI, status
from contextlib import asynccontextmanager
from never_ending_workflow import wf_runtime, never_ending_workflow
import dapr.ext.workflow as wf
import uvicorn

@asynccontextmanager
async def lifespan(app: FastAPI):
    wf_runtime.start()
    yield
    wf_runtime.shutdown()

app = FastAPI(lifespan=lifespan)
wf_client = wf.DaprWorkflowClient()

@app.post("/start/{counter}", status_code=status.HTTP_202_ACCEPTED)
async def start_workflow(counter: int):
    instance_id = wf_client.schedule_new_workflow(
        workflow=never_ending_workflow,
        input=counter
    )
    return {"instance_id": instance_id}

@app.get("/status/{instance_id}")
async def get_status(instance_id: str):
    wf_status = wf_client.get_workflow_state(instance_id)
    return wf_status

@app.post("/suspend/{instance_id}", status_code=status.HTTP_202_ACCEPTED)
async def suspend_workflow(instance_id: str):
    wf_client.pause_workflow(instance_id)

@app.post("/resume/{instance_id}", status_code=status.HTTP_202_ACCEPTED)
async def resume_workflow(instance_id: str):
    wf_client.resume_workflow(instance_id)

@app.post("/terminate/{instance_id}", status_code=status.HTTP_202_ACCEPTED)
async def terminate_workflow(instance_id: str):
    wf_client.terminate_workflow(instance_id)

@app.delete("/purge/{instance_id}", status_code=status.HTTP_202_ACCEPTED)
async def purge_workflow(instance_id: str):
    wf_client.purge_workflow(instance_id)

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5262)

@ -0,0 +1,18 @@
from datetime import timedelta
import dapr.ext.workflow as wf

wf_runtime = wf.WorkflowRuntime()

@wf_runtime.workflow(name='never_ending_workflow')
def never_ending_workflow(ctx: wf.DaprWorkflowContext, counter: int):
    yield ctx.call_activity(send_notification, input=counter)
    yield ctx.create_timer(fire_at=timedelta(seconds=2))
    counter += 1
    yield ctx.continue_as_new(counter)

    return True

@wf_runtime.activity(name='send_notification')
def send_notification(ctx: wf.WorkflowActivityContext, counter: int) -> None:
    print(f'send_notification: Received input: {counter}.', flush=True)
    # Imagine that a notification is sent to the user here.

@ -0,0 +1,4 @@
dapr>=1.15.0
dapr-ext-workflow>=1.15.0
fastapi>=0.115.0
uvicorn>=0.34.2

@ -0,0 +1,26 @@
@apphost=http://localhost:5262

### Start the never_ending_workflow
# @name startWorkflowRequest
@counter=0
POST {{ apphost }}/start/{{counter}}

### Get the workflow status via the Dapr API
@instanceId={{startWorkflowRequest.response.body.instance_id}}
@daprHost=http://localhost:3562
GET {{ daprHost }}/v1.0/workflows/dapr/{{ instanceId }}

### Get the workflow status via the application
GET {{ apphost }}/status/{{ instanceId }}

### Suspend the workflow
POST {{ apphost }}/suspend/{{ instanceId }}

### Resume the workflow
POST {{ apphost }}/resume/{{ instanceId }}

### Terminate the workflow
POST {{ apphost }}/terminate/{{ instanceId }}

### Purge the workflow
DELETE {{ apphost }}/purge/{{ instanceId }}