mirror of https://github.com/dapr/rust-sdk.git
Compare commits
81 Commits
| SHA1 |
|---|
| 407447816c |
| 06ea44e8d3 |
| 942263938a |
| 9537a3b826 |
| 35930ebc5f |
| acb829cdb5 |
| 26f4ee6d32 |
| 4fb85abcea |
| f1e469cf57 |
| 1cdcc272ce |
| 185dc72889 |
| 1508ea24cb |
| 99d099f15e |
| c0b7155093 |
| ab219049a4 |
| 57347e70c3 |
| 93322c0e86 |
| 52f095f8e7 |
| a532ee1a50 |
| 8bf6013eee |
| 6973b7d9be |
| 5241ce696c |
| 2deed2baa8 |
| b9de85f885 |
| 0c909c544b |
| f0498dcc3f |
| abecc5c70b |
| 4da565cbae |
| 1d88e2074f |
| 1ac019159c |
| c4675a6a09 |
| 85ed911b1d |
| 95b009baaf |
| e15f65b435 |
| fcff470451 |
| ae9bcda712 |
| 851abcae57 |
| b2be904225 |
| 4cfa9a3db2 |
| e78cab477e |
| 709c67e856 |
| b1cacef2d7 |
| 597aa46f0e |
| 0183be5baa |
| 580b88ff8a |
| e04157d7d7 |
| b81f8f387b |
| df0bf8818c |
| 04375f4fd2 |
| 80a3a90f82 |
| fbb81bf518 |
| dbc7a80ef1 |
| 12dd8af64f |
| 0f29428a55 |
| 981340a5d8 |
| 5fd49f196f |
| 7322ec4cc3 |
| 8f00930ebc |
| f70a18f896 |
| 78ad9fd685 |
| 01d2214a0d |
| d0b6490d9c |
| 4e2d316032 |
| 39071997ac |
| 97c98af9aa |
| 1b095c7108 |
| 81b3834fd7 |
| 7a508beaea |
| 0a0efaa9ab |
| 6f566cb051 |
| 8d4db4382c |
| ece8714f29 |
| 60c0bbed71 |
| 0eaf89867c |
| ccf2902ed5 |
| 2c0aa5a779 |
| 5e699df5a3 |
| 6301ee43c5 |
| 7aef43a7b4 |
| 6951e58060 |
| bc728d5262 |
@ -1,6 +1,8 @@
|
|||
name: dapr-rust-sdk
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '8 8 * * *'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
@ -14,75 +16,134 @@ on:
|
|||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
|
||||
PROTOC_VERSION: 3.x
|
||||
RUST_TOOLCHAIN: 1.79.0
|
||||
PROTOC_VERSION: 24.4
|
||||
RUSTFLAGS: "-D warnings"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint
|
||||
name: Check and Lint
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_TOOLCHAIN }}
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
toolchain: stable
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v1
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check compiled protos for a diff
|
||||
run: |
|
||||
make proto-gen check-diff-proto
|
||||
|
||||
- name: cargo fmt
|
||||
run: cargo fmt -- --check --color ${{ env.CARGO_TERM_COLOR }}
|
||||
|
||||
- name: cargo clippy
|
||||
run: cargo clippy -- -W warnings
|
||||
run: cargo clippy
|
||||
|
||||
|
||||
build:
|
||||
name: Build
|
||||
name: Test and Build on rust-${{ matrix.rust-version }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
rust-version:
|
||||
- "stable"
|
||||
- "nightly"
|
||||
- "1.78.0" # MSRV
|
||||
|
||||
steps:
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_TOOLCHAIN }}
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
toolchain: ${{ matrix.rust-version }}
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v1
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build
|
||||
run: cargo build
|
||||
- name: Build examples
|
||||
run: cargo build --examples
|
||||
run: cargo build --workspace --all-targets --all-features
|
||||
- name: Run Tests
|
||||
run: cargo test --all-targets
|
||||
run: cargo test --workspace --all-targets --all-features
|
||||
|
||||
test-docs:
|
||||
name: Check Docs - ${{ matrix.crate }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
crate:
|
||||
- dapr
|
||||
- dapr-macros
|
||||
env:
|
||||
RUSTDOCFLAGS: -Dwarnings
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@nightly
|
||||
- uses: dtolnay/install@cargo-docs-rs
|
||||
- run: cargo docs-rs -p ${{ matrix.crate }}
|
||||
|
||||
publish-dry:
|
||||
name: Publish Test
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
crate:
|
||||
- dapr
|
||||
- dapr-macros
|
||||
|
||||
steps:
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: cargo publish - ${{ matrix.crate }}
|
||||
run: cargo publish --manifest-path ${{ matrix.crate }}/Cargo.toml --dry-run
|
||||
|
||||
|
||||
publish:
|
||||
name: Publish
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, build]
|
||||
needs: [test-docs, lint, build, publish-dry]
|
||||
if: startswith(github.ref, 'refs/tags/v')
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
crate:
|
||||
- dapr
|
||||
- dapr-macros
|
||||
|
||||
steps:
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_TOOLCHAIN }}
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
toolchain: stable
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v1
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v2
|
||||
- name: cargo publish macros
|
||||
run: cargo publish --manifest-path macros/Cargo.toml --token ${{ env.CARGO_TOKEN }}
|
||||
- name: cargo publish
|
||||
run: cargo publish --token ${{ env.CARGO_TOKEN }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: cargo publish - ${{ matrix.crate }}
|
||||
env:
|
||||
CARGO_TOKEN: ${{ secrets.CARGO_TOKEN }}
|
||||
run: cargo publish --manifest-path ${{ matrix.crate }}/Cargo.toml --token ${{ env.CARGO_TOKEN }}
|
||||
|
|
|
@@ -1,15 +1,16 @@
[package]
name = "dapr-bot"
-authors = ["hey@mike.ee"]
+authors = ["Mike Nguyen <hey@mike.ee>"]
-license = "Apache-2.0"
+license-file = "LICENSE"
version = "0.1.0"
edition = "2021"
rust-version = "1.70.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
exitcode = "1.1.2"
-octocrab = "0.34.1"
-serde = { version = "1.0.197", features = ["derive"] }
+octocrab = "0.42.1"
serde_json = "1.0.114"
tokio = { version = "1.36.0", features = ["full"] }

@@ -32,15 +32,15 @@ jobs:
      FOSSA_API_KEY: b88e1f4287c3108c8751bf106fb46db6 # This is a push-only token that is safe to be exposed.
    steps:
      - name: "Checkout code"
-       uses: actions/checkout@v2
+       uses: actions/checkout@v4

      - name: "Run FOSSA Scan"
-       uses: fossas/fossa-action@main # Use a specific version if locking is preferred
+       uses: fossas/fossa-action@v1 # Use a specific version if locking is preferred
        with:
          api-key: ${{ env.FOSSA_API_KEY }}

      - name: "Run FOSSA Test"
-       uses: fossas/fossa-action@main # Use a specific version if locking is preferred
+       uses: fossas/fossa-action@v1 # Use a specific version if locking is preferred
        with:
          api-key: ${{ env.FOSSA_API_KEY }}
          run-tests: true
|
|
@ -1,6 +1,8 @@
|
|||
name: validate-examples
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '8 8 * * *'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
@ -34,6 +36,8 @@ on:
|
|||
merge_group:
|
||||
jobs:
|
||||
setup:
|
||||
permissions:
|
||||
packages: read
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GOOS: linux
|
||||
|
@ -69,22 +73,17 @@ jobs:
|
|||
repository: ${{ env.CHECKOUT_REPO }}
|
||||
ref: ${{ env.CHECKOUT_REF }}
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "stable"
|
||||
|
||||
- name: Determine latest Dapr Runtime version
|
||||
if: env.DAPR_RUNTIME_VERSION == ''
|
||||
run: |
|
||||
RUNTIME_VERSION=$(curl -s "https://api.github.com/repos/dapr/dapr/releases/latest" | grep '"tag_name"' | cut -d ':' -f2 | tr -d '",v')
|
||||
RUNTIME_VERSION=$(curl -s "https://api.github.com/repos/dapr/dapr/releases/latest" | grep '"tag_name"' | cut -d ':' -f2 | tr -d ' ",v')
|
||||
echo "DAPR_RUNTIME_VERSION=$RUNTIME_VERSION" >> $GITHUB_ENV
|
||||
echo "Found $RUNTIME_VERSION"
|
||||
|
||||
- name: Determine latest Dapr Cli version
|
||||
if: env.DAPR_CLI_VERSION == ''
|
||||
run: |
|
||||
CLI_VERSION=$(curl -s "https://api.github.com/repos/dapr/cli/releases/latest" | grep '"tag_name"' | cut -d ':' -f2 | tr -d '",v')
|
||||
CLI_VERSION=$(curl -s "https://api.github.com/repos/dapr/cli/releases/latest" | grep '"tag_name"' | cut -d ':' -f2 | tr -d ' ",v')
|
||||
echo "DAPR_CLI_VERSION=$CLI_VERSION" >> $GITHUB_ENV
|
||||
echo "Found $CLI_VERSION"
|
||||
|
||||
|
@ -104,6 +103,13 @@ jobs:
|
|||
ref: ${{ env.DAPR_REF }}
|
||||
path: dapr_runtime
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
if: env.DAPR_REF != '' || env.DAPR_CLI_REF != ''
|
||||
with:
|
||||
cache: false
|
||||
go-version: "stable"
|
||||
|
||||
- name: Build dapr cli with referenced commit and override version
|
||||
if: env.DAPR_CLI_REF != ''
|
||||
run: |
|
||||
|
@ -121,6 +127,7 @@ jobs:
|
|||
cd dapr_runtime
|
||||
make
|
||||
echo "artifactPath=~/artifacts/$GITHUB_SHA/" >> $GITHUB_ENV
|
||||
mkdir -p $HOME/artifacts/$GITHUB_SHA/
|
||||
RUNTIME_VERSION=edge
|
||||
echo "DAPR_RUNTIME_VERSION=$RUNTIME_VERSION" >> $GITHUB_ENV
|
||||
|
||||
|
@ -129,7 +136,7 @@ jobs:
|
|||
run: |
|
||||
mkdir ~/dapr_docker
|
||||
cd dapr_runtime
|
||||
docker build --build-arg "PKG_FILES=*" -f ./docker/Dockerfile ./dist/linux_amd64/release -t daprio/dapr:0.0.0-dev
|
||||
docker build --build-arg "PKG_FILES=*" -f ./docker/Dockerfile ./dist/linux_amd64/release -t daprio/dapr:9.0.0-dev
|
||||
|
||||
- name: Download Install Bundle CLI
|
||||
if: env.DAPR_REF != '' && env.DAPR_CLI_REF == ''
|
||||
|
@ -141,14 +148,14 @@ jobs:
|
|||
ls -la
|
||||
|
||||
- name: Build Custom Install Bundle
|
||||
if: env.DAPR_REF != '' && env.DAPR_CLI_REF != ''
|
||||
if: env.DAPR_REF != '' || env.DAPR_CLI_REF != ''
|
||||
run: |
|
||||
: # Create daprbundle directory
|
||||
mkdir ~/daprbundle
|
||||
cp .github/workflows/validate-examples/details.json ~/daprbundle/
|
||||
|
||||
: # Add cli
|
||||
cp ~/artifacts/$GITHUB_SHA/dapr ~/daprbundle/dapr
|
||||
cp cli/dist/linux_amd64/release/dapr ~/daprbundle/dapr
|
||||
|
||||
: # Compress executables to /dist/ appending _linux_amd64 to the name
|
||||
mkdir ~/daprbundle/dist
|
||||
|
@ -163,7 +170,7 @@ jobs:
|
|||
|
||||
: # Add docker image
|
||||
mkdir ~/daprbundle/docker
|
||||
docker save daprio/dapr:0.0.0-dev | gzip > ~/daprbundle/docker/daprio-dapr-0.0.0-dev.tar.gz
|
||||
docker save daprio/dapr:9.0.0-dev | gzip > ~/daprbundle/docker/daprio-dapr-9.0.0-dev.tar.gz
|
||||
|
||||
: # Bundle
|
||||
cd ~/daprbundle
|
||||
|
@ -195,6 +202,8 @@ jobs:
|
|||
echo "GITHUB_SHA=$GITHUB_SHA" >> "$GITHUB_OUTPUT"
|
||||
|
||||
validate-example:
|
||||
permissions:
|
||||
packages: read
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
|
@ -211,7 +220,7 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
examples:
|
||||
[ "actors", "bindings", "client", "configuration", "crypto", "invoke/grpc", "invoke/grpc-proxying", "jobs", "pubsub", "query_state", "secrets-bulk" ]
|
||||
[ "actors", "bindings", "client", "configuration", "conversation", "crypto", "invoke/grpc", "invoke/grpc-proxying", "jobs", "pubsub", "query_state", "secrets-bulk" ]
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
|
@ -242,7 +251,8 @@ jobs:
|
|||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
version: "25.2"
|
||||
version: "24.4"
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Dapr CLI ${{ env.DAPR_CLI_VERSION }}
|
||||
if: env.DAPR_CLI_VERSION != 'edge'
|
||||
|
@ -289,9 +299,6 @@ jobs:
|
|||
python -m pip install --upgrade pip
|
||||
pip install mechanical-markdown
|
||||
|
||||
- name: Cargo Build Examples
|
||||
run: cargo build --examples
|
||||
|
||||
- name: Dapr version
|
||||
run: |
|
||||
dapr version
|
||||
|
|
|
@@ -1,9 +1,9 @@
{
-    "daprd": "0.0.0-dev",
-    "dashboard": "0.0.0-dev",
-    "cli": "0.0.0-dev",
+    "daprd": "9.0.0-dev",
+    "dashboard": "9.0.0-dev",
+    "cli": "9.0.0-dev",
    "daprBinarySubDir": "dist",
    "dockerImageSubDir": "docker",
-    "daprImageName": "daprio/dapr:0.0.0-dev",
-    "daprImageFileName": "daprio-dapr-0.0.0-dev.tar.gz"
+    "daprImageName": "daprio/dapr:9.0.0-dev",
+    "daprImageFileName": "daprio-dapr-9.0.0-dev.tar.gz"
}

@@ -1,6 +1,6 @@
# Generated by Cargo
# will have compiled files and executables
-/target/
+target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html

Cargo.toml (138 changes)
@@ -1,108 +1,38 @@
-[package]
-name = "dapr"
-version = "0.15.0"
-authors = ["dapr.io"]
+[workspace]
+members = [
+    "dapr",
+    "dapr-macros",
+    "proto-gen",
+    "examples",
+]
+exclude = [
+    ".github/workflows/dapr-bot" # Bot used in GitHub workflow
+]
+resolver = "2"
+
+[workspace.dependencies]
+async-trait = "0.1"
+prost = "0.13.4"
+prost-types = "0.13.4"
+
+serde = "1.0"
+serde_json = "1.0"
+
+tokio = "1.39"
+tokio-stream = "0.1"
+tokio-util = "0.7"
+
+tonic = "0.12.3"
+tonic-build = "0.12.3"
+
+[workspace.package]
+version = "0.16.0"
+authors = [
+    "Mike Nguyen <hey@mike.ee>",
+    "The Dapr Authors <dapr@dapr.io>"
+]
edition = "2021"
license = "Apache-2.0"
repository = "https://github.com/dapr/rust-sdk"
description = "Rust SDK for dapr"
readme = "README.md"
keywords = ["microservices", "dapr"]
-
-[dependencies]
-dapr-macros = { version = "0.15.0", path = "macros" }
-futures = "0.3"
-tonic = "0.11.0"
-prost = "0.12.3"
-bytes = "1"
-prost-types = "0.12.3"
-async-trait = "0.1"
-env_logger = "0.11.2"
-log = "0.4"
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-axum = "0.7.4"
-tokio = { version = "1.29", features = ["sync"] }
-tokio-util = { version = "0.7.10", features = ["io"] }
-chrono = "0.4.38"
-base64 = "0.22.1"
-
-[build-dependencies]
-tonic-build = "0.11.0"
-
-[dev-dependencies]
-axum-test = "15.0.1"
-once_cell = "1.18.0"
-tokio = { version = "1", features = ["full"] }
-uuid = { version = "1.4.0", features = ["v4"] }
-dapr = { path = "./" }
-tokio-test = "0.4.2"
-tokio-stream = { version = "0.1" }
-
-[[example]]
-name = "actor-client"
-path = "examples/actors/client.rs"
-
-[[example]]
-name = "actor-server"
-path = "examples/actors/server.rs"
-
-[[example]]
-name = "client"
-path = "examples/client/client.rs"
-
-[[example]]
-name = "configuration"
-path = "examples/configuration/main.rs"
-
-[[example]]
-name = "crypto"
-path = "examples/crypto/main.rs"
-
-[[example]]
-name = "invoke-grpc-client"
-path = "examples/invoke/grpc/client.rs"
-
-[[example]]
-name = "invoke-grpc-server"
-path = "examples/invoke/grpc/server.rs"
-
-[[example]]
-name = "invoke-grpc-proxying-client"
-path = "examples/invoke/grpc-proxying/client.rs"
-
-[[example]]
-name = "invoke-grpc-proxying-server"
-path = "examples/invoke/grpc-proxying/server.rs"
-
-[[example]]
-name = "jobs"
-path = "examples/jobs/jobs.rs"
-
-[[example]]
-name = "publisher"
-path = "examples/pubsub/publisher.rs"
-
-[[example]]
-name = "subscriber"
-path = "examples/pubsub/subscriber.rs"
-
-[[example]]
-name = "output-bindings"
-path = "examples/bindings/output.rs"
-
-[[example]]
-name = "input-bindings"
-path = "examples/bindings/input.rs"
-
-[[example]]
-name = "query_state_q1"
-path = "examples/query_state/query1.rs"
-
-[[example]]
-name = "query_state_q2"
-path = "examples/query_state/query2.rs"
-
-[[example]]
-name = "secrets-bulk"
-path = "examples/secrets-bulk/app.rs"
+rust-version = "1.78.0" # MSRV

@@ -0,0 +1,7 @@
+.PHONY: proto-gen
+proto-gen:
+	cargo run --bin proto-gen
+
+.PHONY: check-diff-proto
+check-diff-proto:
+	git diff --exit-code ./proto/

README.md (26 changes)
@@ -43,9 +43,9 @@ resiliency.

## Prerequisites

-Ensure you have Rust version 1.79 or higher installed. If not, install Rust [here](https://www.rust-lang.org/tools/install).
+Ensure you have Rust version 1.78 or higher installed. If not, install Rust [here](https://www.rust-lang.org/tools/install).

-You will also need to install [protoc](https://github.com/protocolbuffers/protobuf#protobuf-compiler-installation).
+These crates no longer require protoc unless to recompile the protobuf files.

## How to use

@@ -53,14 +53,15 @@ Add the following to your `Cargo.toml` file:

```toml
[dependencies]
-dapr = "0.13.0"
+dapr = "0.16.0"
```

Here's a basic example to create a client:

-```rust
+```Rust
use dapr;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Get the Dapr port and create a connection
    let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;

@@ -68,11 +69,12 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {

    // Create the client
    let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
}
```
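The snippet above stops short of showing how `addr` is built or how the client is used. A fuller sketch of the same flow, assuming a local sidecar reachable at `https://127.0.0.1` with `DAPR_GRPC_PORT` exported by the Dapr CLI; the commented `connect_with_port` call is the variant added in this release:

```rust
use dapr::client::TonicClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `connect` appends the port taken from the DAPR_GRPC_PORT environment variable.
    let addr = "https://127.0.0.1".to_string();
    let mut client = dapr::Client::<TonicClient>::connect(addr).await?;

    // Alternative added in this release: pass the port explicitly instead of
    // relying on DAPR_GRPC_PORT.
    // let mut client = dapr::Client::<TonicClient>::connect_with_port(
    //     "https://127.0.0.1".to_string(), "50001".to_string()).await?;

    // `client` is now ready for state, pub/sub, bindings, secrets, and so on.
    let _ = &mut client;
    Ok(())
}
```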
## Explore more examples

-Browse through more examples to understand the SDK better: [View examples](./examples)
+Browse through more examples to understand the SDK better: [View examples](https://github.com/dapr/rust-sdk/tree/main/examples)

## Building

@@ -82,9 +84,7 @@ To build the SDK run:
cargo build
```

->Note: The protobuf client generation is built into `cargo build` process so updating the proto files under `dapr/` is enough to update the protobuf client.

-## Updating .proto files from upstream Dapr
+## Developing (Updating .proto files from upstream Dapr)

To fetch the latest .proto files from Dapr execute the script `update-protos.sh`:

@@ -95,7 +95,15 @@ To fetch the latest .proto files from Dapr execute the script `update-protos.sh`
By default, the script fetches the latest proto updates from the master branch of the Dapr repository. If you need to choose a specific release or version, use the -v flag:

```bash
-./update-protos.sh -v v1.12.0
+./update-protos.sh -v v1.14.0
```

+You will also need to install [protoc](https://github.com/protocolbuffers/protobuf#protobuf-compiler-installation).
+
+Protos can then be compiled using:
+
+```bash
+cargo run proto-gen
+```

### Contact Us
build.rs (15 changes)
@@ -1,15 +0,0 @@
-// use std::env;
-
-fn main() -> Result<(), std::io::Error> {
-    // env::set_var("OUT_DIR", "src");
-    tonic_build::configure().build_server(true).compile(
-        &[
-            "dapr/proto/common/v1/common.proto",
-            "dapr/proto/runtime/v1/dapr.proto",
-            "dapr/proto/runtime/v1/appcallback.proto",
-            "examples/invoke/proto/helloworld.proto",
-        ],
-        &["."],
-    )?;
-    Ok(())
-}

@@ -0,0 +1,17 @@
+[package]
+name = "dapr-macros"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+description = "Dapr Rust SDK (Macros)"
+license.workspace = true
+repository.workspace = true
+rust-version.workspace = true
+
+[lib]
+proc-macro = true
+
+[dependencies]
+proc-macro2 = "1.0"
+quote = "1.0"
+syn = { version = "2.0", features = ["full"] }

@ -1,10 +1,11 @@
|
|||
use proc_macro::TokenStream;
|
||||
use std::iter;
|
||||
|
||||
use proc_macro2::TokenTree;
|
||||
use quote::{format_ident, quote};
|
||||
use syn::{Ident, LitStr, parse_macro_input};
|
||||
use syn::parse::{Parse, ParseStream};
|
||||
use syn::{parse_macro_input, Ident, LitStr};
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
macro_rules! derive_parse {(
|
||||
@derive_only
|
||||
|
@ -80,7 +81,7 @@ pub fn actor(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
|||
Ok(actor_struct) => actor_struct.ident.clone(),
|
||||
Err(_) => match syn::parse::<syn::ItemType>(item.clone()) {
|
||||
Ok(ty) => ty.ident.clone(),
|
||||
Err(e) => panic!("Error parsing actor struct: {}", e),
|
||||
Err(e) => panic!("Error parsing actor struct: {e}"),
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -152,7 +153,6 @@ pub fn topic(args: TokenStream, input: TokenStream) -> TokenStream {
|
|||
let struct_name = name
|
||||
.to_string()
|
||||
.split('_')
|
||||
.into_iter()
|
||||
.map(|i| {
|
||||
let mut chars: Vec<char> = i.chars().collect();
|
||||
chars[0] = chars[0].to_ascii_uppercase();
|
||||
|
@ -172,7 +172,6 @@ pub fn topic(args: TokenStream, input: TokenStream) -> TokenStream {
|
|||
.to_string()
|
||||
.replace(['(', ')'], "")
|
||||
.split(':')
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter(|&(i, _)| i % 2 != 0)
|
||||
.map(|(_, i)| i.trim().to_string())
|
||||
|
@ -236,4 +235,4 @@ pub fn topic(args: TokenStream, input: TokenStream) -> TokenStream {
|
|||
};
|
||||
|
||||
tokens.into()
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,40 @@
+[package]
+name = "dapr"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+repository.workspace = true
+description = "Rust SDK for dapr"
+readme = "README.md"
+keywords = ["microservices", "dapr"]
+rust-version.workspace = true
+
+[dependencies]
+async-trait = { workspace = true }
+axum = "0.7"
+chrono = "0.4"
+futures = "0.3"
+log = "0.4"
+prost = { workspace = true }
+prost-types = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+tonic = { workspace = true }
+tokio = { workspace = true, features = ["sync"] }
+tokio-util = { workspace = true, features = ["io"] }
+
+
+[dev-dependencies]
+axum-test = "=16.4.0" # TODO: Remove problematic dep
+litemap = "=0.7.4" # TODO: Remove pinned - linked to axum_test
+zerofrom = "=0.1.5" # TODO: Remove pinned - linked to axum_test
+reserve-port = "=2.1.0" # TODO: Remove pinned - linked to axum_test
+idna_adapter = "=1.2.0"
+
+once_cell = "1.19"
+dapr = { path = "./" }
+dapr-macros = { path = "../dapr-macros" }
+tokio = { workspace = true, features = ["full"] }
+uuid = { version = "1.10", features = ["v4"] }
+tokio-stream = { workspace = true }

@@ -0,0 +1 @@
+../README.md

@ -1,38 +1,36 @@
|
|||
use crate::dapr::proto::runtime::v1::app_callback_server::AppCallback;
|
||||
use crate::dapr::proto::{common, runtime};
|
||||
use std::collections::HashMap;
|
||||
use tonic::{Code, Request, Response, Status};
|
||||
|
||||
use crate::dapr::dapr::proto::runtime::v1::app_callback_server::AppCallback;
|
||||
use crate::dapr::dapr::proto::{common, runtime};
|
||||
use crate::dapr::*;
|
||||
|
||||
/// InvokeRequest is the message to invoke a method with the data.
|
||||
pub type InvokeRequest = dapr::proto::common::v1::InvokeRequest;
|
||||
pub type InvokeRequest = common::v1::InvokeRequest;
|
||||
|
||||
/// InvokeResponse is the response message inclduing data and its content type
|
||||
/// from app callback.
|
||||
pub type InvokeResponse = dapr::proto::common::v1::InvokeResponse;
|
||||
pub type InvokeResponse = common::v1::InvokeResponse;
|
||||
|
||||
/// ListTopicSubscriptionsResponse is the message including the list of the subscribing topics.
|
||||
pub type ListTopicSubscriptionsResponse = dapr::proto::runtime::v1::ListTopicSubscriptionsResponse;
|
||||
pub type ListTopicSubscriptionsResponse = runtime::v1::ListTopicSubscriptionsResponse;
|
||||
|
||||
/// TopicSubscription represents a topic and it's metadata (session id etc.)
|
||||
pub type TopicSubscription = dapr::proto::runtime::v1::TopicSubscription;
|
||||
pub type TopicSubscription = runtime::v1::TopicSubscription;
|
||||
|
||||
/// TopicEventRequest message is compatiable with CloudEvent spec v1.0.
|
||||
pub type TopicEventRequest = dapr::proto::runtime::v1::TopicEventRequest;
|
||||
pub type TopicEventRequest = runtime::v1::TopicEventRequest;
|
||||
|
||||
/// TopicEventResponse is response from app on published message
|
||||
pub type TopicEventResponse = dapr::proto::runtime::v1::TopicEventResponse;
|
||||
pub type TopicEventResponse = runtime::v1::TopicEventResponse;
|
||||
|
||||
/// ListInputBindingsResponse is the message including the list of input bindings.
|
||||
pub type ListInputBindingsResponse = dapr::proto::runtime::v1::ListInputBindingsResponse;
|
||||
pub type ListInputBindingsResponse = runtime::v1::ListInputBindingsResponse;
|
||||
|
||||
/// BindingEventRequest represents input bindings event.
|
||||
pub type BindingEventRequest = dapr::proto::runtime::v1::BindingEventRequest;
|
||||
pub type BindingEventRequest = runtime::v1::BindingEventRequest;
|
||||
|
||||
/// BindingEventResponse includes operations to save state or
|
||||
/// send data to output bindings optionally.
|
||||
pub type BindingEventResponse = dapr::proto::runtime::v1::BindingEventResponse;
|
||||
pub type BindingEventResponse = runtime::v1::BindingEventResponse;
|
||||
|
||||
impl ListTopicSubscriptionsResponse {
|
||||
/// Create `ListTopicSubscriptionsResponse` with a topic.
|
|
@ -1,5 +1,4 @@
|
|||
use std::collections::HashMap;
|
||||
use std::fmt::Debug;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use futures::StreamExt;
|
||||
|
@ -11,7 +10,7 @@ use tonic::codegen::tokio_stream;
|
|||
use tonic::{transport::Channel as TonicChannel, Request};
|
||||
use tonic::{Status, Streaming};
|
||||
|
||||
use crate::dapr::dapr::proto::{common::v1 as common_v1, runtime::v1 as dapr_v1};
|
||||
use crate::dapr::proto::{common::v1 as common_v1, runtime::v1 as dapr_v1};
|
||||
use crate::error::Error;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -26,7 +25,27 @@ impl<T: DaprInterface> Client<T> {
|
|||
pub async fn connect(addr: String) -> Result<Self, Error> {
|
||||
// Get the Dapr port to create a connection
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;
|
||||
let address = format!("{}:{}", addr, port);
|
||||
let address = format!("{addr}:{port}");
|
||||
|
||||
Ok(Client(T::connect(address).await?))
|
||||
}
|
||||
|
||||
/// Connect to the Dapr sidecar with a specific port.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `addr` - Address of gRPC server to connect to.
|
||||
/// * `port` - Port of the gRPC server to connect to.
|
||||
pub async fn connect_with_port(addr: String, port: String) -> Result<Self, Error> {
|
||||
// assert that port is between 1 and 65535
|
||||
let port: u16 = match port.parse::<u16>() {
|
||||
Ok(p) => p,
|
||||
Err(_) => {
|
||||
panic!("Port must be a number between 1 and 65535");
|
||||
}
|
||||
};
|
||||
|
||||
let address = format!("{addr}:{port}");
|
||||
|
||||
Ok(Client(T::connect(address).await?))
|
||||
}
|
||||
|
@ -173,6 +192,7 @@ impl<T: DaprInterface> Client<T> {
|
|||
///
|
||||
/// * `store_name` - The name of state store.
|
||||
/// * `key` - The key of the desired state.
|
||||
/// * `metadata` - Any metadata pairs to include in the request.
|
||||
pub async fn get_state<S>(
|
||||
&mut self,
|
||||
store_name: S,
|
||||
|
@ -199,19 +219,54 @@ impl<T: DaprInterface> Client<T> {
|
|||
|
||||
/// Save an array of state objects.
|
||||
///
|
||||
/// This does not include any etag or metadata options.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `store_name` - The name of state store.
|
||||
/// * `states` - The array of the state key values.
|
||||
pub async fn save_state<I, K>(&mut self, store_name: K, states: I) -> Result<(), Error>
|
||||
/// * `key` - The key for the value
|
||||
/// * `value` - The value to be saved for the key
|
||||
/// * `etag` - The etag identifier
|
||||
/// * `metadata` - Any metadata pairs to include in the request.
|
||||
/// * `options` - Any state option
|
||||
pub async fn save_state<S>(
|
||||
&mut self,
|
||||
store_name: S,
|
||||
key: S,
|
||||
value: Vec<u8>,
|
||||
etag: Option<Etag>,
|
||||
metadata: Option<HashMap<String, String>>,
|
||||
options: Option<StateOptions>,
|
||||
) -> Result<(), Error>
|
||||
where
|
||||
I: IntoIterator<Item = (K, Vec<u8>)>,
|
||||
K: Into<String>,
|
||||
S: Into<String>,
|
||||
{
|
||||
let states = vec![StateItem {
|
||||
key: key.into(),
|
||||
value,
|
||||
etag,
|
||||
metadata: metadata.unwrap_or_default(),
|
||||
options,
|
||||
}];
|
||||
|
||||
self.save_bulk_states(store_name, states).await
|
||||
}
|
||||
|
||||
/// Save an array of state objects.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `store_name` - The name of state store.
|
||||
/// * `items` - The array of the state items.
|
||||
pub async fn save_bulk_states<S, I>(&mut self, store_name: S, items: I) -> Result<(), Error>
|
||||
where
|
||||
S: Into<String>,
|
||||
I: Into<Vec<StateItem>>,
|
||||
{
|
||||
self.0
|
||||
.save_state(SaveStateRequest {
|
||||
store_name: store_name.into(),
|
||||
states: states.into_iter().map(|pair| pair.into()).collect(),
|
||||
states: items.into(),
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
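For reference, a hedged sketch of how application code might call the reshaped state API above; the store name "statestore" and the keys and values are illustrative, and the single-key form simply wraps one `StateItem` and delegates to `save_bulk_states`, as the diff shows:

```rust
use dapr::client::{StateItem, TonicClient};

async fn save_examples(client: &mut dapr::Client<TonicClient>) -> Result<(), dapr::error::Error> {
    // New single-key signature: key and value plus optional etag, metadata and options.
    client
        .save_state("statestore", "order-1", b"pending".to_vec(), None, None, None)
        .await?;

    // Bulk form: build StateItems explicitly and hand the whole batch over.
    let items = vec![StateItem {
        key: "order-2".to_string(),
        value: b"shipped".to_vec(),
        etag: None,
        metadata: Default::default(),
        options: None,
    }];
    client.save_bulk_states("statestore", items).await
}
```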
@ -427,7 +482,7 @@ impl<T: DaprInterface> Client<T> {
|
|||
self.0.unsubscribe_configuration(request).await
|
||||
}
|
||||
|
||||
/// Encrypt binary data using Dapr. returns Vec<StreamPayload> to be used in decrypt method
|
||||
/// Encrypt binary data using Dapr. returns `Vec<StreamPayload>` to be used in decrypt method
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
|
@ -468,11 +523,11 @@ impl<T: DaprInterface> Client<T> {
|
|||
self.0.encrypt(requested_items).await
|
||||
}
|
||||
|
||||
/// Decrypt binary data using Dapr. returns Vec<u8>.
|
||||
/// Decrypt binary data using Dapr. returns `Vec<u8>`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `encrypted` - Encrypted data usually returned from encrypted, Vec<StreamPayload>
|
||||
/// * `encrypted` - Encrypted data usually returned from encrypted, `Vec<StreamPayload>`
|
||||
/// * `options` - Decryption request options.
|
||||
pub async fn decrypt(
|
||||
&mut self,
|
||||
|
@ -534,6 +589,18 @@ impl<T: DaprInterface> Client<T> {
|
|||
};
|
||||
self.0.delete_job_alpha1(request).await
|
||||
}
|
||||
|
||||
/// Converse with an LLM
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * ConversationRequest - The request containing inputs to send to the LLM
|
||||
pub async fn converse_alpha1(
|
||||
&mut self,
|
||||
request: ConversationRequest,
|
||||
) -> Result<ConversationResponse, Error> {
|
||||
self.0.converse_alpha1(request).await
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
|
@ -596,6 +663,11 @@ pub trait DaprInterface: Sized {
|
|||
&mut self,
|
||||
request: DeleteJobRequest,
|
||||
) -> Result<DeleteJobResponse, Error>;
|
||||
|
||||
async fn converse_alpha1(
|
||||
&mut self,
|
||||
request: ConversationRequest,
|
||||
) -> Result<ConversationResponse, Error>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
|
@ -604,6 +676,13 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
Ok(dapr_v1::dapr_client::DaprClient::connect(addr).await?)
|
||||
}
|
||||
|
||||
async fn publish_event(&mut self, request: PublishEventRequest) -> Result<(), Error> {
|
||||
self.publish_event(Request::new(request))
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn invoke_service(
|
||||
&mut self,
|
||||
request: InvokeServiceRequest,
|
||||
|
@ -624,13 +703,6 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
.into_inner())
|
||||
}
|
||||
|
||||
async fn publish_event(&mut self, request: PublishEventRequest) -> Result<(), Error> {
|
||||
self.publish_event(Request::new(request))
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_secret(&mut self, request: GetSecretRequest) -> Result<GetSecretResponse, Error> {
|
||||
Ok(self.get_secret(Request::new(request)).await?.into_inner())
|
||||
}
|
||||
|
@ -649,6 +721,11 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
Ok(self.get_state(Request::new(request)).await?.into_inner())
|
||||
}
|
||||
|
||||
async fn save_state(&mut self, request: SaveStateRequest) -> Result<(), Error> {
|
||||
self.save_state(Request::new(request)).await?.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn query_state_alpha1(
|
||||
&mut self,
|
||||
request: QueryStateRequest,
|
||||
|
@ -659,11 +736,6 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
.into_inner())
|
||||
}
|
||||
|
||||
async fn save_state(&mut self, request: SaveStateRequest) -> Result<(), Error> {
|
||||
self.save_state(Request::new(request)).await?.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_state(&mut self, request: DeleteStateRequest) -> Result<(), Error> {
|
||||
self.delete_state(Request::new(request)).await?.into_inner();
|
||||
Ok(())
|
||||
|
@ -722,7 +794,7 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
.into_inner())
|
||||
}
|
||||
|
||||
/// Encrypt binary data using Dapr. returns Vec<StreamPayload> to be used in decrypt method
|
||||
/// Encrypt binary data using Dapr. returns `Vec<StreamPayload>` to be used in decrypt method
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
|
@ -746,11 +818,11 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
Ok(return_data)
|
||||
}
|
||||
|
||||
/// Decrypt binary data using Dapr. returns Vec<u8>.
|
||||
/// Decrypt binary data using Dapr. returns `Vec<u8>`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `encrypted` - Encrypted data usually returned from encrypted, Vec<StreamPayload>
|
||||
/// * `encrypted` - Encrypted data usually returned from encrypted, `Vec<StreamPayload>`
|
||||
/// * `options` - Decryption request options.
|
||||
async fn decrypt(&mut self, request: Vec<DecryptRequest>) -> Result<Vec<u8>, Status> {
|
||||
let request = Request::new(tokio_stream::iter(request));
|
||||
|
@ -790,6 +862,16 @@ impl DaprInterface for dapr_v1::dapr_client::DaprClient<TonicChannel> {
|
|||
.await?
|
||||
.into_inner())
|
||||
}
|
||||
|
||||
async fn converse_alpha1(
|
||||
&mut self,
|
||||
request: ConversationRequest,
|
||||
) -> Result<ConversationResponse, Error> {
|
||||
Ok(self
|
||||
.converse_alpha1(Request::new(request))
|
||||
.await?
|
||||
.into_inner())
|
||||
}
|
||||
}
|
||||
|
||||
/// A request from invoking a service
|
||||
|
@ -816,6 +898,15 @@ pub type GetStateResponse = dapr_v1::GetStateResponse;
|
|||
/// A request for saving state
|
||||
pub type SaveStateRequest = dapr_v1::SaveStateRequest;
|
||||
|
||||
/// A state item
|
||||
pub type StateItem = common_v1::StateItem;
|
||||
|
||||
/// State options
|
||||
pub type StateOptions = common_v1::StateOptions;
|
||||
|
||||
/// Etag identifier
|
||||
pub type Etag = common_v1::Etag;
|
||||
|
||||
/// A request for querying state
|
||||
pub type QueryStateRequest = dapr_v1::QueryStateRequest;
|
||||
|
||||
|
@ -876,39 +967,51 @@ pub type UnsubscribeConfigurationResponse = dapr_v1::UnsubscribeConfigurationRes
|
|||
pub type TonicClient = dapr_v1::dapr_client::DaprClient<TonicChannel>;
|
||||
|
||||
/// Encryption gRPC request
|
||||
pub type EncryptRequest = crate::dapr::dapr::proto::runtime::v1::EncryptRequest;
|
||||
pub type EncryptRequest = crate::dapr::proto::runtime::v1::EncryptRequest;
|
||||
|
||||
/// Decrypt gRPC request
|
||||
pub type DecryptRequest = crate::dapr::dapr::proto::runtime::v1::DecryptRequest;
|
||||
pub type DecryptRequest = crate::dapr::proto::runtime::v1::DecryptRequest;
|
||||
|
||||
/// Encryption request options
|
||||
pub type EncryptRequestOptions = crate::dapr::dapr::proto::runtime::v1::EncryptRequestOptions;
|
||||
pub type EncryptRequestOptions = crate::dapr::proto::runtime::v1::EncryptRequestOptions;
|
||||
|
||||
/// Decryption request options
|
||||
pub type DecryptRequestOptions = crate::dapr::dapr::proto::runtime::v1::DecryptRequestOptions;
|
||||
pub type DecryptRequestOptions = crate::dapr::proto::runtime::v1::DecryptRequestOptions;
|
||||
|
||||
/// The basic job structure
|
||||
pub type Job = crate::dapr::dapr::proto::runtime::v1::Job;
|
||||
pub type Job = crate::dapr::proto::runtime::v1::Job;
|
||||
|
||||
/// A request to schedule a job
|
||||
pub type ScheduleJobRequest = crate::dapr::dapr::proto::runtime::v1::ScheduleJobRequest;
|
||||
pub type ScheduleJobRequest = crate::dapr::proto::runtime::v1::ScheduleJobRequest;
|
||||
|
||||
/// A response from a schedule job request
|
||||
pub type ScheduleJobResponse = crate::dapr::dapr::proto::runtime::v1::ScheduleJobResponse;
|
||||
pub type ScheduleJobResponse = crate::dapr::proto::runtime::v1::ScheduleJobResponse;
|
||||
|
||||
/// A request to get a job
|
||||
pub type GetJobRequest = crate::dapr::dapr::proto::runtime::v1::GetJobRequest;
|
||||
pub type GetJobRequest = crate::dapr::proto::runtime::v1::GetJobRequest;
|
||||
|
||||
/// A response from a get job request
|
||||
pub type GetJobResponse = crate::dapr::dapr::proto::runtime::v1::GetJobResponse;
|
||||
pub type GetJobResponse = crate::dapr::proto::runtime::v1::GetJobResponse;
|
||||
|
||||
/// A request to delete a job
|
||||
pub type DeleteJobRequest = crate::dapr::dapr::proto::runtime::v1::DeleteJobRequest;
|
||||
pub type DeleteJobRequest = crate::dapr::proto::runtime::v1::DeleteJobRequest;
|
||||
|
||||
/// A response from a delete job request
|
||||
pub type DeleteJobResponse = crate::dapr::dapr::proto::runtime::v1::DeleteJobResponse;
|
||||
pub type DeleteJobResponse = crate::dapr::proto::runtime::v1::DeleteJobResponse;
|
||||
|
||||
type StreamPayload = crate::dapr::dapr::proto::common::v1::StreamPayload;
|
||||
/// A request to conversate with an LLM
|
||||
pub type ConversationRequest = crate::dapr::proto::runtime::v1::ConversationRequest;
|
||||
|
||||
/// A response from conversating with an LLM
|
||||
pub type ConversationResponse = crate::dapr::proto::runtime::v1::ConversationResponse;
|
||||
|
||||
/// A result from an interacting with a LLM
|
||||
pub type ConversationResult = crate::dapr::proto::runtime::v1::ConversationResult;
|
||||
|
||||
/// An input to the conversation
|
||||
pub type ConversationInput = crate::dapr::proto::runtime::v1::ConversationInput;
|
||||
|
||||
type StreamPayload = crate::dapr::proto::common::v1::StreamPayload;
|
||||
impl<K> From<(K, Vec<u8>)> for common_v1::StateItem
|
||||
where
|
||||
K: Into<String>,
|
||||
|
@ -930,7 +1033,6 @@ impl<T: AsyncRead> ReaderStream<T> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct JobBuilder {
|
||||
schedule: Option<String>,
|
||||
data: Option<Any>,
|
||||
|
@ -989,3 +1091,62 @@ impl JobBuilder {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ConversationInputBuilder {
|
||||
content: String,
|
||||
role: Option<String>,
|
||||
scrub_pii: Option<bool>,
|
||||
}
|
||||
|
||||
impl ConversationInputBuilder {
|
||||
pub fn new(message: &str) -> Self {
|
||||
ConversationInputBuilder {
|
||||
content: message.to_string(),
|
||||
role: None,
|
||||
scrub_pii: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> ConversationInput {
|
||||
ConversationInput {
|
||||
content: self.content,
|
||||
role: self.role,
|
||||
scrub_pii: self.scrub_pii,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ConversationRequestBuilder {
|
||||
name: String,
|
||||
context_id: Option<String>,
|
||||
inputs: Vec<ConversationInput>,
|
||||
parameters: HashMap<String, Any>,
|
||||
metadata: HashMap<String, String>,
|
||||
scrub_pii: Option<bool>,
|
||||
temperature: Option<f64>,
|
||||
}
|
||||
impl ConversationRequestBuilder {
|
||||
pub fn new(name: &str, inputs: Vec<ConversationInput>) -> Self {
|
||||
ConversationRequestBuilder {
|
||||
name: name.to_string(),
|
||||
context_id: None,
|
||||
inputs,
|
||||
parameters: Default::default(),
|
||||
metadata: Default::default(),
|
||||
scrub_pii: None,
|
||||
temperature: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> ConversationRequest {
|
||||
ConversationRequest {
|
||||
name: self.name,
|
||||
context_id: self.context_id,
|
||||
inputs: self.inputs,
|
||||
parameters: self.parameters,
|
||||
metadata: self.metadata,
|
||||
scrub_pii: self.scrub_pii,
|
||||
temperature: self.temperature,
|
||||
}
|
||||
}
|
||||
}
|
|
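Putting the pieces together, a sketch of how the new conversation builders might be combined with the `converse_alpha1` client call added earlier in this diff; the component name "echo" is a placeholder for whichever conversation component the sidecar has configured:

```rust
use dapr::client::{ConversationInputBuilder, ConversationRequestBuilder, TonicClient};

async fn ask_llm(client: &mut dapr::Client<TonicClient>) -> Result<(), dapr::error::Error> {
    // Wrap a single prompt as a ConversationInput.
    let input = ConversationInputBuilder::new("What is Dapr?").build();

    // Target the configured conversation component and send the request.
    let request = ConversationRequestBuilder::new("echo", vec![input]).build();
    let response = client.converse_alpha1(request).await?;

    // ConversationResponse is a prost message, so Debug printing works out of the box.
    println!("{response:?}");
    Ok(())
}
```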
@@ -0,0 +1,3 @@
+# dapr
+
+These are compiled by running the proto-gen crate

@ -0,0 +1,272 @@
|
|||
// This file is @generated by prost-build.
|
||||
/// HTTPExtension includes HTTP verb and querystring
|
||||
/// when Dapr runtime delivers HTTP content.
|
||||
///
|
||||
/// For example, when callers calls http invoke api
|
||||
/// `POST <http://localhost:3500/v1.0/invoke/<app_id>/method/<method>?query1=value1&query2=value2`>
|
||||
///
|
||||
/// Dapr runtime will parse POST as a verb and extract querystring to quersytring map.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HttpExtension {
|
||||
/// Required. HTTP verb.
|
||||
#[prost(enumeration = "http_extension::Verb", tag = "1")]
|
||||
pub verb: i32,
|
||||
/// Optional. querystring represents an encoded HTTP url query string in the following format: name=value&name2=value2
|
||||
#[prost(string, tag = "2")]
|
||||
pub querystring: ::prost::alloc::string::String,
|
||||
}
|
||||
/// Nested message and enum types in `HTTPExtension`.
|
||||
pub mod http_extension {
|
||||
/// Type of HTTP 1.1 Methods
|
||||
/// RFC 7231: <https://tools.ietf.org/html/rfc7231#page-24>
|
||||
/// RFC 5789: <https://datatracker.ietf.org/doc/html/rfc5789>
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
::prost::Enumeration
|
||||
)]
|
||||
#[repr(i32)]
|
||||
pub enum Verb {
|
||||
None = 0,
|
||||
Get = 1,
|
||||
Head = 2,
|
||||
Post = 3,
|
||||
Put = 4,
|
||||
Delete = 5,
|
||||
Connect = 6,
|
||||
Options = 7,
|
||||
Trace = 8,
|
||||
Patch = 9,
|
||||
}
|
||||
impl Verb {
|
||||
/// String value of the enum field names used in the ProtoBuf definition.
|
||||
///
|
||||
/// The values are not transformed in any way and thus are considered stable
|
||||
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
|
||||
pub fn as_str_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::None => "NONE",
|
||||
Self::Get => "GET",
|
||||
Self::Head => "HEAD",
|
||||
Self::Post => "POST",
|
||||
Self::Put => "PUT",
|
||||
Self::Delete => "DELETE",
|
||||
Self::Connect => "CONNECT",
|
||||
Self::Options => "OPTIONS",
|
||||
Self::Trace => "TRACE",
|
||||
Self::Patch => "PATCH",
|
||||
}
|
||||
}
|
||||
/// Creates an enum from field names used in the ProtoBuf definition.
|
||||
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
|
||||
match value {
|
||||
"NONE" => Some(Self::None),
|
||||
"GET" => Some(Self::Get),
|
||||
"HEAD" => Some(Self::Head),
|
||||
"POST" => Some(Self::Post),
|
||||
"PUT" => Some(Self::Put),
|
||||
"DELETE" => Some(Self::Delete),
|
||||
"CONNECT" => Some(Self::Connect),
|
||||
"OPTIONS" => Some(Self::Options),
|
||||
"TRACE" => Some(Self::Trace),
|
||||
"PATCH" => Some(Self::Patch),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/// InvokeRequest is the message to invoke a method with the data.
|
||||
/// This message is used in InvokeService of Dapr gRPC Service and OnInvoke
|
||||
/// of AppCallback gRPC service.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct InvokeRequest {
|
||||
/// Required. method is a method name which will be invoked by caller.
|
||||
#[prost(string, tag = "1")]
|
||||
pub method: ::prost::alloc::string::String,
|
||||
/// Required in unary RPCs. Bytes value or Protobuf message which caller sent.
|
||||
/// Dapr treats Any.value as bytes type if Any.type_url is unset.
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub data: ::core::option::Option<::prost_types::Any>,
|
||||
/// The type of data content.
|
||||
///
|
||||
/// This field is required if data delivers http request body
|
||||
/// Otherwise, this is optional.
|
||||
#[prost(string, tag = "3")]
|
||||
pub content_type: ::prost::alloc::string::String,
|
||||
/// HTTP specific fields if request conveys http-compatible request.
|
||||
///
|
||||
/// This field is required for http-compatible request. Otherwise,
|
||||
/// this field is optional.
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub http_extension: ::core::option::Option<HttpExtension>,
|
||||
}
|
||||
/// InvokeResponse is the response message including data and its content type
|
||||
/// from app callback.
|
||||
/// This message is used in InvokeService of Dapr gRPC Service and OnInvoke
|
||||
/// of AppCallback gRPC service.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct InvokeResponse {
|
||||
/// Required in unary RPCs. The content body of InvokeService response.
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub data: ::core::option::Option<::prost_types::Any>,
|
||||
/// Required. The type of data content.
|
||||
#[prost(string, tag = "2")]
|
||||
pub content_type: ::prost::alloc::string::String,
|
||||
}
|
||||
/// Chunk of data sent in a streaming request or response.
|
||||
/// This is used in requests including InternalInvokeRequestStream.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct StreamPayload {
|
||||
/// Data sent in the chunk.
|
||||
/// The amount of data included in each chunk is up to the discretion of the sender, and can be empty.
|
||||
/// Additionally, the amount of data doesn't need to be fixed and subsequent messages can send more, or less, data.
|
||||
/// Receivers must not make assumptions about the number of bytes they'll receive in each chunk.
|
||||
#[prost(bytes = "vec", tag = "1")]
|
||||
pub data: ::prost::alloc::vec::Vec<u8>,
|
||||
/// Sequence number. This is a counter that starts from 0 and increments by 1 on each chunk sent.
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub seq: u64,
|
||||
}
|
||||
/// StateItem represents state key, value, and additional options to save state.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct StateItem {
|
||||
/// Required. The state key
|
||||
#[prost(string, tag = "1")]
|
||||
pub key: ::prost::alloc::string::String,
|
||||
/// Required. The state data for key
|
||||
#[prost(bytes = "vec", tag = "2")]
|
||||
pub value: ::prost::alloc::vec::Vec<u8>,
|
||||
/// The entity tag which represents the specific version of data.
|
||||
/// The exact ETag format is defined by the corresponding data store.
|
||||
#[prost(message, optional, tag = "3")]
|
||||
pub etag: ::core::option::Option<Etag>,
|
||||
/// The metadata which will be passed to state store component.
|
||||
#[prost(map = "string, string", tag = "4")]
|
||||
pub metadata: ::std::collections::HashMap<
|
||||
::prost::alloc::string::String,
|
||||
::prost::alloc::string::String,
|
||||
>,
|
||||
/// Options for concurrency and consistency to save the state.
|
||||
#[prost(message, optional, tag = "5")]
|
||||
pub options: ::core::option::Option<StateOptions>,
|
||||
}
|
||||
/// Etag represents a state item version
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct Etag {
|
||||
/// value sets the etag value
|
||||
#[prost(string, tag = "1")]
|
||||
pub value: ::prost::alloc::string::String,
|
||||
}
|
||||
/// StateOptions configures concurrency and consistency for state operations
|
||||
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
|
||||
pub struct StateOptions {
|
||||
#[prost(enumeration = "state_options::StateConcurrency", tag = "1")]
|
||||
pub concurrency: i32,
|
||||
#[prost(enumeration = "state_options::StateConsistency", tag = "2")]
|
||||
pub consistency: i32,
|
||||
}
|
||||
/// Nested message and enum types in `StateOptions`.
|
||||
pub mod state_options {
|
||||
/// Enum describing the supported concurrency for state.
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
::prost::Enumeration
|
||||
)]
|
||||
#[repr(i32)]
|
||||
pub enum StateConcurrency {
|
||||
ConcurrencyUnspecified = 0,
|
||||
ConcurrencyFirstWrite = 1,
|
||||
ConcurrencyLastWrite = 2,
|
||||
}
|
||||
impl StateConcurrency {
|
||||
/// String value of the enum field names used in the ProtoBuf definition.
|
||||
///
|
||||
/// The values are not transformed in any way and thus are considered stable
|
||||
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
|
||||
pub fn as_str_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::ConcurrencyUnspecified => "CONCURRENCY_UNSPECIFIED",
|
||||
Self::ConcurrencyFirstWrite => "CONCURRENCY_FIRST_WRITE",
|
||||
Self::ConcurrencyLastWrite => "CONCURRENCY_LAST_WRITE",
|
||||
}
|
||||
}
|
||||
/// Creates an enum from field names used in the ProtoBuf definition.
|
||||
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
|
||||
match value {
|
||||
"CONCURRENCY_UNSPECIFIED" => Some(Self::ConcurrencyUnspecified),
|
||||
"CONCURRENCY_FIRST_WRITE" => Some(Self::ConcurrencyFirstWrite),
|
||||
"CONCURRENCY_LAST_WRITE" => Some(Self::ConcurrencyLastWrite),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Enum describing the supported consistency for state.
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
::prost::Enumeration
|
||||
)]
|
||||
#[repr(i32)]
|
||||
pub enum StateConsistency {
|
||||
ConsistencyUnspecified = 0,
|
||||
ConsistencyEventual = 1,
|
||||
ConsistencyStrong = 2,
|
||||
}
|
||||
impl StateConsistency {
|
||||
/// String value of the enum field names used in the ProtoBuf definition.
|
||||
///
|
||||
/// The values are not transformed in any way and thus are considered stable
|
||||
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
|
||||
pub fn as_str_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::ConsistencyUnspecified => "CONSISTENCY_UNSPECIFIED",
|
||||
Self::ConsistencyEventual => "CONSISTENCY_EVENTUAL",
|
||||
Self::ConsistencyStrong => "CONSISTENCY_STRONG",
|
||||
}
|
||||
}
|
||||
/// Creates an enum from field names used in the ProtoBuf definition.
|
||||
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
|
||||
match value {
|
||||
"CONSISTENCY_UNSPECIFIED" => Some(Self::ConsistencyUnspecified),
|
||||
"CONSISTENCY_EVENTUAL" => Some(Self::ConsistencyEventual),
|
||||
"CONSISTENCY_STRONG" => Some(Self::ConsistencyStrong),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/// ConfigurationItem represents all the configuration with its name(key).
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct ConfigurationItem {
|
||||
/// Required. The value of configuration item.
|
||||
#[prost(string, tag = "1")]
|
||||
pub value: ::prost::alloc::string::String,
|
||||
/// Version is response only and cannot be fetched. Store is not expected to keep all versions available
|
||||
#[prost(string, tag = "2")]
|
||||
pub version: ::prost::alloc::string::String,
|
||||
/// the metadata which will be passed to/from configuration store component.
|
||||
#[prost(map = "string, string", tag = "3")]
|
||||
pub metadata: ::std::collections::HashMap<
|
||||
::prost::alloc::string::String,
|
||||
::prost::alloc::string::String,
|
||||
>,
|
||||
}
|
File diff suppressed because it is too large
Binary file not shown.
|
@@ -13,7 +13,7 @@ pub enum Error {

impl Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
    }
}

@@ -50,6 +50,6 @@ pub struct GrpcError {

impl Display for GrpcError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
    }
}

@@ -1,8 +1,5 @@
#![doc = include_str!("../README.md")]

-extern crate dapr_macros;
-
pub use dapr_macros::actor;
pub use serde;
pub use serde_json;

@@ -12,8 +9,23 @@ pub use client::Client;
pub mod appcallback;
/// Module containing the 'Client' implementation.
pub mod client;

/// Module importing the Dapr runtime implementation.
-pub mod dapr;
+pub mod dapr {
+    #![allow(clippy::large_enum_variant)]
+    pub mod proto {
+        pub mod common {
+            pub mod v1 {
+                include!("dapr/dapr.proto.common.v1.rs");
+            }
+        }
+        pub mod runtime {
+            pub mod v1 {
+                include!("dapr/dapr.proto.runtime.v1.rs");
+            }
+        }
+    }
+}
/// Module defining the error implementations.
pub mod error;
/// Module containing the 'Server' implementation.
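With the generated modules now declared inline, code outside the crate reaches them through `dapr::dapr::proto` (inside the crate it is `crate::dapr::proto`, as the other hunks in this compare show). A minimal sketch using the `StateItem` message from the generated common protos shown earlier:

```rust
// External crates reach the generated protobuf types through `dapr::dapr::proto`.
use dapr::dapr::proto::common::v1 as common_v1;

fn new_item(key: &str, value: &[u8]) -> common_v1::StateItem {
    common_v1::StateItem {
        key: key.to_string(),
        value: value.to_vec(),
        etag: None,
        metadata: Default::default(),
        options: None,
    }
}
```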
@ -1,5 +1,5 @@
|
|||
use crate::client::TonicClient;
|
||||
use crate::dapr::dapr::proto::runtime::v1 as dapr_v1;
|
||||
use crate::dapr::proto::runtime::v1 as dapr_v1;
|
||||
use crate::error::Error as DaprError;
|
||||
use prost_types::Any;
|
||||
use std::collections::HashMap;
|
|
@ -37,7 +37,7 @@ impl Display for ActorError {
|
|||
ActorError::CorruptedState => write!(f, "Actor state corrupted"),
|
||||
ActorError::MethodNotFound => write!(f, "Method not found"),
|
||||
ActorError::ActorNotFound => write!(f, "Actor not found"),
|
||||
ActorError::MethodError(e) => write!(f, "Method error: {}", e),
|
||||
ActorError::MethodError(e) => write!(f, "Method error: {e}"),
|
||||
ActorError::SerializationError() => write!(f, "Serialization error"),
|
||||
}
|
||||
}
|
|
@ -102,9 +102,9 @@ impl ActorTypeRegistration {
|
|||
/// # Arguments:
|
||||
/// * `method_name` - The name of the method to be registered. This name will be used by actor clients to invoke the method.
|
||||
/// * `handler` - The handler function to be invoked when the method is called.
|
||||
/// Can be any valid [Axum handler](https://docs.rs/axum/latest/axum/handler/index.html),
|
||||
/// use [Axum extractors](https://docs.rs/axum/latest/axum/extract/index.html) to access the incoming request and return an [`impl IntoResponse`](https://docs.rs/axum/latest/axum/response/trait.IntoResponse.html).
|
||||
/// Use the `DaprJson` extractor to deserialize the request from Json coming from a Dapr sidecar.
|
||||
/// Can be any valid [Axum handler](https://docs.rs/axum/latest/axum/handler/index.html),
|
||||
/// use [Axum extractors](https://docs.rs/axum/latest/axum/extract/index.html) to access the incoming request and return an [`impl IntoResponse`](https://docs.rs/axum/latest/axum/response/trait.IntoResponse.html).
|
||||
/// Use the `DaprJson` extractor to deserialize the request from Json coming from a Dapr sidecar.
|
||||
/// # Example:
|
||||
/// ```ignore
|
||||
/// # use std::sync::Arc;
|
||||
|
@ -174,7 +174,7 @@ impl ActorTypeRegistration {
|
|||
T: 'static,
|
||||
{
|
||||
let actor_type = self.name.clone();
|
||||
let method_path = format!("/actors/{}/:actor_id/method/{}", actor_type, method_name);
|
||||
let method_path = format!("/actors/{actor_type}/:actor_id/method/{method_name}");
|
||||
|
||||
let reg_func = move |router: Router, runtime: Arc<ActorRuntime>| {
|
||||
router.route(
|
||||
|
@ -224,7 +224,7 @@ impl ActorRuntime {
|
|||
let name = registration.name.clone();
|
||||
let mut g = self.registered_actors_types.write().await;
|
||||
g.insert(name.clone(), registration);
|
||||
log::info!("registered actor {}", name);
|
||||
log::info!("registered actor {name}");
|
||||
}
|
||||
|
||||
pub async fn configure_method_routes(
|
|
@ -1,3 +1,4 @@
|
|||
#[cfg(test)]
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
@ -96,7 +97,7 @@ async fn test_actor_invoke() {
|
|||
let server = TestServer::new(app.into_make_service()).unwrap();
|
||||
|
||||
let invoke_resp = server
|
||||
.put(&format!("/actors/MyActor/{}/method/do_stuff", actor_id))
|
||||
.put(&format!("/actors/MyActor/{actor_id}/method/do_stuff"))
|
||||
.json(&json!({ "name": "foo" }))
|
||||
.await;
|
||||
invoke_resp.assert_status_ok();
|
||||
|
@ -117,7 +118,7 @@ async fn test_actor_invoke() {
|
|||
);
|
||||
|
||||
let invoke_resp2 = server
|
||||
.put(&format!("/actors/MyActor/{}/method/do_stuff", actor_id))
|
||||
.put(&format!("/actors/MyActor/{actor_id}/method/do_stuff"))
|
||||
.json(&json!({ "name": "foo" }))
|
||||
.await;
|
||||
invoke_resp2.assert_status_ok();
|
||||
|
@ -168,19 +169,15 @@ async fn test_actor_deactivate() {
|
|||
let actor_id = Uuid::new_v4().to_string();
|
||||
|
||||
let invoke_resp = server
|
||||
.put(&format!("/actors/MyActor/{}/method/do_stuff", actor_id))
|
||||
.put(&format!("/actors/MyActor/{actor_id}/method/do_stuff"))
|
||||
.json(&json!({ "name": "foo" }))
|
||||
.await;
|
||||
invoke_resp.assert_status_ok();
|
||||
|
||||
let deactivate_resp1 = server
|
||||
.delete(&format!("/actors/MyActor/{}", actor_id))
|
||||
.await;
|
||||
let deactivate_resp1 = server.delete(&format!("/actors/MyActor/{actor_id}")).await;
|
||||
deactivate_resp1.assert_status_ok();
|
||||
|
||||
let deactivate_resp2 = server
|
||||
.delete(&format!("/actors/MyActor/{}", actor_id))
|
||||
.await;
|
||||
let deactivate_resp2 = server.delete(&format!("/actors/MyActor/{actor_id}")).await;
|
||||
deactivate_resp2.assert_status_not_found();
|
||||
|
||||
assert_eq!(
|
|
@ -2,8 +2,8 @@ use std::collections::HashMap;
|
|||
|
||||
use tonic::{Code, Request, Response, Status};
|
||||
|
||||
use crate::dapr::dapr::proto::runtime;
|
||||
use crate::dapr::dapr::proto::runtime::v1::app_callback_alpha_server::AppCallbackAlpha;
|
||||
use crate::dapr::proto::runtime;
|
||||
use crate::dapr::proto::runtime::v1::app_callback_alpha_server::AppCallbackAlpha;
|
||||
|
||||
pub struct AppCallbackServiceAlpha {
|
||||
pub job_handlers: HashMap<String, Box<dyn JobHandlerMethod + Send + Sync + 'static>>,
|
|
@ -1,5 +1,5 @@
|
|||
use axum::{
|
||||
extract::{Path, State},
|
||||
extract::{OriginalUri, Path, State},
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
routing::{delete, get, put},
|
||||
|
@ -84,6 +84,12 @@ pub struct DaprHttpServer {
|
|||
|
||||
impl DaprHttpServer {
|
||||
/// Creates a new instance of the Dapr HTTP server with default options.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function panics if the Dapr Sidecar cannot be reached!
|
||||
/// For a non-panicking version that allows you to handle any errors yourself, see:
|
||||
/// [DaprHttpServer::try_new_with_dapr_port]
|
||||
pub async fn new() -> Self {
|
||||
let dapr_port: u16 = std::env::var("DAPR_GRPC_PORT")
|
||||
.unwrap_or("3501".into())
|
||||
|
@ -92,19 +98,38 @@ impl DaprHttpServer {
|
|||
Self::with_dapr_port(dapr_port).await
|
||||
}
|
||||
|
||||
/// Creates a new instance of the Dapr HTTP server that connects to the Dapr sidecar on the
|
||||
/// given dapr_port.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function panics if the Dapr Sidecar cannot be reached!
|
||||
/// For a non-panicking version that allows you to handle any errors yourself, see:
|
||||
/// [DaprHttpServer::try_new_with_dapr_port]
|
||||
pub async fn with_dapr_port(dapr_port: u16) -> Self {
|
||||
let dapr_addr = format!("https://127.0.0.1:{}", dapr_port);
|
||||
|
||||
let cc = match TonicClient::connect(dapr_addr).await {
|
||||
match Self::try_new_with_dapr_port(dapr_port).await {
|
||||
Ok(c) => c,
|
||||
Err(err) => panic!("failed to connect to dapr: {}", err),
|
||||
};
|
||||
Err(err) => panic!("failed to connect to dapr: {err}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance of the Dapr HTTP server that connects to the Dapr sidecar on the
|
||||
/// given dapr_port.
|
||||
///
|
||||
/// In contrast to the other functions that create a DaprHttpServer, this function does
|
||||
/// not panic, but instead returns a Result.
|
||||
pub async fn try_new_with_dapr_port(
|
||||
dapr_port: u16,
|
||||
) -> Result<Self, Box<dyn std::error::Error>> {
|
||||
let dapr_addr = format!("https://127.0.0.1:{dapr_port}");
|
||||
|
||||
let cc = TonicClient::connect(dapr_addr).await?;
|
||||
let rt = ActorRuntime::new(cc);
|
||||
|
||||
DaprHttpServer {
|
||||
Ok(DaprHttpServer {
|
||||
actor_runtime: Arc::new(rt),
|
||||
shutdown_signal: None,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn with_graceful_shutdown<F>(self, signal: F) -> Self
|
||||
|
@ -138,7 +163,7 @@ impl DaprHttpServer {
|
|||
.unwrap_or(8080);
|
||||
|
||||
let address = format!("127.0.0.1:{}", port.unwrap_or(default_port));
|
||||
let listener = TcpListener::bind(address).await.unwrap();
|
||||
let listener = TcpListener::bind(address).await?;
|
||||
|
||||
let server = axum::serve(listener, app.into_make_service());
|
||||
|
||||
|
@ -182,7 +207,8 @@ impl DaprHttpServer {
|
|||
.route(
|
||||
"/actors/:actor_type/:actor_id/method/timer/:timer_name",
|
||||
put(invoke_timer).with_state(rt.clone()),
|
||||
);
|
||||
)
|
||||
.fallback(fallback_handler);
|
||||
|
||||
self.actor_runtime
|
||||
.configure_method_routes(app, rt.clone())
|
||||
|
@ -190,6 +216,14 @@ impl DaprHttpServer {
|
|||
}
|
||||
}
|
||||
|
||||
async fn fallback_handler(OriginalUri(uri): OriginalUri) -> impl IntoResponse {
|
||||
log::warn!("Returning 404 for request: {uri}");
|
||||
(
|
||||
StatusCode::NOT_FOUND,
|
||||
format!("The URI '{uri}' could not be found!"),
|
||||
)
|
||||
}
|
||||
|
||||
async fn health_check() -> impl IntoResponse {
|
||||
log::debug!("recieved health check request");
|
||||
StatusCode::OK
|
||||
|
@ -210,11 +244,11 @@ async fn deactivate_actor(
|
|||
match runtime.deactivate_actor(&actor_type, &actor_id).await {
|
||||
Ok(_) => StatusCode::OK,
|
||||
Err(err) => {
|
||||
log::error!("invoke_actor: {:?}", err);
|
||||
log::error!("invoke_actor: {err:?}");
|
||||
match err {
|
||||
super::actor::ActorError::ActorNotFound => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
log::error!("deactivate_actor: {:?}", err);
|
||||
log::error!("deactivate_actor: {err:?}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
|
@ -227,13 +261,7 @@ async fn invoke_reminder(
|
|||
Path((actor_type, actor_id, reminder_name)): Path<(String, String, String)>,
|
||||
Json(payload): Json<ReminderPayload>,
|
||||
) -> impl IntoResponse {
|
||||
log::debug!(
|
||||
"invoke_reminder: {} {} {} {:?}",
|
||||
actor_type,
|
||||
actor_id,
|
||||
reminder_name,
|
||||
payload
|
||||
);
|
||||
log::debug!("invoke_reminder: {actor_type} {actor_id} {reminder_name} {payload:?}");
|
||||
|
||||
match runtime
|
||||
.invoke_reminder(
|
||||
|
@ -246,11 +274,11 @@ async fn invoke_reminder(
|
|||
{
|
||||
Ok(_output) => StatusCode::OK,
|
||||
Err(err) => {
|
||||
log::error!("invoke_actor: {:?}", err);
|
||||
log::error!("invoke_actor: {err:?}");
|
||||
match err {
|
||||
super::actor::ActorError::ActorNotFound => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
log::error!("invoke_reminder: {:?}", err);
|
||||
log::error!("invoke_reminder: {err:?}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
|
@ -263,13 +291,7 @@ async fn invoke_timer(
|
|||
Path((actor_type, actor_id, timer_name)): Path<(String, String, String)>,
|
||||
Json(payload): Json<TimerPayload>,
|
||||
) -> impl IntoResponse {
|
||||
log::debug!(
|
||||
"invoke_timer: {} {} {}, {:?}",
|
||||
actor_type,
|
||||
actor_id,
|
||||
timer_name,
|
||||
payload
|
||||
);
|
||||
log::debug!("invoke_timer: {actor_type} {actor_id} {timer_name}, {payload:?}");
|
||||
|
||||
match runtime
|
||||
.invoke_timer(
|
||||
|
@ -282,11 +304,11 @@ async fn invoke_timer(
|
|||
{
|
||||
Ok(_output) => StatusCode::OK,
|
||||
Err(err) => {
|
||||
log::error!("invoke_actor: {:?}", err);
|
||||
log::error!("invoke_actor: {err:?}");
|
||||
match err {
|
||||
super::actor::ActorError::ActorNotFound => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
log::error!("invoke_timer: {:?}", err);
|
||||
log::error!("invoke_timer: {err:?}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
|
@ -28,14 +28,14 @@ where
|
|||
let bytes = match axum::body::Bytes::from_request(req, state).await {
|
||||
Ok(bytes) => bytes,
|
||||
Err(e) => {
|
||||
log::error!("Error getting bytes: {}", e);
|
||||
log::error!("Error getting bytes: {e}");
|
||||
return Err(JsonRejection::JsonError(e.to_string()));
|
||||
}
|
||||
};
|
||||
let value = match serde_json::from_slice::<T>(&bytes) {
|
||||
Ok(value) => value,
|
||||
Err(e) => {
|
||||
log::error!("Error deserializing JSON: {}", e);
|
||||
log::error!("Error deserializing JSON: {e}");
|
||||
return Err(JsonRejection::JsonError(e.to_string()));
|
||||
}
|
||||
};
|
|
@ -8,13 +8,14 @@ All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), includ

### Rust SDK docs source

Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the Go SDK content and images are within the `content` and `static` directories, respectively.
Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the Rust SDK content and images are within the `content` and `static` directories, respectively.

This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for.

## Writing Rust SDK docs

To get up and running to write Go SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo.
To get up and running to write Rust SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your
environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo.

Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc.

@ -12,7 +12,7 @@ When contributing to the [Rust SDK](https://github.com/dapr/rust-sdk) the follow
|
|||
|
||||
The `examples` directory contains code samples for users to run to try out specific functionality of the various Rust SDK packages and extensions. It also hosts component examples used for validation. When writing new and updated samples keep in mind:
|
||||
|
||||
- All examples should be runnable on Windows, Linux, and MacOS. While Rust code is consistent among operating systems aside from minor OS-feature gating, any pre/post example commands should provide options through [codetabs]({{< ref "contributing-docs.md#tabbed-content" >}})
|
||||
- All examples should be runnable on Windows, Linux, and MacOS. While Rust code is consistent among operating systems aside from minor OS-feature gating, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}})
|
||||
- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine.
|
||||
- Examples should pass validation, include mechanical markdown steps, and be added to the validation workflow [TBA](#)
|
||||
|
||||
|
@ -20,7 +20,7 @@ The `examples` directory contains code samples for users to run to try out speci
|
|||
|
||||
The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs keep in mind:
|
||||
|
||||
- All rules in the [docs guide]({{< ref contributing-docs.md >}}) should be followed in addition to these.
|
||||
- All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these.
|
||||
- All files and directories should be prefixed with `rust-` to ensure all file/directory names are globally unique across all Dapr documentation.
|
||||
|
||||
## Update Protobufs
|
||||
|
|
|
@ -22,6 +22,6 @@ A client library to help build Dapr applications using Rust. This client is targ
|
|||
{{< card title="**Client**">}}
|
||||
Use the Rust Client SDK for invoking public Dapr APIs
|
||||
|
||||
[**Learn more about the Rust Client SDK**]({{< ref rust-client >}})
|
||||
[**Learn more about the Rust Client SDK**]({{% ref rust-client %}})
|
||||
{{< /card >}}
|
||||
{{< /cardpane >}}
|
||||
|
|
|
@ -7,19 +7,20 @@ description: How to get up and running with the Dapr Rust SDK
no_list: true
---

The Dapr client package allows you to interact with other Dapr applications from a Rust application.
The Dapr client package allows you to interact with other Dapr applications from
a Rust application.

{{% alert title="Note" color="primary" %}}
The Dapr Rust-SDK is currently in Alpha. Work is underway to bring it to a stable release and will likely involve breaking changes.
The Dapr Rust-SDK is currently in Alpha. Work is underway to bring it to a
stable release and will likely involve breaking changes.
{{% /alert %}}

## Prerequisites

- [Dapr CLI]({{< ref install-dapr-cli.md >}}) installed
- Initialized [Dapr environment]({{< ref install-dapr-selfhost.md >}})
- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed
- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}})
- [Rust installed](https://www.rust-lang.org/tools/install)

## Import the client package

Add Dapr to your `cargo.toml`

@ -27,34 +28,42 @@ Add Dapr to your `cargo.toml`
```toml
[dependencies]
# Other dependencies
dapr = "0.13.0"
dapr = "0.16.0"
```

You can either reference `dapr::Client` or bind the full path to a new name as follows:

```rust
use dapr::Client as DaprClient
use dapr::Client as DaprClient;
```

## Instantiating the Dapr client

```rust
const addr: String = "https://127.0.0.1";
const port: String = "50001";
let addr = "https://127.0.0.1".to_string();

let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr,
port).await?;
port).await?;
```

Alternatively if you would like to specify a custom port, this can be done by using this connect method:

```rust
let mut client = dapr::Client::<dapr::client::TonicClient>::connect_with_port(addr, "3500".to_string()).await?;
```
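
If you would rather not hard-code the port, a minimal sketch along the following lines can read it at startup. This is an illustration only: it assumes the sidecar exposes the standard `DAPR_GRPC_PORT` environment variable and that a Tokio runtime is available, as in the repository examples.

```rust
use dapr::client::TonicClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assumption: the sidecar publishes its gRPC port via DAPR_GRPC_PORT; fall back to 3500.
    let port = std::env::var("DAPR_GRPC_PORT").unwrap_or_else(|_| "3500".to_string());
    let addr = "https://127.0.0.1".to_string();

    // connect_with_port is the constructor shown above.
    let _client = dapr::Client::<TonicClient>::connect_with_port(addr, port.clone()).await?;

    println!("connected to the Dapr sidecar on port {port}");
    Ok(())
}
```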

## Building blocks

The Rust SDK allows you to interface with the [Dapr building blocks]({{< ref building-blocks >}}).
The Rust SDK allows you to interface with the
[Dapr building blocks]({{% ref building-blocks %}}).

### Service Invocation
### Service Invocation (gRPC)

To invoke a specific method on another service running with Dapr sidecar, the Dapr client Go SDK provides two options:
To invoke a specific method on another service running with Dapr sidecar, the
Dapr client provides two options:

Invoke a (gRPC) service

Invoke a service
```rust
let response = client
    .invoke_service("service-to-invoke", "method-to-invoke", Some(data))

@ -62,36 +71,43 @@ let response = client
    .unwrap();
```
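
A more complete, self-contained sketch of the same call is shown below. It is illustrative only: it assumes the `data` payload is a `prost_types::Any` (matching the `Some(data)` argument and the `prost-types` dependency used elsewhere in the SDK) and a sidecar reachable on gRPC port 3500.

```rust
use dapr::client::TonicClient;
use prost_types::Any;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let addr = "https://127.0.0.1".to_string();
    let mut client =
        dapr::Client::<TonicClient>::connect_with_port(addr, "3500".to_string()).await?;

    // Assumption: the payload is a prost_types::Any; the type_url is left empty
    // for an opaque byte payload.
    let data = Any {
        type_url: String::new(),
        value: b"ping".to_vec(),
    };

    let response = client
        .invoke_service("service-to-invoke", "method-to-invoke", Some(data))
        .await
        .unwrap();

    println!("invocation response: {response:#?}");
    Ok(())
}
```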

For a full guide on service invocation, visit [How-To: Invoke a service]({{< ref howto-invoke-discover-services.md >}}).
For a full guide on service invocation, visit
[How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}).

### State Management

The Dapr Client provides access to these state management methods: `save_state`, `get_state`, `delete_state` that can be used like so:
The Dapr Client provides access to these state management methods: `save_state`
, `get_state`, `delete_state` that can be used like so:

```rust
let store_name = "store-name";
let state_key = "state-key";
let store_name = String::from("statestore");

let states = vec![(state_key, ("state-value").as_bytes().to_vec())];
let key = String::from("hello");
let val = String::from("world").into_bytes();

// save state with the key "state-key" and value "state-value"
client.save_state(store_name, states).await?;
// save key-value pair in the state store
client
    .save_state(store_name, key, val, None, None, None)
    .await?;

// get state for key "state-key"
let response = client.get_state(store_name, state_key, None).await.unwrap();
let get_response = client
    .get_state("statestore", "hello", None)
    .await?;

// delete state for key "state-key"
client.delete_state(store_name, state_key, None).await?;
// delete a value from the state store
client
    .delete_state("statestore", "hello", None)
    .await?;
```
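
Pulled together into a runnable program, the calls above might look like this minimal sketch. It assumes a sidecar on gRPC port 3500, a Tokio runtime, and a state store component named `statestore`, as in the repository examples.

```rust
use dapr::client::TonicClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let addr = "https://127.0.0.1".to_string();
    let mut client =
        dapr::Client::<TonicClient>::connect_with_port(addr, "3500".to_string()).await?;

    let store_name = String::from("statestore");
    let key = String::from("hello");
    let val = String::from("world").into_bytes();

    // Save, read back, then delete the same key, mirroring the calls above.
    client
        .save_state(store_name, key, val, None, None, None)
        .await?;

    let get_response = client.get_state("statestore", "hello", None).await?;
    println!("stored value: {:?}", String::from_utf8(get_response.data)?);

    client.delete_state("statestore", "hello", None).await?;
    Ok(())
}
```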

> **Note:** The `save_state` method currently performs a 'bulk' save but this will be refactored
Multiple states can be sent with the `save_bulk_states` method.

For a full guide on state management, visit [How-To: Save & get state]({{< ref howto-get-save-state.md >}}).
For a full guide on state management, visit
[How-To: Save & get state]({{% ref howto-get-save-state.md %}}).

### Publish Messages
To publish data onto a topic, the Dapr Go client provides a simple method:

To publish data onto a topic, the Dapr client provides a simple method:

```rust
let pubsub_name = "pubsub-name".to_string();

@ -104,7 +120,9 @@ client
    .await?;
```
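
A fuller sketch of a publish call is shown below. It is illustrative only: the exact `publish_event` parameter order (pub/sub component name, topic, content type, payload bytes, optional metadata) is an assumption to verify against the generated client before relying on it.

```rust
use std::collections::HashMap;

use dapr::client::TonicClient;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let addr = "https://127.0.0.1".to_string();
    let mut client =
        dapr::Client::<TonicClient>::connect_with_port(addr, "3500".to_string()).await?;

    let pubsub_name = "pubsub-name".to_string();
    let topic = "A".to_string();
    let data = "hello from rust!".as_bytes().to_vec();

    // Assumption: publish_event takes the pub/sub component name, topic, content type,
    // payload bytes, and optional metadata, in that order.
    let metadata: Option<HashMap<String, String>> = None;
    client
        .publish_event(pubsub_name, topic, "text/plain".to_string(), data, metadata)
        .await?;

    println!("message published");
    Ok(())
}
```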

For a full guide on pub/sub, visit [How-To: Publish & subscribe]({{< ref howto-publish-subscribe.md >}}).
For a full guide on pub/sub, visit
[How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}).

## Related links

[Rust SDK Examples](https://github.com/dapr/rust-sdk/tree/master/examples)

@ -0,0 +1,95 @@
|
|||
[package]
|
||||
name = "examples"
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish = false
|
||||
version = "0.0.1"
|
||||
repository.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
dapr = { path = "../dapr" }
|
||||
dapr-macros = { path = "../dapr-macros" }
|
||||
env_logger = "0.11"
|
||||
log = "0.4"
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
tokio-stream = { workspace = true }
|
||||
tonic = { workspace = true }
|
||||
prost = { workspace = true }
|
||||
prost-types = { workspace = true }
|
||||
|
||||
[[example]]
|
||||
name = "actors-client"
|
||||
path = "src/actors/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "actors-server"
|
||||
path = "src/actors/server.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bindings-input"
|
||||
path = "src/bindings/input.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bindings-output"
|
||||
path = "src/bindings/output.rs"
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
path = "src/client/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "configuration"
|
||||
path = "src/configuration/main.rs"
|
||||
|
||||
[[example]]
|
||||
name = "conversation"
|
||||
path = "src/conversation/main.rs"
|
||||
|
||||
[[example]]
|
||||
name = "crypto"
|
||||
path = "src/crypto/main.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-client"
|
||||
path = "src/invoke/grpc/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-server"
|
||||
path = "src/invoke/grpc/server.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-proxying-client"
|
||||
path = "src/invoke/grpc-proxying/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-proxying-server"
|
||||
path = "src/invoke/grpc-proxying/server.rs"
|
||||
|
||||
[[example]]
|
||||
name = "jobs"
|
||||
path = "src/jobs/jobs.rs"
|
||||
|
||||
[[example]]
|
||||
name = "pubsub-publisher"
|
||||
path = "src/pubsub/publisher.rs"
|
||||
|
||||
[[example]]
|
||||
name = "pubsub-subscriber"
|
||||
path = "src/pubsub/subscriber.rs"
|
||||
|
||||
[[example]]
|
||||
name = "query-state-1"
|
||||
path = "src/query_state/query1.rs"
|
||||
|
||||
[[example]]
|
||||
name = "query-state-2"
|
||||
path = "src/query_state/query2.rs"
|
||||
|
||||
[[example]]
|
||||
name = "secrets-bulk"
|
||||
path = "src/secrets-bulk/app.rs"
|
|
@ -2,9 +2,9 @@
|
|||
|
||||
These examples demonstrate how to use the Dapr Rust SDK.
|
||||
|
||||
* [client](./client)
|
||||
* [client](src/client)
|
||||
* Simple dapr client example that saves, gets, and deletes state from the state stores
|
||||
* [pubsub](./pubsub)
|
||||
* [pubsub](src/pubsub)
|
||||
* Publishes and subscribes to events
|
||||
|
||||
## Adding new examples
|
||||
|
|
|
@ -88,7 +88,22 @@ Use the `DaprJson` extractor to deserialize the request from Json coming from a
|
|||
> docker ps
|
||||
> ```
|
||||
|
||||
To run this example (using the multi-app run):
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run this example (using the multi-app run):
|
||||
|
||||
|
||||
<!-- STEP
|
||||
|
@ -120,11 +135,11 @@ dapr run -f .
|
|||
|
||||
1. Start actor host (expose Http server receiver on port 50051):
|
||||
```bash
|
||||
dapr run --app-id actor-host --app-protocol http --app-port 50051 cargo run -- --example actor-server
|
||||
dapr run --app-id actor-host --app-protocol http --app-port 50051 cargo run -- --example actors-server
|
||||
```
|
||||
|
||||
2. Start actor client:
|
||||
```bash
|
||||
dapr run --app-id actor-client --dapr-grpc-port 3502 cargo run -- --example actor-client
|
||||
dapr run --app-id actor-client --dapr-grpc-port 3502 cargo run -- --example actors-client
|
||||
|
||||
```
|
|
@ -14,7 +14,7 @@ pub struct MyRequest {
|
|||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
std::thread::sleep(std::time::Duration::new(2, 0));
|
||||
tokio::time::sleep(std::time::Duration::new(2, 0)).await;
|
||||
|
||||
// Define the Dapr address
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
@ -30,7 +30,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
.invoke_actor("MyActor", "a1", "do_stuff", data, None)
|
||||
.await;
|
||||
|
||||
println!("Response: {:#?}", resp);
|
||||
println!("Response: {resp:#?}");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -8,9 +8,9 @@ apps:
|
|||
appProtocol: http
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "actor-server"]
|
||||
command: ["cargo", "run", "--example", "actors-server"]
|
||||
- appID: actor-client
|
||||
appDirPath: ./
|
||||
daprGRPCPort: 3502
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "actor-client"]
|
||||
command: ["cargo", "run", "--example", "actors-client"]
|
|
@ -31,7 +31,7 @@ impl MyActor {
|
|||
println!("doing stuff with {}", req.name);
|
||||
let mut dapr = self.client.clone();
|
||||
let r = dapr.get_actor_state("key1").await.unwrap();
|
||||
println!("get_actor_state {:?}", r);
|
||||
println!("get_actor_state {r:?}");
|
||||
Json(MyResponse { available: true })
|
||||
}
|
||||
}
|
|
@ -12,9 +12,22 @@ In order to have both examples working with the same binding configuration Servi
|
|||
|
||||
## Running
|
||||
|
||||
To run this example:
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
1. Run a kafka container
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run a kafka container
|
||||
|
||||
<!-- STEP
|
||||
name: Run kafka instance
|
||||
|
@ -31,7 +44,7 @@ docker run -p 9092:9092 apache/kafka:3.7.1
|
|||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the multi-app run template (`dapr.yaml`)
|
||||
3. Run the multi-app run template (`dapr.yaml`)
|
||||
|
||||
<!-- STEP
|
||||
name: Run Multi-app Run
|
||||
|
@ -60,7 +73,7 @@ expected_stdout_lines:
|
|||
- '== APP - rust-input-b == Message: 9 => hello from rust!'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 90
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
|
@ -8,9 +8,9 @@ apps:
|
|||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "input-bindings"]
|
||||
command: ["cargo", "run", "--example", "bindings-input"]
|
||||
- appID: rust-output-b
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "output-bindings"]
|
||||
command: ["cargo", "run", "--example", "bindings-output"]
|
|
@ -1,8 +1,8 @@
|
|||
use tonic::{transport::Server, Request, Response, Status};
|
||||
|
||||
use dapr::dapr::dapr::proto::common::v1::{InvokeRequest, InvokeResponse};
|
||||
use dapr::dapr::dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer};
|
||||
use dapr::dapr::dapr::proto::runtime::v1::{
|
||||
use dapr::dapr::proto::common::v1::{InvokeRequest, InvokeResponse};
|
||||
use dapr::dapr::proto::runtime::v1::{
|
||||
app_callback_server::{AppCallback, AppCallbackServer},
|
||||
BindingEventRequest, BindingEventResponse, ListInputBindingsResponse,
|
||||
ListTopicSubscriptionsResponse, TopicEventRequest, TopicEventResponse,
|
||||
};
|
||||
|
@ -61,7 +61,7 @@ impl AppCallback for AppCallbackService {
|
|||
let name = &r.name;
|
||||
let data = &r.data;
|
||||
|
||||
let message = String::from_utf8_lossy(&data);
|
||||
let message = String::from_utf8_lossy(data);
|
||||
println!("Binding Name: {}", &name);
|
||||
println!("Message: {}", &message);
|
||||
|
||||
|
@ -75,7 +75,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
|
||||
let callback_service = AppCallbackService::default();
|
||||
|
||||
println!("AppCallback server listening on: {}", addr);
|
||||
println!("AppCallback server listening on: {addr}");
|
||||
|
||||
// Create a gRPC server with the callback_service.
|
||||
Server::builder()
|
|
@ -1,10 +1,10 @@
|
|||
use std::{collections::HashMap, thread, time::Duration};
|
||||
use std::{collections::HashMap, time::Duration};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
thread::sleep(Duration::from_secs(2));
|
||||
tokio::time::sleep(Duration::from_secs(2)).await;
|
||||
|
||||
// Get the Dapr port and create a connection
|
||||
let addr = "https://127.0.0.1".to_string();
|
|
@ -5,10 +5,19 @@ docker ps
|
|||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
```
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example with dapr using the following command:
|
||||
|
||||
<!-- STEP
|
|
@ -2,7 +2,7 @@
|
|||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
std::thread::sleep(std::time::Duration::new(2, 0));
|
||||
tokio::time::sleep(std::time::Duration::new(2, 0)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
@ -17,7 +17,9 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
let store_name = String::from("statestore");
|
||||
|
||||
// save key-value pair in the state store
|
||||
client.save_state(store_name, vec![(key, val)]).await?;
|
||||
client
|
||||
.save_state(store_name, key, val, None, None, None)
|
||||
.await?;
|
||||
|
||||
println!("Successfully saved!");
|
||||
|
|
@ -5,10 +5,19 @@ docker ps
|
|||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Insert a key with the value `hello` to redis using the following command:
|
||||
|
||||
|
|
@ -7,7 +7,7 @@ type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
|||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
std::thread::sleep(std::time::Duration::new(2, 0));
|
||||
tokio::time::sleep(std::time::Duration::new(2, 0)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
@ -19,14 +19,14 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
|
||||
// get key-value pair in the state store
|
||||
let response = client
|
||||
.get_configuration(CONFIGSTORE_NAME, vec![(&key)], None)
|
||||
.get_configuration(CONFIGSTORE_NAME, vec![&key], None)
|
||||
.await?;
|
||||
let val = response.items.get("hello").unwrap();
|
||||
println!("Configuration value: {val:?}");
|
||||
|
||||
// Subscribe for configuration changes
|
||||
let mut stream = client
|
||||
.subscribe_configuration(CONFIGSTORE_NAME, vec![(&key)], None)
|
||||
.subscribe_configuration(CONFIGSTORE_NAME, vec![&key], None)
|
||||
.await?;
|
||||
|
||||
let mut subscription_id = String::new();
|
||||
|
@ -52,7 +52,7 @@ async fn unsubscribe(client: &mut DaprClient, subscription_id: &str) {
|
|||
.await
|
||||
{
|
||||
Ok(_) => println!("App unsubscribed from config changes"),
|
||||
Err(e) => println!("Error unsubscribing from config updates: {}", e),
|
||||
Err(e) => println!("Error unsubscribing from config updates: {e}"),
|
||||
}
|
||||
std::process::exit(0);
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
# Dapr Conversation Example with the Rust-SDK
|
||||
|
||||
This example uses the echo component to send a request and the component response will be the exact message received.
|
||||
|
||||
## Step
|
||||
|
||||
### Prepare
|
||||
|
||||
- Dapr installed
|
||||
|
||||
### Run Conversation Example
|
||||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example using the Dapr CLI
|
||||
|
||||
<!-- STEP
|
||||
name: Run Conversation
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- 'conversation input: "hello world"'
|
||||
- 'conversation output: "hello world"'
|
||||
|
||||
background: true
|
||||
sleep: 15
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run --app-id=conversation --resources-path ./config --dapr-grpc-port 3500 -- cargo run --example conversation
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
## Result
|
||||
|
||||
```
|
||||
- 'conversation input: hello world'
|
||||
- 'conversation output: hello world'
|
||||
```
|
|
@ -0,0 +1,7 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: echo
|
||||
spec:
|
||||
type: conversation.echo
|
||||
version: v1
|
|
@ -0,0 +1,30 @@
|
|||
use dapr::client::{ConversationInputBuilder, ConversationRequestBuilder};
|
||||
use std::time::Duration;
|
||||
|
||||
type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Sleep to allow for the server to become available
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let address = "https://127.0.0.1".to_string();
|
||||
let port = "3500".to_string();
|
||||
|
||||
let mut client = DaprClient::connect_with_port(address, port).await?;
|
||||
|
||||
let input = ConversationInputBuilder::new("hello world").build();
|
||||
|
||||
let conversation_component = "echo";
|
||||
|
||||
let request =
|
||||
ConversationRequestBuilder::new(conversation_component, vec![input.clone()]).build();
|
||||
|
||||
println!("conversation input: {:?}", input.content);
|
||||
|
||||
let response = client.converse_alpha1(request).await?;
|
||||
|
||||
println!("conversation output: {:?}", response.outputs[0].result);
|
||||
Ok(())
|
||||
}
|
|
@ -6,15 +6,30 @@ This is a simple example that demonstrates Dapr's Cryptography capabilities.
|
|||
|
||||
## Running
|
||||
|
||||
To run this example:
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Generate keys in examples/crypto/keys directory:
|
||||
|
||||
1. Generate keys in examples/crypto/keys directory:
|
||||
<!-- STEP
|
||||
name: Generate keys
|
||||
background: false
|
||||
sleep: 5
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
mkdir -p keys
|
||||
# Generate a private RSA key, 4096-bit keys
|
||||
|
@ -25,7 +40,7 @@ openssl rand -out keys/symmetric-key-256 32
|
|||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the multi-app run template:
|
||||
3. Run the multi-app run template:
|
||||
|
||||
<!-- STEP
|
||||
name: Run multi-app
|
||||
|
@ -36,7 +51,7 @@ expected_stdout_lines:
|
|||
- '== APP - crypto-example == Successfully Decrypted Image'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 90
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
|
@ -45,4 +60,4 @@ dapr run -f .
|
|||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Stop with `ctrl + c`
|
||||
4. Stop with `ctrl + c`
|
|
@ -5,10 +5,19 @@ docker ps
|
|||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
```
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example with dapr using the following command:
|
||||
|
||||
<!-- STEP
|
||||
|
@ -22,7 +31,7 @@ expected_stdout_lines:
|
|||
- '== APP - invoke-grpc-client == }'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 90
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
|
@ -1,21 +1,21 @@
|
|||
use std::{thread, time::Duration};
|
||||
use std::time::Duration;
|
||||
|
||||
use hello_world::{greeter_client::GreeterClient, HelloRequest};
|
||||
|
||||
use tonic::metadata::MetadataValue;
|
||||
|
||||
pub mod hello_world {
|
||||
tonic::include_proto!("helloworld"); // The string specified here must match the proto package name
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Sleep to allow for the server to become available
|
||||
thread::sleep(Duration::from_secs(5));
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Get the Dapr port and create a connection
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT").unwrap().parse().unwrap();
|
||||
let address = format!("https://127.0.0.1:{}", port);
|
||||
let address = format!("https://127.0.0.1:{port}");
|
||||
|
||||
let mut client = GreeterClient::connect(address).await?;
|
||||
|
||||
|
@ -31,7 +31,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
let response = client.say_hello(request).await.unwrap();
|
||||
let hello_reply = response.into_inner();
|
||||
|
||||
println!("Response: {:#?}", hello_reply);
|
||||
println!("Response: {hello_reply:#?}");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -3,7 +3,7 @@ use crate::hello_world::{HelloReply, HelloRequest};
|
|||
use tonic::{transport::Server, Request, Response, Status};
|
||||
|
||||
pub mod hello_world {
|
||||
tonic::include_proto!("helloworld"); // The string specified here must match the proto package name
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
|
@ -33,7 +33,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
|
||||
let greeter_service = GreeterService::default();
|
||||
|
||||
println!("AppCallback server listening on: {}", server_address);
|
||||
println!("AppCallback server listening on: {server_address}");
|
||||
// Create a gRPC server with the callback_service.
|
||||
Server::builder()
|
||||
.add_service(GreeterServer::new(greeter_service))
|
|
@ -5,10 +5,19 @@ docker ps
|
|||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
```
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example with dapr using the following command to start the multi-app run:
|
||||
|
||||
<!-- STEP
|
||||
|
@ -45,7 +54,7 @@ expected_stdout_lines:
|
|||
- '== APP - invoke-grpc-client == }'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 90
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
== APP - invoke-grpc-server == Method: say_hello
|
||||
== APP - invoke-grpc-server == Name: "Test"
|
|
@ -1,10 +1,10 @@
|
|||
use std::{thread, time::Duration};
|
||||
use crate::hello_world::HelloReply;
|
||||
use std::time::Duration;
|
||||
|
||||
use hello_world::{HelloReply, HelloRequest};
|
||||
use prost::Message;
|
||||
|
||||
pub mod hello_world {
|
||||
tonic::include_proto!("helloworld"); // The string specified here must match the proto package name
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
||||
|
@ -12,14 +12,14 @@ type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
|||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Sleep to allow for the server to become available
|
||||
thread::sleep(Duration::from_secs(5));
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let address = "https://127.0.0.1".to_string();
|
||||
|
||||
let mut client = DaprClient::connect(address).await?;
|
||||
|
||||
let request = HelloRequest {
|
||||
let request = hello_world::HelloRequest {
|
||||
name: "Test".to_string(),
|
||||
};
|
||||
let data = request.encode_to_vec();
|
||||
|
@ -39,7 +39,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
println!("Message: {:#?}", &resp.message);
|
||||
};
|
||||
|
||||
println!("Response: {:#?}", response);
|
||||
println!("Response: {response:#?}");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,14 +1,15 @@
|
|||
use dapr::{
|
||||
appcallback::*,
|
||||
dapr::dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer},
|
||||
dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer},
|
||||
};
|
||||
use prost::Message;
|
||||
use tonic::{transport::Server, Request, Response, Status};
|
||||
|
||||
use prost::Message;
|
||||
|
||||
use hello_world::{HelloReply, HelloRequest};
|
||||
|
||||
pub mod hello_world {
|
||||
tonic::include_proto!("helloworld"); // The string specified here must match the proto package name
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
pub struct AppCallbackService {}
|
||||
|
@ -96,7 +97,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
|
||||
let callback_service = AppCallbackService {};
|
||||
|
||||
println!("AppCallback server listening on: {}", server_address);
|
||||
println!("AppCallback server listening on: {server_address}");
|
||||
// Create a gRPC server with the callback_service.
|
||||
Server::builder()
|
||||
.add_service(AppCallbackServer::new(callback_service))
|
|
@ -0,0 +1,300 @@
|
|||
// This file is @generated by prost-build.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HelloRequest {
|
||||
#[prost(string, tag = "1")]
|
||||
pub name: ::prost::alloc::string::String,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HelloReply {
|
||||
#[prost(string, tag = "1")]
|
||||
pub message: ::prost::alloc::string::String,
|
||||
}
|
||||
/// Generated client implementations.
|
||||
pub mod greeter_client {
|
||||
#![allow(
|
||||
unused_variables,
|
||||
dead_code,
|
||||
missing_docs,
|
||||
clippy::wildcard_imports,
|
||||
clippy::let_unit_value,
|
||||
)]
|
||||
use tonic::codegen::*;
|
||||
use tonic::codegen::http::Uri;
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct GreeterClient<T> {
|
||||
inner: tonic::client::Grpc<T>,
|
||||
}
|
||||
impl GreeterClient<tonic::transport::Channel> {
|
||||
/// Attempt to create a new client by connecting to a given endpoint.
|
||||
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
|
||||
where
|
||||
D: TryInto<tonic::transport::Endpoint>,
|
||||
D::Error: Into<StdError>,
|
||||
{
|
||||
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
|
||||
Ok(Self::new(conn))
|
||||
}
|
||||
}
|
||||
impl<T> GreeterClient<T>
|
||||
where
|
||||
T: tonic::client::GrpcService<tonic::body::BoxBody>,
|
||||
T::Error: Into<StdError>,
|
||||
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
|
||||
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
|
||||
{
|
||||
pub fn new(inner: T) -> Self {
|
||||
let inner = tonic::client::Grpc::new(inner);
|
||||
Self { inner }
|
||||
}
|
||||
pub fn with_origin(inner: T, origin: Uri) -> Self {
|
||||
let inner = tonic::client::Grpc::with_origin(inner, origin);
|
||||
Self { inner }
|
||||
}
|
||||
pub fn with_interceptor<F>(
|
||||
inner: T,
|
||||
interceptor: F,
|
||||
) -> GreeterClient<InterceptedService<T, F>>
|
||||
where
|
||||
F: tonic::service::Interceptor,
|
||||
T::ResponseBody: Default,
|
||||
T: tonic::codegen::Service<
|
||||
http::Request<tonic::body::BoxBody>,
|
||||
Response = http::Response<
|
||||
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
|
||||
>,
|
||||
>,
|
||||
<T as tonic::codegen::Service<
|
||||
http::Request<tonic::body::BoxBody>,
|
||||
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
|
||||
{
|
||||
GreeterClient::new(InterceptedService::new(inner, interceptor))
|
||||
}
|
||||
/// Compress requests with the given encoding.
|
||||
///
|
||||
/// This requires the server to support it otherwise it might respond with an
|
||||
/// error.
|
||||
#[must_use]
|
||||
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.inner = self.inner.send_compressed(encoding);
|
||||
self
|
||||
}
|
||||
/// Enable decompressing responses.
|
||||
#[must_use]
|
||||
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.inner = self.inner.accept_compressed(encoding);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of a decoded message.
|
||||
///
|
||||
/// Default: `4MB`
|
||||
#[must_use]
|
||||
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.inner = self.inner.max_decoding_message_size(limit);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of an encoded message.
|
||||
///
|
||||
/// Default: `usize::MAX`
|
||||
#[must_use]
|
||||
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.inner = self.inner.max_encoding_message_size(limit);
|
||||
self
|
||||
}
|
||||
pub async fn say_hello(
|
||||
&mut self,
|
||||
request: impl tonic::IntoRequest<super::HelloRequest>,
|
||||
) -> std::result::Result<tonic::Response<super::HelloReply>, tonic::Status> {
|
||||
self.inner
|
||||
.ready()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tonic::Status::unknown(
|
||||
format!("Service was not ready: {}", e.into()),
|
||||
)
|
||||
})?;
|
||||
let codec = tonic::codec::ProstCodec::default();
|
||||
let path = http::uri::PathAndQuery::from_static(
|
||||
"/helloworld.Greeter/SayHello",
|
||||
);
|
||||
let mut req = request.into_request();
|
||||
req.extensions_mut()
|
||||
.insert(GrpcMethod::new("helloworld.Greeter", "SayHello"));
|
||||
self.inner.unary(req, path, codec).await
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Generated server implementations.
|
||||
pub mod greeter_server {
|
||||
#![allow(
|
||||
unused_variables,
|
||||
dead_code,
|
||||
missing_docs,
|
||||
clippy::wildcard_imports,
|
||||
clippy::let_unit_value,
|
||||
)]
|
||||
use tonic::codegen::*;
|
||||
/// Generated trait containing gRPC methods that should be implemented for use with GreeterServer.
|
||||
#[async_trait]
|
||||
pub trait Greeter: std::marker::Send + std::marker::Sync + 'static {
|
||||
async fn say_hello(
|
||||
&self,
|
||||
request: tonic::Request<super::HelloRequest>,
|
||||
) -> std::result::Result<tonic::Response<super::HelloReply>, tonic::Status>;
|
||||
}
|
||||
#[derive(Debug)]
|
||||
pub struct GreeterServer<T> {
|
||||
inner: Arc<T>,
|
||||
accept_compression_encodings: EnabledCompressionEncodings,
|
||||
send_compression_encodings: EnabledCompressionEncodings,
|
||||
max_decoding_message_size: Option<usize>,
|
||||
max_encoding_message_size: Option<usize>,
|
||||
}
|
||||
impl<T> GreeterServer<T> {
|
||||
pub fn new(inner: T) -> Self {
|
||||
Self::from_arc(Arc::new(inner))
|
||||
}
|
||||
pub fn from_arc(inner: Arc<T>) -> Self {
|
||||
Self {
|
||||
inner,
|
||||
accept_compression_encodings: Default::default(),
|
||||
send_compression_encodings: Default::default(),
|
||||
max_decoding_message_size: None,
|
||||
max_encoding_message_size: None,
|
||||
}
|
||||
}
|
||||
pub fn with_interceptor<F>(
|
||||
inner: T,
|
||||
interceptor: F,
|
||||
) -> InterceptedService<Self, F>
|
||||
where
|
||||
F: tonic::service::Interceptor,
|
||||
{
|
||||
InterceptedService::new(Self::new(inner), interceptor)
|
||||
}
|
||||
/// Enable decompressing requests with the given encoding.
|
||||
#[must_use]
|
||||
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.accept_compression_encodings.enable(encoding);
|
||||
self
|
||||
}
|
||||
/// Compress responses with the given encoding, if the client supports it.
|
||||
#[must_use]
|
||||
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.send_compression_encodings.enable(encoding);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of a decoded message.
|
||||
///
|
||||
/// Default: `4MB`
|
||||
#[must_use]
|
||||
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.max_decoding_message_size = Some(limit);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of an encoded message.
|
||||
///
|
||||
/// Default: `usize::MAX`
|
||||
#[must_use]
|
||||
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.max_encoding_message_size = Some(limit);
|
||||
self
|
||||
}
|
||||
}
|
||||
impl<T, B> tonic::codegen::Service<http::Request<B>> for GreeterServer<T>
|
||||
where
|
||||
T: Greeter,
|
||||
B: Body + std::marker::Send + 'static,
|
||||
B::Error: Into<StdError> + std::marker::Send + 'static,
|
||||
{
|
||||
type Response = http::Response<tonic::body::BoxBody>;
|
||||
type Error = std::convert::Infallible;
|
||||
type Future = BoxFuture<Self::Response, Self::Error>;
|
||||
fn poll_ready(
|
||||
&mut self,
|
||||
_cx: &mut Context<'_>,
|
||||
) -> Poll<std::result::Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
fn call(&mut self, req: http::Request<B>) -> Self::Future {
|
||||
match req.uri().path() {
|
||||
"/helloworld.Greeter/SayHello" => {
|
||||
#[allow(non_camel_case_types)]
|
||||
struct SayHelloSvc<T: Greeter>(pub Arc<T>);
|
||||
impl<T: Greeter> tonic::server::UnaryService<super::HelloRequest>
|
||||
for SayHelloSvc<T> {
|
||||
type Response = super::HelloReply;
|
||||
type Future = BoxFuture<
|
||||
tonic::Response<Self::Response>,
|
||||
tonic::Status,
|
||||
>;
|
||||
fn call(
|
||||
&mut self,
|
||||
request: tonic::Request<super::HelloRequest>,
|
||||
) -> Self::Future {
|
||||
let inner = Arc::clone(&self.0);
|
||||
let fut = async move {
|
||||
<T as Greeter>::say_hello(&inner, request).await
|
||||
};
|
||||
Box::pin(fut)
|
||||
}
|
||||
}
|
||||
let accept_compression_encodings = self.accept_compression_encodings;
|
||||
let send_compression_encodings = self.send_compression_encodings;
|
||||
let max_decoding_message_size = self.max_decoding_message_size;
|
||||
let max_encoding_message_size = self.max_encoding_message_size;
|
||||
let inner = self.inner.clone();
|
||||
let fut = async move {
|
||||
let method = SayHelloSvc(inner);
|
||||
let codec = tonic::codec::ProstCodec::default();
|
||||
let mut grpc = tonic::server::Grpc::new(codec)
|
||||
.apply_compression_config(
|
||||
accept_compression_encodings,
|
||||
send_compression_encodings,
|
||||
)
|
||||
.apply_max_message_size_config(
|
||||
max_decoding_message_size,
|
||||
max_encoding_message_size,
|
||||
);
|
||||
let res = grpc.unary(method, req).await;
|
||||
Ok(res)
|
||||
};
|
||||
Box::pin(fut)
|
||||
}
|
||||
_ => {
|
||||
Box::pin(async move {
|
||||
let mut response = http::Response::new(empty_body());
|
||||
let headers = response.headers_mut();
|
||||
headers
|
||||
.insert(
|
||||
tonic::Status::GRPC_STATUS,
|
||||
(tonic::Code::Unimplemented as i32).into(),
|
||||
);
|
||||
headers
|
||||
.insert(
|
||||
http::header::CONTENT_TYPE,
|
||||
tonic::metadata::GRPC_CONTENT_TYPE,
|
||||
);
|
||||
Ok(response)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<T> Clone for GreeterServer<T> {
|
||||
fn clone(&self) -> Self {
|
||||
let inner = self.inner.clone();
|
||||
Self {
|
||||
inner,
|
||||
accept_compression_encodings: self.accept_compression_encodings,
|
||||
send_compression_encodings: self.send_compression_encodings,
|
||||
max_decoding_message_size: self.max_decoding_message_size,
|
||||
max_encoding_message_size: self.max_encoding_message_size,
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Generated gRPC service name
|
||||
pub const SERVICE_NAME: &str = "helloworld.Greeter";
|
||||
impl<T> tonic::server::NamedService for GreeterServer<T> {
|
||||
const NAME: &'static str = SERVICE_NAME;
|
||||
}
|
||||
}
|
|
@ -6,7 +6,22 @@ This is a simple example that demonstrates Dapr's job scheduling capabilities.
|
|||
|
||||
To run this example:
|
||||
|
||||
1. Run the multi-app run template:
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the multi-app run template:
|
||||
|
||||
<!-- STEP
|
||||
name: Run multi-app
|
||||
|
@ -33,4 +48,4 @@ dapr run -f .
|
|||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Stop with `ctrl + c`
|
||||
3. Stop with `ctrl + c`
|
|
@ -7,4 +7,5 @@ apps:
|
|||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: [ "cargo", "run", "--example", "jobs" ]
|
||||
schedulerHostAddress: localhost
|
||||
command: [ "cargo", "run", "--example", "jobs" ]
|
|
@ -1,11 +1,11 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use base64::prelude::*;
|
||||
use dapr::add_job_handler_alpha;
|
||||
use dapr::client::JobBuilder;
|
||||
use dapr::dapr::dapr::proto::runtime::v1::app_callback_alpha_server::AppCallbackAlphaServer;
|
||||
use dapr::dapr::dapr::proto::runtime::v1::{JobEventRequest, JobEventResponse};
|
||||
use dapr::dapr::proto::runtime::v1::{
|
||||
app_callback_alpha_server::AppCallbackAlphaServer, JobEventRequest, JobEventResponse,
|
||||
};
|
||||
use dapr::server::appcallbackalpha::{AppCallbackServiceAlpha, JobHandlerMethod};
|
||||
use dapr::{add_job_handler_alpha, serde_json};
|
||||
use prost_types::Any;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::time::sleep;
|
||||
|
@ -41,19 +41,10 @@ async fn backup_job_handler(request: JobEventRequest) -> Result<JobEventResponse
|
|||
// The logic for handling the backup job request
|
||||
|
||||
if request.data.is_some() {
|
||||
// weird value - any type is actually put into the value
|
||||
let any = request.data.unwrap().value;
|
||||
|
||||
// parse any value
|
||||
let any_parsed: JsonAny = serde_json::from_slice(&any).unwrap();
|
||||
|
||||
// Decode the base64-encoded value field
|
||||
let decoded_value = BASE64_STANDARD.decode(any_parsed.value).unwrap();
|
||||
|
||||
// Deserialize the decoded value into a Backup struct
|
||||
let backup_val: Backup = serde_json::from_slice(&decoded_value).unwrap();
|
||||
let backup_val: Backup = serde_json::from_slice(&request.data.unwrap().value).unwrap();
|
||||
|
||||
println!("job received: {:?}", backup_val);
|
||||
println!("job received: {backup_val:?}");
|
||||
}
|
||||
|
||||
Ok(JobEventResponse::default())
|
||||
|
@ -97,9 +88,9 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
let client_addr = "https://127.0.0.1".to_string();
|
||||
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;
|
||||
let address = format!("{}:{}", client_addr, port);
|
||||
let address = format!("{client_addr}:{port}");
|
||||
|
||||
println!("attempting to create a dapr client: {}", address);
|
||||
println!("attempting to create a dapr client: {address}");
|
||||
|
||||
// Create the client
|
||||
let mut client = DaprClient::connect(client_addr).await?;
|
||||
|
@ -136,7 +127,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
let get_resp_backup: Backup =
|
||||
serde_json::from_slice(&get_resp.clone().job.unwrap().data.unwrap().value).unwrap();
|
||||
|
||||
println!("job retrieved: {:?}", get_resp_backup);
|
||||
println!("job retrieved: {get_resp_backup:?}");
|
||||
|
||||
let _delete_resp = client.delete_job_alpha1("prod-db-backup").await?;
|
||||
|
|
@ -14,12 +14,25 @@ This is a simple example that demonstrates Dapr's pub/sub capabilities. To imple
> docker ps
> ```

To run this example:

1. Run the multi-app run template:
1. To run the example we need to first build the examples using the following command:

<!-- STEP
name: Run Subscriber
name: Build
background: false
sleep: 30
timeout: 60
-->

```bash
cargo build --examples
```

<!-- END_STEP -->

2. Run the multi-app run template:

<!-- STEP
name: Run PubSub example
output_match_mode: substring
match_order: sequential
expected_stdout_lines:

@ -104,9 +117,9 @@ expected_stdout_lines:
- '== APP - rust-subscriber == refund_amount: 1200,'
- '== APP - rust-subscriber == }'
- '== APP - rust-publisher == messages published'
background: true
background: false
sleep: 30
timeout_seconds: 90
timeout_seconds: 30
-->


@ -116,16 +129,16 @@ dapr run -f .

<!-- END_STEP -->

2. Stop with `ctrl + c`
3. Stop with `ctrl + c`

### Running without multi-app

1. Run the subscriber with dapr
```bash
dapr run --app-id rust-subscriber --app-protocol grpc --app-port 50051 cargo run -- --example subscriber
dapr run --app-id rust-subscriber --app-protocol grpc --app-port 50051 cargo run -- --example pubsub-subscriber
```

2. Run the publisher with dapr
```bash
dapr run --app-id rust-publisher --app-protocol grpc cargo run -- --example publisher
dapr run --app-id rust-publisher --app-protocol grpc cargo run -- --example pubsub-publisher
```
@ -8,9 +8,9 @@ apps:
    appProtocol: grpc
    appPort: 50051
    logLevel: debug
    command: ["cargo", "run", "--example", "subscriber"]
    command: ["cargo", "run", "--example", "pubsub-subscriber"]
  - appID: rust-publisher
    appDirPath: ./
    appProtocol: grpc
    logLevel: debug
    command: ["cargo", "run", "--example", "publisher"]
    command: ["cargo", "run", "--example", "pubsub-publisher"]
@ -1,4 +1,6 @@
use std::{collections::HashMap, thread, time::Duration};
use std::{collections::HashMap, time::Duration};

use tokio::time;

use dapr::serde::{Deserialize, Serialize};
use dapr::serde_json;

@ -19,7 +21,7 @@ struct Refund {
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // TODO: Handle this issue in the sdk
    // Introduce delay so that dapr grpc port is assigned before app tries to connect
    thread::sleep(Duration::from_secs(2));
    tokio::time::sleep(Duration::from_secs(2)).await;

    // Set address for Dapr connection
    let addr = "https://127.0.0.1".to_string();

@ -37,10 +39,13 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let topic = "A".to_string();
    let topic_b = "B".to_string();

    // Delay to wait for the subscriber to fully start
    time::sleep(Duration::from_secs(5)).await;

    for count in 0..10 {
        let order = Order {
            order_number: count,
            order_details: format!("Count is {}", count),
            order_details: format!("Count is {count}"),
        };
        // message metadata
        let mut metadata = HashMap::<String, String>::new();
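A note on the `thread::sleep` to `tokio::time::sleep` change above: blocking the thread inside an async runtime stalls the executor, while the tokio timer only suspends the current task. A minimal, self-contained illustration (delay shortened for brevity):

```rust
use std::time::Duration;

#[tokio::main]
async fn main() {
    // Blocking variant: parks the whole executor thread, so other tasks on
    // this worker cannot make progress while it sleeps.
    // std::thread::sleep(Duration::from_millis(100));

    // Non-blocking variant: suspends only this task and hands the thread
    // back to the runtime until the timer fires.
    tokio::time::sleep(Duration::from_millis(100)).await;
}
```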
@ -9,4 +9,4 @@ spec:
  - name: redisHost
    value: localhost:6379
  - name: redisPassword
    value: ""
    value: ""
@ -1,11 +1,8 @@
use dapr_macros::topic;
use tonic::transport::Server;

use dapr::appcallback::AppCallbackService;
use dapr::serde::{Deserialize, Serialize};
use dapr::{
    appcallback::*, dapr::dapr::proto::runtime::v1::app_callback_server::AppCallbackServer,
};
use dapr::{appcallback::*, dapr::proto::runtime::v1::app_callback_server::AppCallbackServer};

#[derive(Serialize, Deserialize, Debug)]
struct Order {

@ -21,12 +18,12 @@ struct Refund {

#[topic(pub_sub_name = "pubsub", topic = "A")]
async fn handle_a_event(order: Order) {
    println!("Topic A - {:#?}", order)
    println!("Topic A - {order:#?}")
}

#[topic(pub_sub_name = "pubsub", topic = "B")]
async fn handle_b_event(refund: Refund) {
    println!("Topic B - {:#?}", refund)
    println!("Topic B - {refund:#?}")
}

#[tokio::main]

@ -39,7 +36,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {

    callback_service.add_handler(HandleBEvent.get_handler());

    println!("AppCallback server listening on: {}", addr);
    println!("AppCallback server listening on: {addr}");

    // Create a gRPC server with the callback_service.
    Server::builder()
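The publisher and subscriber round-trip these payloads as JSON via `serde_json`. A small sketch of that serialization step, with the `Order` field types assumed from the publisher code above:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Order {
    order_number: i32, // field types are assumptions; names match the example
    order_details: String,
}

fn main() -> Result<(), serde_json::Error> {
    let count = 4;
    let order = Order {
        order_number: count,
        order_details: format!("Count is {count}"),
    };

    // Publisher side: serialize the struct to JSON bytes before publishing.
    let bytes = serde_json::to_vec(&order)?;

    // Subscriber side: the #[topic] handler receives the payload back as an `Order`.
    let received: Order = serde_json::from_slice(&bytes)?;
    println!("Topic A - {received:#?}");
    Ok(())
}
```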
@ -50,10 +50,19 @@ curl -X POST -H "Content-Type: application/json" http://localhost:3500/v1.0/stat

1. To run the example we need to first build the examples using the following command:

<!-- STEP
name: Build
background: false
sleep: 30
timeout: 60
-->

```bash
cargo build --examples
```

<!-- END_STEP -->

2. Executing the first query
Query:
```json

@ -83,7 +92,7 @@ sleep: 15
timeout_seconds: 30
-->
```bash
dapr run --app-id=rustapp --dapr-grpc-port 3501 --resources-path statestore/ cargo run -- --example query_state_q1
dapr run --app-id=rustapp --dapr-grpc-port 3501 --resources-path statestore/ cargo run -- --example query-state-1
```
<!-- END_STEP -->


@ -117,7 +126,7 @@ sleep: 15
timeout_seconds: 30
-->
```bash
dapr run --app-id=rustapp --dapr-grpc-port 3501 --resources-path statestore/ cargo run -- --example query_state_q2
dapr run --app-id=rustapp --dapr-grpc-port 3501 --resources-path statestore/ cargo run -- --example query-state-2
```
<!-- END_STEP -->
@ -3,7 +3,7 @@ use serde_json::json;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Introduce delay so that dapr grpc port is assigned before app tries to connect
    std::thread::sleep(std::time::Duration::new(5, 0));
    tokio::time::sleep(std::time::Duration::new(5, 0)).await;

    // Set the Dapr address and create a connection
    let addr = "https://127.0.0.1".to_string();

@ -29,7 +29,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
    {
        Ok(response) => response.results,
        Err(e) => {
            println!("Error: {:?}", e);
            println!("Error: {e:?}");
            return Ok(());
        }
    };

@ -43,7 +43,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
            "value": value
        }));
    }
    println!("Query results: {:?}", results);
    println!("Query results: {results:?}");

    Ok(())
}
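Many of the `println!` updates in this diff, here and in the files above, switch from positional arguments to inline captured identifiers, which have been stable since Rust 1.58. A small self-contained illustration:

```rust
fn main() {
    let address = "https://127.0.0.1";
    let port = 3501;

    // Positional form, as the examples were written before this change:
    println!("attempting to connect: {}:{}", address, port);

    // Captured-identifier form, which the diff migrates to:
    println!("attempting to connect: {address}:{port}");

    // Debug formatting can capture identifiers too:
    let results: Vec<u32> = vec![1, 2, 3];
    println!("Query results: {results:?}");
}
```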
@ -3,7 +3,7 @@ use serde_json::json;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Introduce delay so that dapr grpc port is assigned before app tries to connect
    std::thread::sleep(std::time::Duration::new(5, 0));
    tokio::time::sleep(std::time::Duration::new(5, 0)).await;

    // Set the Dapr address and create a connection
    let addr = "https://127.0.0.1".to_string();

@ -23,7 +23,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
    {
        Ok(response) => response.results,
        Err(e) => {
            println!("Error: {:?}", e);
            println!("Error: {e:?}");
            return Ok(());
        }
    };

@ -37,7 +37,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
            "value": value
        }));
    }
    println!("Query results: {:?}", results);
    println!("Query results: {results:?}");

    Ok(())
}
@ -5,10 +5,19 @@ docker ps

1. To run the example we need to first build the examples using the following command:

```
<!-- STEP
name: Build
background: false
sleep: 30
timeout: 60
-->

```bash
cargo build --examples
```

<!-- END_STEP -->

2. Run the example with dapr using the following command:

<!-- STEP
@ -2,5 +2,5 @@
set -e
echo "Home: $HOME"

cd $1
cd src/$1
mm.py README.md
@ -1,17 +0,0 @@
[package]
name = "dapr-macros"
version = "0.15.0"
edition = "2021"
description = "Dapr Rust SDK"
license = "Apache-2.0"

[lib]
proc-macro = true

[dependencies]
async-trait = "0.1"
log = "0.4"
axum = "0.7.4"
syn = { version = "2.0.29", features = ["full"] }
quote = "1.0.8"
proc-macro2 = "1.0"
@ -0,0 +1,12 @@
[package]
name = "proto-gen"
authors.workspace = true
license.workspace = true
edition.workspace = true
publish = false
repository.workspace = true
version.workspace = true
rust-version.workspace = true

[dependencies]
tonic-build = { workspace = true }
@ -0,0 +1,74 @@
use std::env;

use std::path::PathBuf;

fn main() {
    let root_dir = {
        let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        manifest_dir.parent().unwrap().to_path_buf()
    };

    // dapr
    // env::set_var("OUT_DIR", "src");
    proto_gen(
        root_dir.clone(),
        true,
        true,
        "dapr/src/dapr",
        &[
            "proto/dapr/proto/common/v1/common.proto",
            "proto/dapr/proto/runtime/v1/dapr.proto",
            "proto/dapr/proto/runtime/v1/appcallback.proto",
        ],
        &[
            "proto",
            "proto/dapr/proto/common/v1",
            "proto/dapr/proto/runtime/v1",
        ],
    );

    // example - helloworld
    proto_gen(
        root_dir.clone(),
        true,
        true,
        "examples/src/invoke/protos/",
        &["examples/proto/helloworld/helloworld.proto"],
        &["examples/proto/helloworld"],
    );
}

fn proto_gen(
    root_dir: PathBuf,
    build_client: bool,
    build_server: bool,
    out_dir: &str,
    include_dirs: &[&str],
    interface: &[&str],
) {
    let include_dirs = include_dirs
        .iter()
        .map(|path| format!("{}/{}", root_dir.to_str().unwrap(), path))
        .collect::<Vec<_>>();

    println!("included {include_dirs:?}");

    let interface = interface
        .iter()
        .map(|path| format!("{}/{}", root_dir.to_str().unwrap(), path))
        .collect::<Vec<_>>();
    println!("interface {interface:?}");

    let out_dir = root_dir.join(out_dir);
    println!("outdir {out_dir:?}");

    tonic_build::configure()
        .build_client(build_client)
        .build_server(build_server)
        .build_transport(true)
        .out_dir(out_dir.clone())
        .file_descriptor_set_path(out_dir.clone().join("types.bin"))
        .protoc_arg("--experimental_allow_proto3_optional")
        .compile_protos(&include_dirs, &interface)
        .expect("Failed to compile protos");
}
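For orientation, the `proto_gen` helper above boils down to a conventional `tonic-build` invocation. A minimal sketch with placeholder paths (the real paths are assembled from the workspace root, as the generator shows):

```rust
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder paths; the generator above derives the real ones from
    // CARGO_MANIFEST_DIR and writes a file descriptor set alongside the output.
    tonic_build::configure()
        .build_client(true)
        .build_server(true)
        .build_transport(true)
        .out_dir("dapr/src/dapr")
        .compile_protos(
            &["proto/dapr/proto/runtime/v1/dapr.proto"], // proto files to compile
            &["proto"],                                  // include path for imports
        )?;
    Ok(())
}
```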
@ -26,7 +26,7 @@ option go_package = "github.com/dapr/dapr/pkg/proto/common/v1;common";
// when Dapr runtime delivers HTTP content.
//
// For example, when callers calls http invoke api
// POST http://localhost:3500/v1.0/invoke/<app_id>/method/<method>?query1=value1&query2=value2
// `POST http://localhost:3500/v1.0/invoke/<app_id>/method/<method>?query1=value1&query2=value2`
//
// Dapr runtime will parse POST as a verb and extract querystring to quersytring map.
message HTTPExtension {
@ -151,25 +151,39 @@ service Dapr {
  rpc SubtleVerifyAlpha1(SubtleVerifyRequest) returns (SubtleVerifyResponse);

  // Starts a new instance of a workflow
  rpc StartWorkflowAlpha1 (StartWorkflowRequest) returns (StartWorkflowResponse) {}
  rpc StartWorkflowAlpha1 (StartWorkflowRequest) returns (StartWorkflowResponse) {
    option deprecated = true;
  }

  // Gets details about a started workflow instance
  rpc GetWorkflowAlpha1 (GetWorkflowRequest) returns (GetWorkflowResponse) {}
  rpc GetWorkflowAlpha1 (GetWorkflowRequest) returns (GetWorkflowResponse) {
    option deprecated = true;
  }

  // Purge Workflow
  rpc PurgeWorkflowAlpha1 (PurgeWorkflowRequest) returns (google.protobuf.Empty) {}
  rpc PurgeWorkflowAlpha1 (PurgeWorkflowRequest) returns (google.protobuf.Empty) {
    option deprecated = true;
  }

  // Terminates a running workflow instance
  rpc TerminateWorkflowAlpha1 (TerminateWorkflowRequest) returns (google.protobuf.Empty) {}
  rpc TerminateWorkflowAlpha1 (TerminateWorkflowRequest) returns (google.protobuf.Empty) {
    option deprecated = true;
  }

  // Pauses a running workflow instance
  rpc PauseWorkflowAlpha1 (PauseWorkflowRequest) returns (google.protobuf.Empty) {}
  rpc PauseWorkflowAlpha1 (PauseWorkflowRequest) returns (google.protobuf.Empty) {
    option deprecated = true;
  }

  // Resumes a paused workflow instance
  rpc ResumeWorkflowAlpha1 (ResumeWorkflowRequest) returns (google.protobuf.Empty) {}
  rpc ResumeWorkflowAlpha1 (ResumeWorkflowRequest) returns (google.protobuf.Empty) {
    option deprecated = true;
  }

  // Raise an event to a running workflow instance
  rpc RaiseEventWorkflowAlpha1 (RaiseEventWorkflowRequest) returns (google.protobuf.Empty) {}
  rpc RaiseEventWorkflowAlpha1 (RaiseEventWorkflowRequest) returns (google.protobuf.Empty) {
    option deprecated = true;
  }

  // Starts a new instance of a workflow
  rpc StartWorkflowBeta1 (StartWorkflowRequest) returns (StartWorkflowResponse) {}
@ -191,6 +205,7 @@ service Dapr {

  // Raise an event to a running workflow instance
  rpc RaiseEventWorkflowBeta1 (RaiseEventWorkflowRequest) returns (google.protobuf.Empty) {}

  // Shutdown the sidecar
  rpc Shutdown (ShutdownRequest) returns (google.protobuf.Empty) {}


@ -202,6 +217,9 @@ service Dapr {

  // Delete a job
  rpc DeleteJobAlpha1(DeleteJobRequest) returns (DeleteJobResponse) {}

  // Converse with a LLM service
  rpc ConverseAlpha1(ConversationRequest) returns (ConversationResponse) {}
}

// InvokeServiceRequest represents the request message for Service invocation.

@ -493,6 +511,7 @@ message InvokeBindingRequest {
  //
  // Common metadata property:
  // - ttlInSeconds : the time to live in seconds for the message.
  //
  // If set in the binding definition will cause all messages to
  // have a default time to live. The message ttl overrides any value
  // in the binding definition.
@ -675,7 +694,14 @@ message GetMetadataResponse {
  string runtime_version = 8 [json_name = "runtimeVersion"];
  repeated string enabled_features = 9 [json_name = "enabledFeatures"];
  ActorRuntime actor_runtime = 10 [json_name = "actorRuntime"];
  //TODO: Cassie: probably add scheduler runtime status
  optional MetadataScheduler scheduler = 11 [json_name = "scheduler"];
}

// MetadataScheduler is a message that contains the list of addresses of the
// scheduler connections.
message MetadataScheduler {
  // connected_addresses the list of addresses of the scheduler connections.
  repeated string connected_addresses = 1;
}

message ActorRuntime {
@ -839,11 +865,11 @@ message TryLockRequest {
  //
  // The reason why we don't make it automatically generated is:
  // 1. If it is automatically generated,there must be a 'my_lock_owner_id' field in the response.
  // This name is so weird that we think it is inappropriate to put it into the api spec
  // This name is so weird that we think it is inappropriate to put it into the api spec
  // 2. If we change the field 'my_lock_owner_id' in the response to 'lock_owner',which means the current lock owner of this lock,
  // we find that in some lock services users can't get the current lock owner.Actually users don't need it at all.
  // we find that in some lock services users can't get the current lock owner.Actually users don't need it at all.
  // 3. When reentrant lock is needed,the existing lock_owner is required to identify client and check "whether this client can reenter this lock".
  // So this field in the request shouldn't be removed.
  // So this field in the request shouldn't be removed.
  string lock_owner = 3 [json_name = "lockOwner"];

  // Required. The time before expiry.The time unit is second.

@ -880,7 +906,7 @@ message SubtleGetKeyRequest {
    // JSON (JSON Web Key) as string
    JSON = 1;
  }


  // Name of the component
  string component_name = 1 [json_name="componentName"];
  // Name (or name/version) of the key to use in the key vault

@ -1062,7 +1088,7 @@ message EncryptRequestOptions {
  // If true, the encrypted document does not contain a key reference.
  // In that case, calls to the Decrypt method must provide a key reference (name or name/version).
  // Defaults to false.
  bool omit_decryption_key_name = 11 [json_name="omitDecryptionKeyName"];
  bool omit_decryption_key_name = 11 [json_name="omitDecryptionKeyName"];
  // Key reference to embed in the encrypted document (name or name/version).
  // This is helpful if the reference of the key used to decrypt the document is different from the one used to encrypt it.
  // If unset, uses the reference of the key used to encrypt the document (this is the default behavior).
@ -1205,20 +1231,20 @@ message Job {
  //
  // Systemd timer style cron accepts 6 fields:
  // seconds | minutes | hours | day of month | month | day of week
  // 0-59 | 0-59 | 0-23 | 1-31 | 1-12/jan-dec | 0-7/sun-sat
  // 0-59 | 0-59 | 0-23 | 1-31 | 1-12/jan-dec | 0-6/sun-sat
  //
  // "0 30 * * * *" - every hour on the half hour
  // "0 15 3 * * *" - every day at 03:15
  //
  // Period string expressions:
  // Entry | Description | Equivalent To
  // ----- | ----------- | -------------
  // @every <duration> | Run every <duration> (e.g. '@every 1h30m') | N/A
  // @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 0 1 1 *
  // @monthly | Run once a month, midnight, first of month | 0 0 0 1 * *
  // @weekly | Run once a week, midnight on Sunday | 0 0 0 * * 0
  // @daily (or @midnight) | Run once a day, midnight | 0 0 0 * * *
  // @hourly | Run once an hour, beginning of hour | 0 0 * * * *
  // Entry | Description | Equivalent To
  // ----- | ----------- | -------------
  // @every `<duration>` | Run every `<duration>` (e.g. '@every 1h30m') | N/A
  // @yearly (or @annually) | Run once a year, midnight, Jan. 1st | 0 0 0 1 1 *
  // @monthly | Run once a month, midnight, first of month | 0 0 0 1 * *
  // @weekly | Run once a week, midnight on Sunday | 0 0 0 * * 0
  // @daily (or @midnight) | Run once a day, midnight | 0 0 0 * * *
  // @hourly | Run once an hour, beginning of hour | 0 0 * * * *
  optional string schedule = 2 [json_name = "schedule"];

  // repeats is the optional number of times in which the job should be
@ -1274,3 +1300,55 @@ message DeleteJobRequest {
message DeleteJobResponse {
  // Empty
}

// ConversationRequest is the request object for Conversation.
message ConversationRequest {
  // The name of Conversation component
  string name = 1;

  // The ID of an existing chat (like in ChatGPT)
  optional string contextID = 2;

  // Inputs for the conversation, support multiple input in one time.
  repeated ConversationInput inputs = 3;

  // Parameters for all custom fields.
  map<string, google.protobuf.Any> parameters = 4;

  // The metadata passing to conversation components.
  map<string, string> metadata = 5;

  // Scrub PII data that comes back from the LLM
  optional bool scrubPII = 6;

  // Temperature for the LLM to optimize for creativity or predictability
  optional double temperature = 7;
}

message ConversationInput {
  // The content to send to the llm
  string content = 1;

  // The role to set for the message
  optional string role = 2;

  // Scrub PII data that goes into the LLM
  optional bool scrubPII = 3;
}

// ConversationResult is the result for one input.
message ConversationResult {
  // Result for the one conversation input.
  string result = 1;
  // Parameters for all custom fields.
  map<string, google.protobuf.Any> parameters = 2;
}

// ConversationResponse is the response for Conversation.
message ConversationResponse {
  // The ID of an existing chat (like in ChatGPT)
  optional string contextID = 1;

  // An array of results.
  repeated ConversationResult outputs = 2;
}
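A rough sketch of how a request for the new `ConverseAlpha1` RPC might be constructed from the generated types. The module path follows the other generated types in this diff, and the Rust field names are assumptions about prost's snake_case conversion of the proto definitions above:

```rust
use dapr::dapr::proto::runtime::v1::{ConversationInput, ConversationRequest};

fn build_request() -> ConversationRequest {
    // Field names are assumptions about the prost-generated structs: proto
    // field names are converted to snake_case (e.g. scrubPII -> scrub_pii),
    // and proto3 `optional` fields become `Option<T>`.
    ConversationRequest {
        name: "echo".to_string(), // assumed conversation component name
        inputs: vec![ConversationInput {
            content: "What is Dapr?".to_string(),
            role: Some("user".to_string()),
            ..Default::default()
        }],
        temperature: Some(0.7),
        ..Default::default()
    }
}
```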
Some files were not shown because too many files have changed in this diff.