mirror of https://github.com/dapr/rust-sdk.git
Compare commits
226 Commits
Author | SHA1 | Date |
---|---|---|
|
407447816c | |
|
06ea44e8d3 | |
|
942263938a | |
|
9537a3b826 | |
|
35930ebc5f | |
|
acb829cdb5 | |
|
26f4ee6d32 | |
|
4fb85abcea | |
|
f1e469cf57 | |
|
1cdcc272ce | |
|
185dc72889 | |
|
1508ea24cb | |
|
99d099f15e | |
|
c0b7155093 | |
|
ab219049a4 | |
|
57347e70c3 | |
|
93322c0e86 | |
|
52f095f8e7 | |
|
a532ee1a50 | |
|
8bf6013eee | |
|
6973b7d9be | |
|
5241ce696c | |
|
2deed2baa8 | |
|
b9de85f885 | |
|
0c909c544b | |
|
f0498dcc3f | |
|
abecc5c70b | |
|
4da565cbae | |
|
1d88e2074f | |
|
1ac019159c | |
|
c4675a6a09 | |
|
85ed911b1d | |
|
95b009baaf | |
|
e15f65b435 | |
|
fcff470451 | |
|
ae9bcda712 | |
|
851abcae57 | |
|
b2be904225 | |
|
4cfa9a3db2 | |
|
e78cab477e | |
|
709c67e856 | |
|
b1cacef2d7 | |
|
597aa46f0e | |
|
0183be5baa | |
|
580b88ff8a | |
|
e04157d7d7 | |
|
b81f8f387b | |
|
df0bf8818c | |
|
04375f4fd2 | |
|
80a3a90f82 | |
|
fbb81bf518 | |
|
dbc7a80ef1 | |
|
12dd8af64f | |
|
0f29428a55 | |
|
981340a5d8 | |
|
5fd49f196f | |
|
7322ec4cc3 | |
|
8f00930ebc | |
|
f70a18f896 | |
|
78ad9fd685 | |
|
01d2214a0d | |
|
d0b6490d9c | |
|
4e2d316032 | |
|
39071997ac | |
|
97c98af9aa | |
|
1b095c7108 | |
|
81b3834fd7 | |
|
7a508beaea | |
|
0a0efaa9ab | |
|
6f566cb051 | |
|
8d4db4382c | |
|
ece8714f29 | |
|
60c0bbed71 | |
|
0eaf89867c | |
|
ccf2902ed5 | |
|
2c0aa5a779 | |
|
5e699df5a3 | |
|
6301ee43c5 | |
|
7aef43a7b4 | |
|
6951e58060 | |
|
bc728d5262 | |
|
103caa3291 | |
|
5407c713e5 | |
|
da08c030a6 | |
|
641f1248d1 | |
|
3c79d18938 | |
|
b8ce2aafd0 | |
|
c411864c6d | |
|
906f19061a | |
|
4abf5aa650 | |
|
e00a116ff8 | |
|
4d55176164 | |
|
f413a1b934 | |
|
5764c6cf3d | |
|
ea644546fb | |
|
ed283c2e25 | |
|
c6048ef1a2 | |
|
396f571d96 | |
|
223fadac5b | |
|
7dced71f04 | |
|
0beb668e85 | |
|
56da1c1607 | |
|
fe9e52ea5f | |
|
5de625cf62 | |
|
ac71bb7b37 | |
|
02dd85084b | |
|
034da1d5cc | |
|
fb1d6b3018 | |
|
b731100560 | |
|
63c3f94422 | |
|
5fd87bfdd2 | |
|
7e98f9b630 | |
|
52d853bdde | |
|
b9fe7649b8 | |
|
e3bbf921ac | |
|
4dfb399e25 | |
|
8371c9070e | |
|
f4200a39ef | |
|
a3384e01a3 | |
|
372522a775 | |
|
ac2e6e9e54 | |
|
8a28b12719 | |
|
1f738df5bf | |
|
7b03b4e02d | |
|
65f9fc7f9d | |
|
ca9e074b91 | |
|
23a848b11d | |
|
f642bd86c6 | |
|
d21290ee60 | |
|
4cfdc8fe71 | |
|
b28ce2e889 | |
|
90f07be813 | |
|
a1a55bb32a | |
|
895a483b1d | |
|
fe94932e86 | |
|
28f33f214d | |
|
a0b154dac4 | |
|
17671f0492 | |
|
02e8f03d26 | |
|
075526bbb1 | |
|
1de125f236 | |
|
0e08e91b76 | |
|
88f3e970ee | |
|
930b40a8f8 | |
|
aa10558662 | |
|
80c77acfeb | |
|
2895058ae5 | |
|
a9bdb9e1f3 | |
|
94719caae3 | |
|
bcbb5860dc | |
|
1134f8aa57 | |
|
a4e9a965de | |
|
781b70d757 | |
|
50b1ac6c9d | |
|
dba4f0b2b7 | |
|
d97063ddcf | |
|
dae08cf938 | |
|
08d6691933 | |
|
3527b9a430 | |
|
24e3310eba | |
|
b613923ca6 | |
|
0933656683 | |
|
02036474d9 | |
|
a1671cd3f5 | |
|
1eab56e38e | |
|
e1b6191153 | |
|
3617a9e39c | |
|
94a278af74 | |
|
9780a90313 | |
|
baa7146491 | |
|
0d06646279 | |
|
e100e56244 | |
|
648669e9ce | |
|
30f8c3fe21 | |
|
94beb7a416 | |
|
f3cfdf0cc1 | |
|
41136e9a0d | |
|
14f2cb25ba | |
|
1a8ad946f8 | |
|
ba3e413b1d | |
|
1d69f239b3 | |
|
e961a4c836 | |
|
0225e3ebf7 | |
|
1fcb3e16c1 | |
|
85ebee2083 | |
|
6415ed9a18 | |
|
011fa64669 | |
|
7080073b77 | |
|
7591c83aab | |
|
bef6e3b5e5 | |
|
667e85952a | |
|
0364e08883 | |
|
0df62b3d76 | |
|
e36701ddb1 | |
|
9123f29b11 | |
|
8f00997746 | |
|
2a10c0fd7f | |
|
92ea4bfb94 | |
|
fbacb1d567 | |
|
cdc99047cc | |
|
54ba4fb971 | |
|
9a8516d519 | |
|
14e6c231b8 | |
|
5eee0ceca8 | |
|
a9df2d9a25 | |
|
9d9e2f8713 | |
|
98a7266a1a | |
|
ceaccb1218 | |
|
3c6330dc91 | |
|
8db69d84fe | |
|
6950787c22 | |
|
9a93196566 | |
|
9aa50967b1 | |
|
00ca92b817 | |
|
0ba28b2bf1 | |
|
bbe82f1410 | |
|
d50e3f616a | |
|
48e683ea2d | |
|
b69e1f84b0 | |
|
532df85969 | |
|
ecc0e4e813 | |
|
b682ce7768 | |
|
5016f61f7e | |
|
4cf17de437 | |
|
9bcbc7264d | |
|
eb48811f44 |
|
@ -0,0 +1,31 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Report a bug with the Rust SDK
|
||||
title: ''
|
||||
labels: kind/bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
## Expected Behavior
|
||||
|
||||
<!-- Briefly describe what you expect to happen -->
|
||||
|
||||
|
||||
## Actual Behavior
|
||||
|
||||
<!-- Briefly describe what is actually happening -->
|
||||
|
||||
|
||||
## Steps to Reproduce the Problem
|
||||
|
||||
<!-- How can this issue be reproduced (be detailed) -->
|
||||
|
||||
|
||||
## Release Note
|
||||
<!-- How should the fix for this issue be communicated in our release notes? It can be populated later. -->
|
||||
<!-- Keep it as a single line. Examples: -->
|
||||
<!-- RELEASE NOTE: **ADD** New feature in the Rust SDK. -->
|
||||
<!-- RELEASE NOTE: **FIX** Bug in Client. -->
|
||||
<!-- RELEASE NOTE: **UPDATE** Client dependencies. -->
|
||||
|
||||
RELEASE NOTE:
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
name: Discussion
|
||||
about: Start a discussion for the Dapr Rust SDK
|
||||
title: ''
|
||||
labels: kind/discussion
|
||||
assignees: ''
|
||||
|
||||
---
|
|
@ -0,0 +1,20 @@
|
|||
---
|
||||
name: Feature Request/Proposal
|
||||
about: Create a Feature Request/Proposal for the Rust SDK
|
||||
title: ''
|
||||
labels: kind/enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
## Describe the feature/proposal
|
||||
|
||||
|
||||
|
||||
## Release Note
|
||||
<!-- How should this new feature be announced in our release notes? It can be populated later. -->
|
||||
<!-- Keep it as a single line. Examples: -->
|
||||
<!-- RELEASE NOTE: **ADD** New feature in the Rust SDK. -->
|
||||
<!-- RELEASE NOTE: **FIX** Bug in Client. -->
|
||||
<!-- RELEASE NOTE: **UPDATE** Client dependencies. -->
|
||||
|
||||
RELEASE NOTE:
|
|
@ -0,0 +1,13 @@
|
|||
---
|
||||
name: Question
|
||||
about: Ask a question about the Rust SDK
|
||||
title: ''
|
||||
labels: kind/question
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
## Question
|
||||
|
||||
|
||||
<!-- Ask your question here -->
|
||||
<!-- Include as much information as possible to find an answer quicker :) -->
|
|
@ -0,0 +1,11 @@
|
|||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "cargo" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
schedule:
|
||||
interval: "daily"
|
|
@ -0,0 +1,5 @@
|
|||
organization: dapr
|
||||
defaultSticker: clrqfdv4x24910fl5n4iwu5oa
|
||||
stickers:
|
||||
- id: clrqfdv4x24910fl5n4iwu5oa
|
||||
alias: sdk-badge
|
|
@ -0,0 +1,21 @@
|
|||
# Description
|
||||
|
||||
<!--Please explain the changes you've made here-->
|
||||
|
||||
|
||||
|
||||
## Issue reference
|
||||
|
||||
<!--We strive to have all PR being opened based on an issue, where the problem or feature should be discussed prior to implementation.-->
|
||||
|
||||
<!--Please reference the issue(s) with a hashtag for example #100 -->
|
||||
This PR will close #number
|
||||
|
||||
## Checklist
|
||||
|
||||
<!-- This list is non-exhaustive, please ensure the tests are passing. -->
|
||||
<!--Please make sure you've completed the relevant tasks for this PR, out of the following list: -->
|
||||
|
||||
* [ ] Code compiles correctly
|
||||
* [ ] Created/updated tests
|
||||
* [ ] Extended the documentation
|
|
@ -1,54 +1,149 @@
|
|||
name: dapr-rust-sdk
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '8 8 * * *'
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- main
|
||||
- release-*
|
||||
tags:
|
||||
- v*
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- main
|
||||
- release-*
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
|
||||
PROTOC_VERSION: 24.4
|
||||
RUSTFLAGS: "-D warnings"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint
|
||||
name: Check and Lint
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check compiled protos for a diff
|
||||
run: |
|
||||
make proto-gen check-diff-proto
|
||||
|
||||
- name: cargo fmt
|
||||
run: cargo fmt -- --check --color ${{ env.CARGO_TERM_COLOR }}
|
||||
|
||||
- name: cargo clippy
|
||||
run: cargo clippy -- -W warnings
|
||||
run: cargo clippy
|
||||
|
||||
|
||||
build:
|
||||
name: Build
|
||||
name: Test and Build on rust-${{ matrix.rust-version }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
rust-version:
|
||||
- "stable"
|
||||
- "nightly"
|
||||
- "1.78.0" # MSRV
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ matrix.rust-version }}
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build
|
||||
run: cargo build
|
||||
- name: Build examples
|
||||
run: cargo build --examples
|
||||
run: cargo build --workspace --all-targets --all-features
|
||||
- name: Run Tests
|
||||
run: cargo test --all-targets
|
||||
|
||||
run: cargo test --workspace --all-targets --all-features
|
||||
|
||||
test-docs:
|
||||
name: Check Docs - ${{ matrix.crate }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
crate:
|
||||
- dapr
|
||||
- dapr-macros
|
||||
env:
|
||||
RUSTDOCFLAGS: -Dwarnings
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@nightly
|
||||
- uses: dtolnay/install@cargo-docs-rs
|
||||
- run: cargo docs-rs -p ${{ matrix.crate }}
|
||||
|
||||
publish-dry:
|
||||
name: Publish Test
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
crate:
|
||||
- dapr
|
||||
- dapr-macros
|
||||
|
||||
steps:
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: cargo publish - ${{ matrix.crate }}
|
||||
run: cargo publish --manifest-path ${{ matrix.crate }}/Cargo.toml --dry-run
|
||||
|
||||
|
||||
publish:
|
||||
name: Publish
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, build]
|
||||
needs: [test-docs, lint, build, publish-dry]
|
||||
if: startswith(github.ref, 'refs/tags/v')
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
crate:
|
||||
- dapr
|
||||
- dapr-macros
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: cargo publish
|
||||
run: cargo publish --token ${{ env.CARGO_TOKEN }}
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: clippy, rustfmt
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
version: ${{ env.PROTOC_VERSION }}
|
||||
- uses: actions/checkout@v4
|
||||
- name: cargo publish - ${{ matrix.crate }}
|
||||
env:
|
||||
CARGO_TOKEN: ${{ secrets.CARGO_TOKEN }}
|
||||
run: cargo publish --manifest-path ${{ matrix.crate }}/Cargo.toml --token ${{ env.CARGO_TOKEN }}
|
||||
|
|
|
@ -0,0 +1,53 @@
|
|||
#
|
||||
# Copyright 2024 The Dapr Authors
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
name: dapr-bot-schedule
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '*/10 * * * *'
|
||||
jobs:
|
||||
prune_stale:
|
||||
name: Prune Stale
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Prune Stale
|
||||
uses: actions/stale@v9.0.0
|
||||
with:
|
||||
days-before-pr-stale: 60
|
||||
days-before-pr-close: 7
|
||||
days-before-issue-stale: 60
|
||||
days-before-issue-close: 7
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had activity in the
|
||||
last 60 days. It will be closed in the next 7 days unless it is tagged (pinned, good first issue, help wanted) or other activity
|
||||
occurs. Thank you for your contributions.
|
||||
close-issue-message: >
|
||||
This issue has been automatically closed because it has not had activity in the
|
||||
last 67 days. If this issue is still valid, please ping a maintainer and ask them to label it as pinned, good first issue or help wanted.
|
||||
Thank you for your contributions.
|
||||
stale-pr-message: >
|
||||
This pull request has been automatically marked as stale because it has not had
|
||||
activity in the last 60 days. It will be closed in 7 days if no further activity occurs. Please
|
||||
feel free to give a status update now, ping for review, or re-open when it's ready.
|
||||
Thank you for your contributions!
|
||||
close-pr-message: >
|
||||
This pull request has been automatically closed because it has not had
|
||||
activity in the last 67 days. Please feel free to give a status update now, ping for review, or re-open when it's ready.
|
||||
Thank you for your contributions!
|
||||
stale-issue-label: 'stale'
|
||||
exempt-issue-labels: 'pinned,good first issue,help wanted'
|
||||
stale-pr-label: 'stale'
|
||||
exempt-pr-labels: 'pinned'
|
||||
operations-per-run: 500
|
||||
ascending: true
|
|
@ -0,0 +1,45 @@
|
|||
name: dapr-bot-test
|
||||
|
||||
on:
|
||||
push:
|
||||
paths: # Explicitly declare which paths (could potentially be combined into dapr-bot*
|
||||
- ".github/workflows/dapr-bot.yml"
|
||||
- ".github/workflows/dapr-bot-test.yml"
|
||||
- ".github/workflows/dapr-bot/*"
|
||||
pull_request:
|
||||
paths: # Explicitly declare which paths (could potentially be combined into dapr-bot*
|
||||
- ".github/workflows/dapr-bot.yml"
|
||||
- ".github/workflows/dapr-bot-test.yml"
|
||||
- ".github/workflows/dapr-bot/*"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./.github/workflows/dapr-bot
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- uses: swatinem/rust-cache@v2
|
||||
- name: Cargo clippy
|
||||
run: |
|
||||
cargo clippy -- -W warnings
|
||||
|
||||
- name: Cargo fmt
|
||||
run: |
|
||||
cargo fmt -- --check --color ${{ env.CARGO_TERM_COLOR }}
|
||||
|
||||
- name: Cargo test
|
||||
run: |
|
||||
cargo test
|
||||
|
||||
- name: Cargo build
|
||||
run: |
|
||||
cargo build
|
|
@ -0,0 +1,21 @@
|
|||
name: dapr-bot
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./.github/workflows/dapr-bot
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
- uses: swatinem/rust-cache@v2
|
||||
- name: Cargo run
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
cargo run
|
|
@ -0,0 +1,16 @@
|
|||
[package]
|
||||
name = "dapr-bot"
|
||||
authors = ["Mike Nguyen <hey@mike.ee>"]
|
||||
license = "Apache-2.0"
|
||||
license-file = "LICENSE"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
rust-version = "1.70.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
exitcode = "1.1.2"
|
||||
octocrab = "0.42.1"
|
||||
serde_json = "1.0.114"
|
||||
tokio = { version = "1.36.0", features = ["full"] }
|
|
@ -0,0 +1 @@
|
|||
../../../LICENSE
|
|
@ -0,0 +1,28 @@
|
|||
use super::GitHub;
|
||||
|
||||
impl GitHub {
|
||||
pub async fn create_comment(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
number: u64,
|
||||
comment: String,
|
||||
) -> std::result::Result<octocrab::models::issues::Comment, octocrab::Error> {
|
||||
self.client
|
||||
.issues(owner, repo)
|
||||
.create_comment(number, comment)
|
||||
.await
|
||||
}
|
||||
pub async fn add_assignee(
|
||||
&self,
|
||||
owner: &str,
|
||||
repo: &str,
|
||||
number: u64,
|
||||
assignee: String,
|
||||
) -> std::result::Result<octocrab::models::issues::Issue, octocrab::Error> {
|
||||
self.client
|
||||
.issues(owner, repo)
|
||||
.add_assignees(number, &[&assignee])
|
||||
.await
|
||||
}
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
use octocrab::Octocrab;
|
||||
|
||||
pub mod issues;
|
||||
|
||||
pub struct GitHub {
|
||||
client: Octocrab,
|
||||
}
|
||||
|
||||
impl GitHub {
|
||||
pub fn new_client(token: String) -> GitHub {
|
||||
match Octocrab::builder().personal_token(token).build() {
|
||||
Ok(client) => GitHub { client },
|
||||
Err(e) => {
|
||||
panic!("Unable to create client: {:?}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,96 @@
|
|||
mod github;
|
||||
|
||||
use core::panic;
|
||||
use std::{error::Error, fs::File, io::BufReader, path::Path, process::exit};
|
||||
|
||||
use octocrab::models;
|
||||
|
||||
use github::GitHub;
|
||||
|
||||
// Defining the repo explicitly as the octocrab model for the event doesn't deserialize a
|
||||
// owner/repo.
|
||||
const OWNER: &str = "dapr";
|
||||
const REPOSITORY: &str = "rust-sdk";
|
||||
|
||||
const GITHUB_TOKEN: &str = "GITHUB_TOKEN";
|
||||
|
||||
const GITHUB_EVENT_PATH: &str = "GITHUB_EVENT_PATH";
|
||||
const GITHUB_EVENT_NAME: &str = "GITHUB_EVENT_NAME";
|
||||
|
||||
const ISSUE_COMMENT_EVENT_NAME: &str = "issue_comment";
|
||||
|
||||
fn get_payload<P: AsRef<Path>>(
|
||||
path: P,
|
||||
) -> Result<models::events::payload::IssueCommentEventPayload, Box<dyn Error>> {
|
||||
// Open the file in read-only mode with buffer.
|
||||
let file = File::open(path)?;
|
||||
let reader = BufReader::new(file);
|
||||
|
||||
// Read the JSON contents of the file as an instance.
|
||||
let event = serde_json::from_reader(reader)?;
|
||||
|
||||
// Return the event.
|
||||
Ok(event)
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> octocrab::Result<()> {
|
||||
let github_event_path: String =
|
||||
std::env::var(GITHUB_EVENT_PATH).expect("GITHUB_EVENT_PATH env missing");
|
||||
let github_event_name: String =
|
||||
std::env::var(GITHUB_EVENT_NAME).expect("GITHUB_EVENT_NAME env missing");
|
||||
let github_token: String = std::env::var(GITHUB_TOKEN).expect("GITHUB_TOKEN env missing");
|
||||
|
||||
if github_event_name != ISSUE_COMMENT_EVENT_NAME {
|
||||
println!("Event is not an issue_comment, the app will now exit.");
|
||||
exit(exitcode::TEMPFAIL); // This failure is because the bot is not
|
||||
// designed to process anything other than an issue_comment
|
||||
}
|
||||
|
||||
// deserialize event payload
|
||||
let event = get_payload(github_event_path).unwrap();
|
||||
|
||||
// check the issue body
|
||||
if !event.clone().comment.body.unwrap().starts_with("/assign") {
|
||||
println!("Event does not start with /assign");
|
||||
exit(exitcode::OK);
|
||||
}
|
||||
|
||||
let assignee: String = event.comment.user.login;
|
||||
|
||||
let issue: u64 = event.issue.number;
|
||||
|
||||
// spawn a client
|
||||
let github_client = GitHub::new_client(github_token);
|
||||
|
||||
// assign the user
|
||||
match github_client
|
||||
.add_assignee(OWNER, REPOSITORY, issue, assignee.clone())
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
println!("Assigned issue to user successfully");
|
||||
match github_client
|
||||
.create_comment(
|
||||
OWNER,
|
||||
REPOSITORY,
|
||||
issue,
|
||||
format!("🚀 issue assigned to you {assignee}"),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
println!("Comment on assign successful")
|
||||
}
|
||||
Err(e) => {
|
||||
panic!("Comment on assign failed: {:?}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
panic!("Failed to assign issue: {:?}", e)
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
#
|
||||
# Copyright 2021 The Dapr Authors
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
name: fossa
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
tags:
|
||||
- v*
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
fossa-scan:
|
||||
if: github.repository_owner == 'dapr' # FOSSA is not intended to run on forks.
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
FOSSA_API_KEY: b88e1f4287c3108c8751bf106fb46db6 # This is a push-only token that is safe to be exposed.
|
||||
steps:
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: "Run FOSSA Scan"
|
||||
uses: fossas/fossa-action@v1 # Use a specific version if locking is preferred
|
||||
with:
|
||||
api-key: ${{ env.FOSSA_API_KEY }}
|
||||
|
||||
- name: "Run FOSSA Test"
|
||||
uses: fossas/fossa-action@v1 # Use a specific version if locking is preferred
|
||||
with:
|
||||
api-key: ${{ env.FOSSA_API_KEY }}
|
||||
run-tests: true
|
|
@ -0,0 +1,310 @@
|
|||
name: validate-examples
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '8 8 * * *'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
tags:
|
||||
- v*
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- release-*
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dapr_commit:
|
||||
description: "Dapr/Dapr commit to build custom daprd from"
|
||||
required: false
|
||||
default: ""
|
||||
daprcli_commit:
|
||||
description: "Dapr/CLI commit to build custom dapr CLI from"
|
||||
required: false
|
||||
default: ""
|
||||
dapr_version:
|
||||
description: "Dapr/Dapr version to use"
|
||||
required: false
|
||||
default: ""
|
||||
daprcli_version:
|
||||
description: "Dapr/CLI version to use"
|
||||
required: false
|
||||
default: ""
|
||||
repository_dispatch:
|
||||
types: [ validate-examples ]
|
||||
merge_group:
|
||||
jobs:
|
||||
setup:
|
||||
permissions:
|
||||
packages: read
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GOOS: linux
|
||||
GOARCH: amd64
|
||||
GOPROXY: https://proxy.golang.org
|
||||
DAPR_INSTALL_URL: https://raw.githubusercontent.com/dapr/cli/master/install/install.sh
|
||||
DAPR_CLI_REF: ${{ github.event.inputs_daprcli_commit }}
|
||||
DAPR_CLI_VERSION: ${{ github.event.inputs_daprcli_version }}
|
||||
DAPR_REF: ${{ github.event.inputs.dapr_commit }}
|
||||
DAPR_RUNTIME_VERSION: ${{ github.event.inputs.dapr_version }}
|
||||
CHECKOUT_REPO: ${{ github.repository }}
|
||||
CHECKOUT_REF: ${{ github.ref }}
|
||||
outputs:
|
||||
DAPR_INSTALL_URL: ${{ env.DAPR_INSTALL_URL }}
|
||||
DAPR_CLI_VERSION: ${{ steps.outputs.outputs.DAPR_CLI_VERSION }}
|
||||
DAPR_RUNTIME_VERSION: ${{ steps.outputs.outputs.DAPR_RUNTIME_VERSION }}
|
||||
CHECKOUT_REPO: ${{ steps.outputs.outputs.CHECKOUT_REPO }}
|
||||
CHECKOUT_REF: ${{ steps.outputs.outputs.CHECKOUT_REF }}
|
||||
GITHUB_SHA: ${{ steps.outputs.outputs.GITHUB_SHA }}
|
||||
steps:
|
||||
- name: Parse repository_dispatch payload
|
||||
if: github.event_name == 'repository_dispatch'
|
||||
run: |
|
||||
if [ ${{ github.event.client_payload.command }} = "ok-to-test" ]; then
|
||||
echo "CHECKOUT_REPO=${{ github.event.client_payload.pull_head_repo }}" >> $GITHUB_ENV
|
||||
echo "CHECKOUT_REF=${{ github.event.client_payload.pull_head_ref }}" >> $GITHUB_ENV
|
||||
echo "DAPR_REF=master" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: ${{ env.CHECKOUT_REPO }}
|
||||
ref: ${{ env.CHECKOUT_REF }}
|
||||
|
||||
- name: Determine latest Dapr Runtime version
|
||||
if: env.DAPR_RUNTIME_VERSION == ''
|
||||
run: |
|
||||
RUNTIME_VERSION=$(curl -s "https://api.github.com/repos/dapr/dapr/releases/latest" | grep '"tag_name"' | cut -d ':' -f2 | tr -d ' ",v')
|
||||
echo "DAPR_RUNTIME_VERSION=$RUNTIME_VERSION" >> $GITHUB_ENV
|
||||
echo "Found $RUNTIME_VERSION"
|
||||
|
||||
- name: Determine latest Dapr Cli version
|
||||
if: env.DAPR_CLI_VERSION == ''
|
||||
run: |
|
||||
CLI_VERSION=$(curl -s "https://api.github.com/repos/dapr/cli/releases/latest" | grep '"tag_name"' | cut -d ':' -f2 | tr -d ' ",v')
|
||||
echo "DAPR_CLI_VERSION=$CLI_VERSION" >> $GITHUB_ENV
|
||||
echo "Found $CLI_VERSION"
|
||||
|
||||
- name: Checkout Dapr CLI repo to override dapr command.
|
||||
uses: actions/checkout@v4
|
||||
if: env.DAPR_CLI_REF != ''
|
||||
with:
|
||||
repository: dapr/cli
|
||||
ref: ${{ env.DAPR_CLI_REF }}
|
||||
path: cli
|
||||
|
||||
- name: Checkout Dapr repo to override daprd.
|
||||
uses: actions/checkout@v4
|
||||
if: env.DAPR_REF != ''
|
||||
with:
|
||||
repository: dapr/dapr
|
||||
ref: ${{ env.DAPR_REF }}
|
||||
path: dapr_runtime
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
if: env.DAPR_REF != '' || env.DAPR_CLI_REF != ''
|
||||
with:
|
||||
cache: false
|
||||
go-version: "stable"
|
||||
|
||||
- name: Build dapr cli with referenced commit and override version
|
||||
if: env.DAPR_CLI_REF != ''
|
||||
run: |
|
||||
cd cli
|
||||
make
|
||||
echo "artifactPath=~/artifacts/$GITHUB_SHA/" >> $GITHUB_ENV
|
||||
mkdir -p $HOME/artifacts/$GITHUB_SHA/
|
||||
cp dist/linux_amd64/release/dapr $HOME/artifacts/$GITHUB_SHA/dapr
|
||||
CLI_VERSION=edge
|
||||
echo "DAPR_CLI_VERSION=$CLI_VERSION" >> $GITHUB_ENV
|
||||
|
||||
- name: Build daprd and placement with referenced commit and override version
|
||||
if: env.DAPR_REF != ''
|
||||
run: |
|
||||
cd dapr_runtime
|
||||
make
|
||||
echo "artifactPath=~/artifacts/$GITHUB_SHA/" >> $GITHUB_ENV
|
||||
mkdir -p $HOME/artifacts/$GITHUB_SHA/
|
||||
RUNTIME_VERSION=edge
|
||||
echo "DAPR_RUNTIME_VERSION=$RUNTIME_VERSION" >> $GITHUB_ENV
|
||||
|
||||
- name: Build Docker Image
|
||||
if: env.DAPR_REF != ''
|
||||
run: |
|
||||
mkdir ~/dapr_docker
|
||||
cd dapr_runtime
|
||||
docker build --build-arg "PKG_FILES=*" -f ./docker/Dockerfile ./dist/linux_amd64/release -t daprio/dapr:9.0.0-dev
|
||||
|
||||
- name: Download Install Bundle CLI
|
||||
if: env.DAPR_REF != '' && env.DAPR_CLI_REF == ''
|
||||
run: |
|
||||
mkdir -p cli/dist/linux_amd64/release
|
||||
cd cli/dist/linux_amd64/release/
|
||||
curl -L --remote-name https://github.com/dapr/cli/releases/download/v$DAPR_CLI_VERSION/dapr_linux_amd64.tar.gz
|
||||
tar xvzf dapr_linux_amd64.tar.gz
|
||||
ls -la
|
||||
|
||||
- name: Build Custom Install Bundle
|
||||
if: env.DAPR_REF != '' || env.DAPR_CLI_REF != ''
|
||||
run: |
|
||||
: # Create daprbundle directory
|
||||
mkdir ~/daprbundle
|
||||
cp .github/workflows/validate-examples/details.json ~/daprbundle/
|
||||
|
||||
: # Add cli
|
||||
cp cli/dist/linux_amd64/release/dapr ~/daprbundle/dapr
|
||||
|
||||
: # Compress executables to /dist/ appending _linux_amd64 to the name
|
||||
mkdir ~/daprbundle/dist
|
||||
cd dapr_runtime/dist/linux_amd64/release/
|
||||
for file in $(ls -1)
|
||||
do
|
||||
echo "packing $file"
|
||||
tar czvf ~/daprbundle/dist/${file}_linux_amd64.tar.gz ${file}
|
||||
done
|
||||
cd ~/daprbundle/dist/
|
||||
curl -L --remote-name https://github.com/dapr/dashboard/releases/download/v0.14.0/dashboard_linux_amd64.tar.gz
|
||||
|
||||
: # Add docker image
|
||||
mkdir ~/daprbundle/docker
|
||||
docker save daprio/dapr:9.0.0-dev | gzip > ~/daprbundle/docker/daprio-dapr-9.0.0-dev.tar.gz
|
||||
|
||||
: # Bundle
|
||||
cd ~/daprbundle
|
||||
tar czvf $HOME/artifacts/$GITHUB_SHA/daprbundle.tar.gz .
|
||||
|
||||
- name: List artifacts
|
||||
if: env.DAPR_REF != '' || env.DAPR_CLI_REF != ''
|
||||
run: |
|
||||
ls -la $HOME/artifacts/$GITHUB_SHA/
|
||||
|
||||
- name: Upload dapr-artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
if: env.DAPR_REF != '' || env.DAPR_CLI_REF != ''
|
||||
with:
|
||||
name: dapr-artifacts
|
||||
path: ${{ env.artifactPath }}
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
compression-level: 0
|
||||
|
||||
- name: Outputs
|
||||
id: outputs
|
||||
run: |
|
||||
echo "DAPR_INSTALL_URL=$DAPR_INSTALL_URL"
|
||||
echo "DAPR_CLI_VERSION=$DAPR_CLI_VERSION" >> "$GITHUB_OUTPUT"
|
||||
echo "DAPR_RUNTIME_VERSION=$DAPR_RUNTIME_VERSION" >> "$GITHUB_OUTPUT"
|
||||
echo "CHECKOUT_REPO=$CHECKOUT_REPO" >> "$GITHUB_OUTPUT"
|
||||
echo "CHECKOUT_REF=$CHECKOUT_REF" >> "$GITHUB_OUTPUT"
|
||||
echo "GITHUB_SHA=$GITHUB_SHA" >> "$GITHUB_OUTPUT"
|
||||
|
||||
validate-example:
|
||||
permissions:
|
||||
packages: read
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
PYTHON_VER: 3.12
|
||||
DAPR_INSTALL_URL: ${{ needs.setup.outputs.DAPR_INSTALL_URL }}
|
||||
DAPR_CLI_VERSION: ${{ needs.setup.outputs.DAPR_CLI_VERSION }}
|
||||
DAPR_RUNTIME_VERSION: ${{ needs.setup.outputs.DAPR_RUNTIME_VERSION }}
|
||||
CHECKOUT_REPO: ${{ needs.setup.outputs.CHECKOUT_REPO }}
|
||||
CHECKOUT_REF: ${{ needs.setup.outputs.CHECKOUT_REF }}
|
||||
GITHUB_SHA: ${{ needs.setup.outputs.GITHUB_SHA }}
|
||||
RUST_BACKTRACE: full
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
examples:
|
||||
[ "actors", "bindings", "client", "configuration", "conversation", "crypto", "invoke/grpc", "invoke/grpc-proxying", "jobs", "pubsub", "query_state", "secrets-bulk" ]
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: ${{ env.CHECKOUT_REPO }}
|
||||
ref: ${{ env.CHECKOUT_REF }}
|
||||
|
||||
- name: Make Artifacts destination folder
|
||||
if: env.DAPR_CLI_VERSION == 'edge' || env.DAPR_RUNTIME_VERSION == 'edge'
|
||||
run: |
|
||||
mkdir -p $HOME/artifacts/$GITHUB_SHA/
|
||||
|
||||
- name: Retrieve dapr-artifacts
|
||||
if: env.DAPR_CLI_VERSION == 'edge' || env.DAPR_RUNTIME_VERSION == 'edge'
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: dapr-artifacts
|
||||
path: ~/artifacts/${{ env.GITHUB_SHA }}
|
||||
|
||||
- name: Display downloaded artifacts
|
||||
if: env.DAPR_CLI_VERSION == 'edge' || env.DAPR_RUNTIME_VERSION == 'edge'
|
||||
run: |
|
||||
ls -la $HOME/artifacts/$GITHUB_SHA/
|
||||
|
||||
- name: Rust setup
|
||||
run: rustup toolchain install stable --profile minimal
|
||||
|
||||
- name: Install Protoc
|
||||
uses: arduino/setup-protoc@v3
|
||||
with:
|
||||
version: "24.4"
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Dapr CLI ${{ env.DAPR_CLI_VERSION }}
|
||||
if: env.DAPR_CLI_VERSION != 'edge'
|
||||
run: wget -q ${{ env.DAPR_INSTALL_URL }} -O - | /bin/bash -s ${{ env.DAPR_CLI_VERSION }}
|
||||
|
||||
- name: Set up Dapr CLI (edge)
|
||||
if: env.DAPR_CLI_VERSION == 'edge'
|
||||
run: |
|
||||
sudo cp $HOME/artifacts/$GITHUB_SHA/dapr /usr/local/bin/dapr
|
||||
sudo chmod +x /usr/local/bin/dapr
|
||||
|
||||
- name: Initialize Dapr runtime ${{ env.DAPR_RUNTIME_VERSION }}
|
||||
if: env.DAPR_RUNTIME_VERSION != 'edge'
|
||||
run: |
|
||||
dapr uninstall --all
|
||||
dapr init --runtime-version ${{ env.DAPR_RUNTIME_VERSION }}
|
||||
|
||||
- name: Initialize Dapr runtime with installer bundle - EDGE
|
||||
if: env.DAPR_RUNTIME_VERSION == 'edge'
|
||||
run: |
|
||||
: # Unpack Bundle
|
||||
mkdir ~/daprbundle
|
||||
tar xvzf $HOME/artifacts/$GITHUB_SHA/daprbundle.tar.gz -C ~/daprbundle
|
||||
|
||||
: # Run installer
|
||||
dapr uninstall --all
|
||||
dapr init --from-dir ~/daprbundle/
|
||||
|
||||
: # Initialize redis and zipkin
|
||||
docker run --name "dapr_redis" --restart always -d -p 6379:6379 redislabs/rejson
|
||||
docker run --name "dapr_zipkin" --restart always -d -p 9411:9411 openzipkin/zipkin
|
||||
|
||||
- name: List running containers
|
||||
run: |
|
||||
docker ps -a
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VER }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VER }}
|
||||
|
||||
- name: Install Mechanical Markdown
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install mechanical-markdown
|
||||
|
||||
- name: Dapr version
|
||||
run: |
|
||||
dapr version
|
||||
docker ps -a
|
||||
|
||||
- name: Check Example
|
||||
run: |
|
||||
cd examples
|
||||
./validate.sh ${{ matrix.examples }}
|
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
"daprd": "9.0.0-dev",
|
||||
"dashboard": "9.0.0-dev",
|
||||
"cli": "9.0.0-dev",
|
||||
"daprBinarySubDir": "dist",
|
||||
"dockerImageSubDir": "docker",
|
||||
"daprImageName": "daprio/dapr:9.0.0-dev",
|
||||
"daprImageFileName": "daprio-dapr-9.0.0-dev.tar.gz"
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
# Generated by Cargo
|
||||
# will have compiled files and executables
|
||||
/target/
|
||||
target/
|
||||
|
||||
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
|
||||
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
|
||||
|
@ -8,3 +8,13 @@ Cargo.lock
|
|||
|
||||
# These are backup files generated by rustfmt
|
||||
**/*.rs.bk
|
||||
.vscode/settings.json
|
||||
|
||||
# Ignore logs from dapr runs
|
||||
.dapr/
|
||||
|
||||
# OSX cruft
|
||||
.DS_Store
|
||||
|
||||
# IDE configs
|
||||
/.idea/
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
# These owners are the maintainers and approvers of this repo
|
||||
* @dapr/maintainers-rust-sdk @dapr/approvers-rust-sdk
|
||||
|
||||
# IMPORTANT - This should be the last entry in the list.
|
||||
CODEOWNERS @dapr/maintainers-rust-sdk
|
|
@ -54,6 +54,7 @@ This section describes the guidelines for contributing code / docs to Dapr.
|
|||
All contributions come through pull requests. To submit a proposed change, we recommend following this workflow:
|
||||
|
||||
1. Make sure there's an issue (bug or proposal) raised, which sets the expectations for the contribution you are about to make.
|
||||
- Assign yourself to the issue by commenting with `/assign`
|
||||
1. Fork the relevant repo and create a new branch
|
||||
1. Create your change
|
||||
- Code changes require tests
|
||||
|
@ -107,17 +108,6 @@ git commit --amend --no-edit --signoff
|
|||
git push --force-with-lease <remote-name> <branch-name>
|
||||
```
|
||||
|
||||
### Use of Third-party code
|
||||
|
||||
- All third-party code must be placed in the `vendor/` folder.
|
||||
- `vendor/` folder is managed by Go modules and stores the source code of third-party Go dependencies. - The `vendor/` folder should not be modified manually.
|
||||
- Third-party code must include licenses.
|
||||
|
||||
A non-exclusive list of code that must be places in `vendor/`:
|
||||
|
||||
- Open source, free software, or commercially-licensed code.
|
||||
- Tools or libraries or protocols that are open source, free software, or commercially licensed.
|
||||
|
||||
**Thank You!** - Your contributions to open source, large or small, make projects like this possible. Thank you for taking the time to contribute.
|
||||
|
||||
### Fork and set upstream
|
||||
|
|
60
Cargo.toml
60
Cargo.toml
|
@ -1,36 +1,38 @@
|
|||
[package]
|
||||
name = "dapr"
|
||||
version = "0.9.0"
|
||||
authors = ["dapr.io"]
|
||||
edition = "2018"
|
||||
license-file = "LICENSE"
|
||||
repository = "https://github.com/dapr/rust-sdk"
|
||||
description = "Rust SDK for dapr"
|
||||
readme = "README.md"
|
||||
keywords = ["microservices", "dapr"]
|
||||
[workspace]
|
||||
members = [
|
||||
"dapr",
|
||||
"dapr-macros",
|
||||
"proto-gen",
|
||||
"examples",
|
||||
]
|
||||
exclude = [
|
||||
".github/workflows/dapr-bot" # Bot used in GitHub workflow
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
|
||||
[dependencies]
|
||||
tonic = "0.5"
|
||||
prost = "0.8"
|
||||
bytes = "1"
|
||||
prost-types = "0.8"
|
||||
[workspace.dependencies]
|
||||
async-trait = "0.1"
|
||||
prost = "0.13.4"
|
||||
prost-types = "0.13.4"
|
||||
|
||||
[build-dependencies]
|
||||
tonic-build = "0.5"
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio = "1.39"
|
||||
tokio-stream = "0.1"
|
||||
tokio-util = "0.7"
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
path = "examples/client/client.rs"
|
||||
tonic = "0.12.3"
|
||||
tonic-build = "0.12.3"
|
||||
|
||||
[[example]]
|
||||
name = "publisher"
|
||||
path = "examples/pubsub/publisher.rs"
|
||||
[workspace.package]
|
||||
version = "0.16.0"
|
||||
authors = [
|
||||
"Mike Nguyen <hey@mike.ee>",
|
||||
"The Dapr Authors <dapr@dapr.io>"
|
||||
]
|
||||
edition = "2021"
|
||||
license = "Apache-2.0"
|
||||
repository = "https://github.com/dapr/rust-sdk"
|
||||
|
||||
[[example]]
|
||||
name = "subscriber"
|
||||
path = "examples/pubsub/subscriber.rs"
|
||||
rust-version = "1.78.0" # MSRV
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
.PHONY: proto-gen
|
||||
proto-gen:
|
||||
cargo run --bin proto-gen
|
||||
|
||||
.PHONY: check-diff-proto
|
||||
check-diff-proto:
|
||||
git diff --exit-code ./proto/
|
86
README.md
86
README.md
|
@ -1,39 +1,67 @@
|
|||
# Dapr SDK for Rust
|
||||
# Dapr SDK for Rust (Alpha)
|
||||
|
||||
[![Crates.io][crates-badge]][crates-url]
|
||||
[![Build Status][actions-badge]][actions-url]
|
||||
[![discord][discord-badge]][discord-url]
|
||||
[![License: Apache 2.0][apache-badge]][apache-url]
|
||||
[![FOSSA Status][fossa-badge]][fossa-url]
|
||||
|
||||
[crates-badge]: https://img.shields.io/crates/v/dapr.svg
|
||||
[crates-url]: https://crates.io/crates/dapr
|
||||
[apache-badge]: https://img.shields.io/badge/License-Apache_2.0-blue.svg
|
||||
[apache-url]: https://github.com/dapr/rust-sdk/blob/master/LICENSE
|
||||
[actions-badge]: https://github.com/dapr/rust-sdk/workflows/dapr-rust-sdk/badge.svg
|
||||
[actions-url]: https://github.com/dapr/rust-sdk/actions?query=workflow%3Adapr-rust-sdk
|
||||
|
||||
⚠ Work in Progress ⚠
|
||||
[actions-url]: https://github.com/dapr/rust-sdk/actions?query=workflow%3Adapr-rust-sdk+branch%3Amain
|
||||
[fossa-badge]: https://app.fossa.com/api/projects/custom%2B162%2Fgithub.com%2Fdapr%2Frust-sdk.svg?type=shield
|
||||
[fossa-url]: https://app.fossa.com/projects/custom%2B162%2Fgithub.com%2Fdapr%2Frust-sdk?ref=badge_shield
|
||||
[discord-badge]: https://img.shields.io/discord/778680217417809931
|
||||
[discord-url]: https://discord.com/channels/778680217417809931/778680217417809934
|
||||
|
||||
Dapr is a portable, event-driven, serverless runtime for building distributed applications across cloud and edge.
|
||||
|
||||
- [dapr.io](https://dapr.io)
|
||||
- [@DaprDev](https://twitter.com/DaprDev)
|
||||
|
||||
## Prerequsites
|
||||
## Alpha
|
||||
|
||||
* [Install Rust > 1.40](https://www.rust-lang.org/tools/install)
|
||||
This SDK is currently in Alpha. Work is underway to bring forward a stable
|
||||
release and will likely involve breaking changes.
|
||||
- Documentation is incomplete.
|
||||
- Not all building blocks are currently implemented.
|
||||
- There may be bugs.
|
||||
- The SDK does not have complete test coverage.
|
||||
|
||||
## Usage
|
||||
The maintainers commit to resolving any issues that arise and bringing this SDK
|
||||
to a stable release. With this in mind, the SDK will follow the norms and
|
||||
conventions of a stable SDK so far as is possible.
|
||||
|
||||
This SDK will be accounted for as a part of the release process. Support for
|
||||
the latest runtime release is targeted but not guaranteed.
|
||||
|
||||
The main tenet of development will be stability and functionality that improves
|
||||
resiliency.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Ensure you have Rust version 1.78 or higher installed. If not, install Rust [here](https://www.rust-lang.org/tools/install).
|
||||
|
||||
These crates no longer require protoc unless to recompile the protobuf files.
|
||||
|
||||
## How to use
|
||||
|
||||
Add the following to your `Cargo.toml` file:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
dapr = "0.5.0"
|
||||
dapr = "0.16.0"
|
||||
```
|
||||
|
||||
A client can be created as follows:
|
||||
Here's a basic example to create a client:
|
||||
|
||||
```rust
|
||||
```Rust
|
||||
use dapr;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Get the Dapr port and create a connection
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;
|
||||
|
@ -41,27 +69,45 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
|
||||
// Create the client
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
}
|
||||
```
|
||||
|
||||
## Try out examples
|
||||
## Explore more examples
|
||||
|
||||
[Examples](./examples)
|
||||
Browse through more examples to understand the SDK better: [View examples](https://github.com/dapr/rust-sdk/tree/main/examples)
|
||||
|
||||
## Building
|
||||
|
||||
To build
|
||||
To build the SDK run:
|
||||
|
||||
```bash
|
||||
cargo build
|
||||
```
|
||||
|
||||
>Note: The proto buf client generation is built into `cargo build` process so updating the proto files under `dapr/` is enough to update the proto buf client.
|
||||
## Developing (Updating .proto files from upstream Dapr)
|
||||
|
||||
## To refresh .proto files from upstream dapr
|
||||
|
||||
1. Just need to run update-protos.sh, which will basically fetch latest proto updates.
|
||||
2. By default, it picks from master proto. To specify a particular release/version, please specify with a -v flag
|
||||
To fetch the latest .proto files from Dapr execute the script `update-protos.sh`:
|
||||
|
||||
```bash
|
||||
./update-protos.sh -v v1.7.0-rc.2
|
||||
```
|
||||
./update-protos.sh
|
||||
```
|
||||
|
||||
By default, the script fetches the latest proto updates from the master branch of the Dapr repository. If you need to choose a specific release or version, use the -v flag:
|
||||
|
||||
```bash
|
||||
./update-protos.sh -v v1.14.0
|
||||
```
|
||||
|
||||
You will also need to install [protoc](https://github.com/protocolbuffers/protobuf#protobuf-compiler-installation).
|
||||
|
||||
Protos can then be compiled using:
|
||||
|
||||
```bash
|
||||
cargo run proto-gen
|
||||
```
|
||||
|
||||
### Contact Us
|
||||
Reach out with any questions you may have and we'll be sure to answer them as
|
||||
soon as possible!
|
||||
|
||||
[](https://aka.ms/dapr-discord)
|
||||
|
|
14
build.rs
14
build.rs
|
@ -1,14 +0,0 @@
|
|||
// use std::env;
|
||||
|
||||
fn main() -> Result<(), std::io::Error> {
|
||||
// env::set_var("OUT_DIR", "src");
|
||||
tonic_build::configure().build_server(true).compile(
|
||||
&[
|
||||
"dapr/proto/common/v1/common.proto",
|
||||
"dapr/proto/runtime/v1/dapr.proto",
|
||||
"dapr/proto/runtime/v1/appcallback.proto",
|
||||
],
|
||||
&["."],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
target
|
|
@ -0,0 +1,17 @@
|
|||
[package]
|
||||
name = "dapr-macros"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
description = "Dapr Rust SDK (Macros)"
|
||||
license.workspace = true
|
||||
repository.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
syn = { version = "2.0", features = ["full"] }
|
|
@ -0,0 +1,238 @@
|
|||
use std::iter;
|
||||
|
||||
use proc_macro2::TokenTree;
|
||||
use quote::{format_ident, quote};
|
||||
use syn::parse::{Parse, ParseStream};
|
||||
use syn::{parse_macro_input, Ident, LitStr};
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
macro_rules! derive_parse {(
|
||||
@derive_only
|
||||
$( #[$attr:meta] )*
|
||||
$pub:vis
|
||||
struct $StructName:ident {
|
||||
$(
|
||||
$( #[$field_attr:meta] )*
|
||||
$field_pub:vis
|
||||
$field_name:ident : $FieldTy:ty
|
||||
),* $(,)?
|
||||
}
|
||||
) => (
|
||||
impl Parse for $StructName {
|
||||
fn parse (input: ParseStream)
|
||||
-> ::syn::Result<Self>
|
||||
{
|
||||
mod kw {
|
||||
$(
|
||||
::syn::custom_keyword!( $field_name );
|
||||
)*
|
||||
}
|
||||
use ::core::ops::Not as _;
|
||||
|
||||
$(
|
||||
let mut $field_name = ::core::option::Option::None::< $FieldTy >;
|
||||
)*
|
||||
while input.is_empty().not() {
|
||||
let lookahead = input.lookahead1();
|
||||
match () {
|
||||
$(
|
||||
_case if lookahead.peek(kw::$field_name) => {
|
||||
let span = input.parse::<kw::$field_name>().unwrap().span;
|
||||
let _: ::syn::Token![ = ] = input.parse()?;
|
||||
let prev = $field_name.replace(input.parse()?);
|
||||
if prev.is_some() {
|
||||
return ::syn::Result::Err(::syn::Error::new(span, "Duplicate key"));
|
||||
}
|
||||
},
|
||||
)*
|
||||
_default => return ::syn::Result::Err(lookahead.error()),
|
||||
}
|
||||
let _: ::core::option::Option<::syn::Token![ , ]> = input.parse()?;
|
||||
}
|
||||
Ok(Self {
|
||||
$(
|
||||
$field_name: $field_name.ok_or_else(|| ::syn::Error::new(
|
||||
::proc_macro2::Span::call_site(),
|
||||
::core::concat!("Missing key `", ::core::stringify!($field_name), "`"),
|
||||
))?,
|
||||
)*
|
||||
})
|
||||
}
|
||||
}
|
||||
); (
|
||||
$( #[$attr:meta] )* $pub:vis struct $($rest:tt)*
|
||||
) => (
|
||||
$( #[$attr] )* $pub struct $($rest)*
|
||||
|
||||
derive_parse! { @derive_only $( #[$attr] )* $pub struct $($rest)* }
|
||||
)}
|
||||
|
||||
derive_parse! {
|
||||
struct TopicArgs {
|
||||
pub_sub_name: LitStr,
|
||||
topic: LitStr
|
||||
}
|
||||
}
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn actor(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
||||
let actor_struct_name = match syn::parse::<syn::ItemStruct>(item.clone()) {
|
||||
Ok(actor_struct) => actor_struct.ident.clone(),
|
||||
Err(_) => match syn::parse::<syn::ItemType>(item.clone()) {
|
||||
Ok(ty) => ty.ident.clone(),
|
||||
Err(e) => panic!("Error parsing actor struct: {e}"),
|
||||
},
|
||||
};
|
||||
|
||||
let mut result = TokenStream::from(quote!(
|
||||
#[async_trait::async_trait]
|
||||
impl dapr::server::actor::axum::extract::FromRequestParts<dapr::server::actor::runtime::ActorState> for &#actor_struct_name {
|
||||
type Rejection = dapr::server::actor::ActorRejection;
|
||||
|
||||
async fn from_request_parts(
|
||||
parts: &mut dapr::server::actor::axum::http::request::Parts,
|
||||
state: &dapr::server::actor::runtime::ActorState,
|
||||
) -> Result<Self, Self::Rejection> {
|
||||
let path = match dapr::server::actor::axum::extract::Path::<dapr::server::actor::ActorPath>::from_request_parts(parts, state).await {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
log::error!("Error getting path: {}", e);
|
||||
return Err(dapr::server::actor::ActorRejection::Path(e));
|
||||
}
|
||||
};
|
||||
let actor_type = state.actor_type.clone();
|
||||
let actor_id = path.actor_id.clone();
|
||||
log::info!(
|
||||
"Request for actor_type: {}, actor_id: {}",
|
||||
actor_type,
|
||||
actor_id
|
||||
);
|
||||
let actor = match state
|
||||
.runtime
|
||||
.get_or_create_actor(&actor_type, &actor_id)
|
||||
.await
|
||||
{
|
||||
Ok(actor) => actor,
|
||||
Err(e) => {
|
||||
log::error!("Error getting actor: {}", e);
|
||||
return Err(dapr::server::actor::ActorRejection::ActorError(e.to_string()));
|
||||
}
|
||||
};
|
||||
let actor = actor.as_ref();
|
||||
let well_known_actor =
|
||||
unsafe { &*(actor as *const dyn dapr::server::actor::Actor as *const #actor_struct_name) };
|
||||
Ok(well_known_actor)
|
||||
}
|
||||
}
|
||||
));
|
||||
|
||||
result.extend(iter::once(item));
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn topic(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
let new_input = proc_macro2::TokenStream::from(input);
|
||||
let mut iter = new_input.clone().into_iter().filter(|i| match i {
|
||||
TokenTree::Group(_) => true,
|
||||
TokenTree::Ident(_) => true,
|
||||
TokenTree::Punct(_) => false,
|
||||
TokenTree::Literal(_) => false,
|
||||
});
|
||||
|
||||
let mut current = iter.next().unwrap();
|
||||
|
||||
while current.to_string() != "fn" {
|
||||
current = iter.next().unwrap()
|
||||
}
|
||||
|
||||
let name = iter.next().unwrap();
|
||||
|
||||
let struct_name = name
|
||||
.to_string()
|
||||
.split('_')
|
||||
.map(|i| {
|
||||
let mut chars: Vec<char> = i.chars().collect();
|
||||
chars[0] = chars[0].to_ascii_uppercase();
|
||||
let new_string: String = chars.into_iter().collect();
|
||||
new_string
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
.join("");
|
||||
|
||||
let name_ident = Ident::new(name.to_string().as_str(), name.span());
|
||||
|
||||
let struct_name_ident = Ident::new(struct_name.as_str(), name.span());
|
||||
|
||||
let vars: Vec<String> = iter
|
||||
.next()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.replace(['(', ')'], "")
|
||||
.split(':')
|
||||
.enumerate()
|
||||
.filter(|&(i, _)| i % 2 != 0)
|
||||
.map(|(_, i)| i.trim().to_string())
|
||||
.collect();
|
||||
|
||||
assert_eq!(vars.len(), 1, "Expected to only have one input variable");
|
||||
|
||||
let parse_statement = match vars[0] == *"String" {
|
||||
true => {
|
||||
quote! {
|
||||
let message = message.to_string();
|
||||
}
|
||||
}
|
||||
false => {
|
||||
let type_ident = format_ident!("{}", vars[0]);
|
||||
quote! {
|
||||
let message: #type_ident = dapr::serde_json::from_str(message.to_string().as_str()).unwrap();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let args = parse_macro_input!(args as TopicArgs);
|
||||
|
||||
let topic = args.topic.value();
|
||||
|
||||
let pub_sub_name = args.pub_sub_name.value();
|
||||
|
||||
let tokens = quote! {
|
||||
#new_input
|
||||
|
||||
#[derive(Default)]
|
||||
struct #struct_name_ident;
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl dapr::appcallback::HandlerMethod for #struct_name_ident {
|
||||
async fn handler(
|
||||
&self,
|
||||
request: TopicEventRequest,
|
||||
) -> Result<tonic::Response<TopicEventResponse>, tonic::Status> {
|
||||
let data = &request.data;
|
||||
let data_content_type = &request.data_content_type;
|
||||
|
||||
let message = String::from_utf8_lossy(&data);
|
||||
|
||||
#parse_statement
|
||||
|
||||
#name_ident(message).await;
|
||||
|
||||
Ok(tonic::Response::new(TopicEventResponse::default()))
|
||||
}
|
||||
}
|
||||
impl #struct_name_ident {
|
||||
pub fn get_handler(self) -> dapr::appcallback::Handler {
|
||||
dapr::appcallback::Handler {
|
||||
pub_sub_name: #pub_sub_name.to_string(),
|
||||
topic: #topic.to_string(),
|
||||
handler: Box::new(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
tokens.into()
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
[package]
|
||||
name = "dapr"
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
repository.workspace = true
|
||||
description = "Rust SDK for dapr"
|
||||
readme = "README.md"
|
||||
keywords = ["microservices", "dapr"]
|
||||
rust-version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
axum = "0.7"
|
||||
chrono = "0.4"
|
||||
futures = "0.3"
|
||||
log = "0.4"
|
||||
prost = { workspace = true }
|
||||
prost-types = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tonic = { workspace = true }
|
||||
tokio = { workspace = true, features = ["sync"] }
|
||||
tokio-util = { workspace = true, features = ["io"] }
|
||||
|
||||
|
||||
[dev-dependencies]
|
||||
axum-test = "=16.4.0" # TODO: Remove problematic dep
|
||||
litemap = "=0.7.4" # TODO: Remove pinned - linked to axum_test
|
||||
zerofrom = "=0.1.5" # TODO: Remove pinned - linked to axum_test
|
||||
reserve-port = "=2.1.0" # TODO: Remove pinned - linked to axum_test
|
||||
idna_adapter = "=1.2.0"
|
||||
|
||||
once_cell = "1.19"
|
||||
dapr = { path = "./" }
|
||||
dapr-macros = { path = "../dapr-macros" }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
uuid = { version = "1.10", features = ["v4"] }
|
||||
tokio-stream = { workspace = true }
|
|
@ -0,0 +1 @@
|
|||
../README.md
|
|
@ -1,598 +0,0 @@
|
|||
/*
|
||||
Copyright 2021 The Dapr Authors
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package dapr.proto.runtime.v1;
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
import "google/protobuf/empty.proto";
|
||||
import "dapr/proto/common/v1/common.proto";
|
||||
|
||||
option csharp_namespace = "Dapr.Client.Autogen.Grpc.v1";
|
||||
option java_outer_classname = "DaprProtos";
|
||||
option java_package = "io.dapr.v1";
|
||||
option go_package = "github.com/dapr/dapr/pkg/proto/runtime/v1;runtime";
|
||||
|
||||
// Dapr service provides APIs to user application to access Dapr building blocks.
|
||||
service Dapr {
|
||||
// Invokes a method on a remote Dapr app.
|
||||
rpc InvokeService(InvokeServiceRequest) returns (common.v1.InvokeResponse) {}
|
||||
|
||||
// Gets the state for a specific key.
|
||||
rpc GetState(GetStateRequest) returns (GetStateResponse) {}
|
||||
|
||||
// Gets a bulk of state items for a list of keys
|
||||
rpc GetBulkState(GetBulkStateRequest) returns (GetBulkStateResponse) {}
|
||||
|
||||
// Saves the state for a specific key.
|
||||
rpc SaveState(SaveStateRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Queries the state.
|
||||
rpc QueryStateAlpha1(QueryStateRequest) returns (QueryStateResponse) {}
|
||||
|
||||
// Deletes the state for a specific key.
|
||||
rpc DeleteState(DeleteStateRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Deletes a bulk of state items for a list of keys
|
||||
rpc DeleteBulkState(DeleteBulkStateRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Executes transactions for a specified store
|
||||
rpc ExecuteStateTransaction(ExecuteStateTransactionRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Publishes events to the specific topic.
|
||||
rpc PublishEvent(PublishEventRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Invokes binding data to specific output bindings
|
||||
rpc InvokeBinding(InvokeBindingRequest) returns (InvokeBindingResponse) {}
|
||||
|
||||
// Gets secrets from secret stores.
|
||||
rpc GetSecret(GetSecretRequest) returns (GetSecretResponse) {}
|
||||
|
||||
// Gets a bulk of secrets
|
||||
rpc GetBulkSecret(GetBulkSecretRequest) returns (GetBulkSecretResponse) {}
|
||||
|
||||
// Register an actor timer.
|
||||
rpc RegisterActorTimer(RegisterActorTimerRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Unregister an actor timer.
|
||||
rpc UnregisterActorTimer(UnregisterActorTimerRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Register an actor reminder.
|
||||
rpc RegisterActorReminder(RegisterActorReminderRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Unregister an actor reminder.
|
||||
rpc UnregisterActorReminder(UnregisterActorReminderRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Rename an actor reminder.
|
||||
rpc RenameActorReminder(RenameActorReminderRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Gets the state for a specific actor.
|
||||
rpc GetActorState(GetActorStateRequest) returns (GetActorStateResponse) {}
|
||||
|
||||
// Executes state transactions for a specified actor
|
||||
rpc ExecuteActorStateTransaction(ExecuteActorStateTransactionRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// InvokeActor calls a method on an actor.
|
||||
rpc InvokeActor (InvokeActorRequest) returns (InvokeActorResponse) {}
|
||||
|
||||
// GetConfiguration gets configuration from configuration store.
|
||||
rpc GetConfigurationAlpha1(GetConfigurationRequest) returns (GetConfigurationResponse) {}
|
||||
|
||||
// SubscribeConfiguration gets configuration from the configuration store and subscribes to update events via a gRPC stream
|
||||
rpc SubscribeConfigurationAlpha1(SubscribeConfigurationRequest) returns (stream SubscribeConfigurationResponse) {}
|
||||
|
||||
// UnSubscribeConfiguration unsubscribes from a configuration subscription
|
||||
rpc UnsubscribeConfigurationAlpha1(UnsubscribeConfigurationRequest) returns (UnsubscribeConfigurationResponse) {}
|
||||
|
||||
// TryLockAlpha1 tries to get a lock with an expiry.
|
||||
rpc TryLockAlpha1(TryLockRequest)returns (TryLockResponse) {}
|
||||
|
||||
// UnlockAlpha1 unlocks a lock.
|
||||
rpc UnlockAlpha1(UnlockRequest)returns (UnlockResponse) {}
|
||||
|
||||
// Gets metadata of the sidecar
|
||||
rpc GetMetadata (google.protobuf.Empty) returns (GetMetadataResponse) {}
|
||||
|
||||
// Sets value in extended metadata of the sidecar
|
||||
rpc SetMetadata (SetMetadataRequest) returns (google.protobuf.Empty) {}
|
||||
|
||||
// Shutdown the sidecar
|
||||
rpc Shutdown (google.protobuf.Empty) returns (google.protobuf.Empty) {}
|
||||
}
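For orientation, these RPCs correspond to the tonic-generated `DaprClient` that appears later in this diff. Below is a minimal sketch of calling `GetState` directly through that generated client; the crate/module path, sidecar address, store name, and key are assumptions for illustration, not SDK-mandated values.

```rust
use dapr::dapr::proto::runtime::v1 as dapr_v1;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to the Dapr sidecar's gRPC endpoint (address is an assumption).
    let mut client =
        dapr_v1::dapr_client::DaprClient::connect("http://127.0.0.1:50001").await?;

    // GetState: the store name and key are illustrative values.
    let response = client
        .get_state(dapr_v1::GetStateRequest {
            store_name: "statestore".to_string(),
            key: "order-1".to_string(),
            ..Default::default()
        })
        .await?
        .into_inner();

    println!("value: {} bytes, etag: {}", response.data.len(), response.etag);
    Ok(())
}
```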
|
||||
|
||||
// InvokeServiceRequest represents the request message for Service invocation.
|
||||
message InvokeServiceRequest {
|
||||
// Required. Callee's app id.
|
||||
string id = 1;
|
||||
|
||||
// Required. message which will be delivered to callee.
|
||||
common.v1.InvokeRequest message = 3;
|
||||
}
|
||||
|
||||
// GetStateRequest is the message to get key-value states from specific state store.
|
||||
message GetStateRequest {
|
||||
// The name of state store.
|
||||
string store_name = 1;
|
||||
|
||||
// The key of the desired state
|
||||
string key = 2;
|
||||
|
||||
// The read consistency of the state store.
|
||||
common.v1.StateOptions.StateConsistency consistency = 3;
|
||||
|
||||
// The metadata which will be sent to state store components.
|
||||
map<string, string> metadata = 4;
|
||||
}
|
||||
|
||||
// GetBulkStateRequest is the message to get a list of key-value states from specific state store.
|
||||
message GetBulkStateRequest {
|
||||
// The name of state store.
|
||||
string store_name = 1;
|
||||
|
||||
// The keys to get.
|
||||
repeated string keys = 2;
|
||||
|
||||
// The number of parallel operations executed on the state store for a get operation.
|
||||
int32 parallelism = 3;
|
||||
|
||||
// The metadata which will be sent to state store components.
|
||||
map<string, string> metadata = 4;
|
||||
}
|
||||
|
||||
// GetBulkStateResponse is the response conveying the list of state values.
|
||||
message GetBulkStateResponse {
|
||||
// The list of items containing the keys to get values for.
|
||||
repeated BulkStateItem items = 1;
|
||||
}
|
||||
|
||||
// BulkStateItem is the response item for a bulk get operation.
|
||||
// Return values include the item key, data and etag.
|
||||
message BulkStateItem {
|
||||
// state item key
|
||||
string key = 1;
|
||||
|
||||
// The byte array data
|
||||
bytes data = 2;
|
||||
|
||||
// The entity tag which represents the specific version of data.
|
||||
// ETag format is defined by the corresponding data store.
|
||||
string etag = 3;
|
||||
|
||||
// The error that was returned from the state store in case of a failed get operation.
|
||||
string error = 4;
|
||||
|
||||
// The metadata which will be sent to app.
|
||||
map<string, string> metadata = 5;
|
||||
}
|
||||
|
||||
// GetStateResponse is the response conveying the state value and etag.
|
||||
message GetStateResponse {
|
||||
// The byte array data
|
||||
bytes data = 1;
|
||||
|
||||
// The entity tag which represents the specific version of data.
|
||||
// ETag format is defined by the corresponding data store.
|
||||
string etag = 2;
|
||||
|
||||
// The metadata which will be sent to app.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
// DeleteStateRequest is the message to delete key-value states in the specific state store.
|
||||
message DeleteStateRequest {
|
||||
// The name of state store.
|
||||
string store_name = 1;
|
||||
|
||||
// The key of the desired state
|
||||
string key = 2;
|
||||
|
||||
// The entity tag which represents the specific version of data.
|
||||
// The exact ETag format is defined by the corresponding data store.
|
||||
common.v1.Etag etag = 3;
|
||||
|
||||
// State operation options which includes concurrency/
|
||||
// consistency/retry_policy.
|
||||
common.v1.StateOptions options = 4;
|
||||
|
||||
// The metadata which will be sent to state store components.
|
||||
map<string, string> metadata = 5;
|
||||
}
|
||||
|
||||
// DeleteBulkStateRequest is the message to delete a list of key-value states from specific state store.
|
||||
message DeleteBulkStateRequest {
|
||||
// The name of state store.
|
||||
string store_name = 1;
|
||||
|
||||
// The array of the state key values.
|
||||
repeated common.v1.StateItem states = 2;
|
||||
}
|
||||
|
||||
// SaveStateRequest is the message to save multiple states into state store.
|
||||
message SaveStateRequest {
|
||||
// The name of state store.
|
||||
string store_name = 1;
|
||||
|
||||
// The array of the state key values.
|
||||
repeated common.v1.StateItem states = 2;
|
||||
}
|
||||
|
||||
// QueryStateRequest is the message to query state store.
|
||||
message QueryStateRequest {
|
||||
// The name of state store.
|
||||
string store_name = 1;
|
||||
|
||||
// The query in JSON format.
|
||||
string query = 2;
|
||||
|
||||
// The metadata which will be sent to state store components.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
message QueryStateItem {
|
||||
// The object key.
|
||||
string key = 1;
|
||||
|
||||
// The object value.
|
||||
bytes data = 2;
|
||||
|
||||
// The entity tag which represents the specific version of data.
|
||||
// ETag format is defined by the corresponding data store.
|
||||
string etag = 3;
|
||||
|
||||
// The error message indicating an error in processing of the query result.
|
||||
string error = 4;
|
||||
}
|
||||
|
||||
// QueryStateResponse is the response conveying the query results.
|
||||
message QueryStateResponse {
|
||||
// An array of query results.
|
||||
repeated QueryStateItem results = 1;
|
||||
|
||||
// Pagination token.
|
||||
string token = 2;
|
||||
|
||||
// The metadata which will be sent to app.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
// PublishEventRequest is the message to publish event data to pubsub topic
|
||||
message PublishEventRequest {
|
||||
// The name of the pubsub component
|
||||
string pubsub_name = 1;
|
||||
|
||||
// The pubsub topic
|
||||
string topic = 2;
|
||||
|
||||
// The data which will be published to topic.
|
||||
bytes data = 3;
|
||||
|
||||
// The content type for the data (optional).
|
||||
string data_content_type = 4;
|
||||
|
||||
// The metadata passing to pub components
|
||||
//
|
||||
// metadata property:
|
||||
// - key : the key of the message.
|
||||
map<string, string> metadata = 5;
|
||||
}
|
||||
|
||||
// InvokeBindingRequest is the message to send data to output bindings
|
||||
message InvokeBindingRequest {
|
||||
// The name of the output binding to invoke.
|
||||
string name = 1;
|
||||
|
||||
// The data which will be sent to output binding.
|
||||
bytes data = 2;
|
||||
|
||||
// The metadata passing to output binding components
|
||||
//
|
||||
// Common metadata property:
|
||||
// - ttlInSeconds : the time to live in seconds for the message.
|
||||
// If set in the binding definition will cause all messages to
|
||||
// have a default time to live. The message ttl overrides any value
|
||||
// in the binding definition.
|
||||
map<string, string> metadata = 3;
|
||||
|
||||
// The name of the operation type for the binding to invoke
|
||||
string operation = 4;
|
||||
}
|
||||
|
||||
// InvokeBindingResponse is the message returned from an output binding invocation
|
||||
message InvokeBindingResponse {
|
||||
// The data which will be sent to output binding.
|
||||
bytes data = 1;
|
||||
|
||||
// The metadata returned from an external system
|
||||
map<string, string> metadata = 2;
|
||||
}
|
||||
|
||||
// GetSecretRequest is the message to get secret from secret store.
|
||||
message GetSecretRequest {
|
||||
// The name of secret store.
|
||||
string store_name = 1;
|
||||
|
||||
// The name of secret key.
|
||||
string key = 2;
|
||||
|
||||
// The metadata which will be sent to secret store components.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
// GetSecretResponse is the response message to convey the requested secret.
|
||||
message GetSecretResponse {
|
||||
// data is the secret value. Some secret store, such as kubernetes secret
|
||||
// store, can save multiple secrets for single secret key.
|
||||
map<string, string> data = 1;
|
||||
}
|
||||
|
||||
// GetBulkSecretRequest is the message to get the secrets from secret store.
|
||||
message GetBulkSecretRequest {
|
||||
// The name of secret store.
|
||||
string store_name = 1;
|
||||
|
||||
// The metadata which will be sent to secret store components.
|
||||
map<string, string> metadata = 2;
|
||||
}
|
||||
|
||||
// SecretResponse is a map of decrypted string/string values
|
||||
message SecretResponse {
|
||||
map<string, string> secrets = 1;
|
||||
}
|
||||
|
||||
// GetBulkSecretResponse is the response message to convey the requested secrets.
|
||||
message GetBulkSecretResponse {
|
||||
// data hold the secret values. Some secret store, such as kubernetes secret
|
||||
// store, can save multiple secrets for single secret key.
|
||||
map<string, SecretResponse> data = 1;
|
||||
}
|
||||
|
||||
// TransactionalStateOperation is the message to execute a specified operation with a key-value pair.
|
||||
message TransactionalStateOperation {
|
||||
// The type of operation to be executed
|
||||
string operationType = 1;
|
||||
|
||||
// State values to be operated on
|
||||
common.v1.StateItem request = 2;
|
||||
}
|
||||
|
||||
// ExecuteStateTransactionRequest is the message to execute multiple operations on a specified store.
|
||||
message ExecuteStateTransactionRequest {
|
||||
// Required. name of state store.
|
||||
string storeName = 1;
|
||||
|
||||
// Required. transactional operation list.
|
||||
repeated TransactionalStateOperation operations = 2;
|
||||
|
||||
// The metadata used for transactional operations.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
// RegisterActorTimerRequest is the message to register a timer for an actor of a given type and id.
|
||||
message RegisterActorTimerRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string name = 3;
|
||||
string due_time = 4;
|
||||
string period = 5;
|
||||
string callback = 6;
|
||||
bytes data = 7;
|
||||
string ttl = 8;
|
||||
}
|
||||
|
||||
// UnregisterActorTimerRequest is the message to unregister an actor timer
|
||||
message UnregisterActorTimerRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string name = 3;
|
||||
}
|
||||
|
||||
// RegisterActorReminderRequest is the message to register a reminder for an actor of a given type and id.
|
||||
message RegisterActorReminderRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string name = 3;
|
||||
string due_time = 4;
|
||||
string period = 5;
|
||||
bytes data = 6;
|
||||
string ttl = 7;
|
||||
}
|
||||
|
||||
// UnregisterActorReminderRequest is the message to unregister an actor reminder.
|
||||
message UnregisterActorReminderRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string name = 3;
|
||||
}
|
||||
|
||||
// RenameActorReminderRequest is the message to rename an actor reminder.
|
||||
message RenameActorReminderRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string old_name = 3;
|
||||
string new_name = 4;
|
||||
}
|
||||
|
||||
// GetActorStateRequest is the message to get key-value states from specific actor.
|
||||
message GetActorStateRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string key = 3;
|
||||
}
|
||||
|
||||
// GetActorStateResponse is the response conveying the actor's state value.
|
||||
message GetActorStateResponse {
|
||||
bytes data = 1;
|
||||
}
|
||||
|
||||
// ExecuteActorStateTransactionRequest is the message to execute multiple operations on a specified actor.
|
||||
message ExecuteActorStateTransactionRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
repeated TransactionalActorStateOperation operations = 3;
|
||||
}
|
||||
|
||||
// TransactionalActorStateOperation is the message to execute a specified operation with a key-value pair.
|
||||
message TransactionalActorStateOperation {
|
||||
string operationType = 1;
|
||||
string key = 2;
|
||||
google.protobuf.Any value = 3;
|
||||
}
|
||||
|
||||
// InvokeActorRequest is the message to call an actor.
|
||||
message InvokeActorRequest {
|
||||
string actor_type = 1;
|
||||
string actor_id = 2;
|
||||
string method = 3;
|
||||
bytes data = 4;
|
||||
}
|
||||
|
||||
// InvokeActorResponse is the response message for an actor invocation.
|
||||
message InvokeActorResponse {
|
||||
bytes data = 1;
|
||||
}
|
||||
|
||||
// GetMetadataResponse is a message that is returned on GetMetadata rpc call
|
||||
message GetMetadataResponse {
|
||||
string id = 1;
|
||||
repeated ActiveActorsCount active_actors_count = 2;
|
||||
repeated RegisteredComponents registered_components = 3;
|
||||
map<string, string> extended_metadata = 4;
|
||||
}
|
||||
|
||||
message ActiveActorsCount {
|
||||
string type = 1;
|
||||
int32 count = 2;
|
||||
}
|
||||
|
||||
message RegisteredComponents {
|
||||
string name = 1;
|
||||
string type = 2;
|
||||
string version = 3;
|
||||
repeated string capabilities = 4;
|
||||
}
|
||||
|
||||
message SetMetadataRequest {
|
||||
string key = 1;
|
||||
string value = 2;
|
||||
}
|
||||
|
||||
// GetConfigurationRequest is the message to get a list of key-value configuration from specified configuration store.
|
||||
message GetConfigurationRequest {
|
||||
// Required. The name of configuration store.
|
||||
string store_name = 1;
|
||||
|
||||
// Optional. The key of the configuration item to fetch.
|
||||
// If set, only query for the specified configuration items.
|
||||
// Empty list means fetch all.
|
||||
repeated string keys = 2;
|
||||
|
||||
// Optional. The metadata which will be sent to configuration store components.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
// GetConfigurationResponse is the response conveying the list of configuration values.
|
||||
// It should be the FULL configuration of specified application which contains all of its configuration items.
|
||||
message GetConfigurationResponse {
|
||||
repeated common.v1.ConfigurationItem items = 1;
|
||||
}
|
||||
|
||||
// SubscribeConfigurationRequest is the message to get a list of key-value configuration from specified configuration store.
|
||||
message SubscribeConfigurationRequest {
|
||||
// The name of configuration store.
|
||||
string store_name = 1;
|
||||
|
||||
// Optional. The key of the configuration item to fetch.
|
||||
// If set, only query for the specified configuration items.
|
||||
// Empty list means fetch all.
|
||||
repeated string keys = 2;
|
||||
|
||||
// The metadata which will be sent to configuration store components.
|
||||
map<string, string> metadata = 3;
|
||||
}
|
||||
|
||||
// UnSubscribeConfigurationRequest is the message to stop watching the key-value configuration.
|
||||
message UnsubscribeConfigurationRequest {
|
||||
// The name of configuration store.
|
||||
string store_name = 1;
|
||||
|
||||
// The id to unsubscribe.
|
||||
string id = 2;
|
||||
}
|
||||
|
||||
message SubscribeConfigurationResponse {
|
||||
// Subscribe id, used to stop subscription.
|
||||
string id = 1;
|
||||
|
||||
// The list of items containing configuration values
|
||||
repeated common.v1.ConfigurationItem items = 2;
|
||||
}
|
||||
|
||||
message UnsubscribeConfigurationResponse {
|
||||
bool ok = 1;
|
||||
string message = 2;
|
||||
}
|
||||
|
||||
message TryLockRequest {
|
||||
// Required. The lock store name, e.g. `redis`.
|
||||
string store_name = 1;
|
||||
|
||||
// Required. resource_id is the lock key. e.g. `order_id_111`
|
||||
// It stands for "which resource I want to protect"
|
||||
string resource_id = 2;
|
||||
|
||||
// Required. lock_owner indicates the identifier of the lock owner.
|
||||
// You can generate a uuid as lock_owner. For example, in golang:
|
||||
//
|
||||
// req.LockOwner = uuid.New().String()
|
||||
//
|
||||
// This field is per request, not per process, so it is different for each request,
|
||||
// which aims to prevent multiple threads in the same process from trying to take the same lock concurrently.
|
||||
//
|
||||
// The reason why we don't make it automatically generated is:
|
||||
// 1. If it is automatically generated, there must be a 'my_lock_owner_id' field in the response.
|
||||
// This name is so weird that we think it is inappropriate to put it into the api spec
|
||||
// 2. If we change the field 'my_lock_owner_id' in the response to 'lock_owner', which means the current lock owner of this lock,
|
||||
// we find that in some lock services users can't get the current lock owner. Actually users don't need it at all.
|
||||
// 3. When a reentrant lock is needed, the existing lock_owner is required to identify the client and check "whether this client can reenter this lock".
|
||||
// So this field in the request shouldn't be removed.
|
||||
string lock_owner = 3;
|
||||
|
||||
// Required. The time before expiry. The time unit is seconds.
|
||||
int32 expiryInSeconds = 4;
|
||||
}
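The comment above gives the Go idiom for generating `lock_owner`. The Rust equivalent, using the `uuid` crate pinned with the `v4` feature in the workspace manifest earlier in this diff, would look like this (illustrative only):

```rust
// Generate a per-request lock owner id, mirroring the Go snippet above.
let lock_owner = uuid::Uuid::new_v4().to_string();
```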
|
||||
|
||||
|
||||
message TryLockResponse {
|
||||
|
||||
bool success = 1;
|
||||
}
|
||||
|
||||
message UnlockRequest {
|
||||
string store_name = 1;
|
||||
// resource_id is the lock key.
|
||||
string resource_id = 2;
|
||||
|
||||
string lock_owner = 3;
|
||||
}
|
||||
|
||||
message UnlockResponse {
|
||||
enum Status {
|
||||
SUCCESS = 0;
|
||||
LOCK_UNEXIST = 1;
|
||||
LOCK_BELONG_TO_OTHERS = 2;
|
||||
INTERNAL_ERROR = 3;
|
||||
}
|
||||
|
||||
Status status = 1;
|
||||
}
|
|
@@ -0,0 +1,168 @@
|
|||
use crate::dapr::proto::runtime::v1::app_callback_server::AppCallback;
|
||||
use crate::dapr::proto::{common, runtime};
|
||||
use std::collections::HashMap;
|
||||
use tonic::{Code, Request, Response, Status};
|
||||
|
||||
/// InvokeRequest is the message to invoke a method with the data.
|
||||
pub type InvokeRequest = common::v1::InvokeRequest;
|
||||
|
||||
/// InvokeResponse is the response message including data and its content type
|
||||
/// from app callback.
|
||||
pub type InvokeResponse = common::v1::InvokeResponse;
|
||||
|
||||
/// ListTopicSubscriptionsResponse is the message including the list of the subscribing topics.
|
||||
pub type ListTopicSubscriptionsResponse = runtime::v1::ListTopicSubscriptionsResponse;
|
||||
|
||||
/// TopicSubscription represents a topic and its metadata (session id etc.)
|
||||
pub type TopicSubscription = runtime::v1::TopicSubscription;
|
||||
|
||||
/// TopicEventRequest message is compatible with CloudEvent spec v1.0.
|
||||
pub type TopicEventRequest = runtime::v1::TopicEventRequest;
|
||||
|
||||
/// TopicEventResponse is the response from the app for a published message
|
||||
pub type TopicEventResponse = runtime::v1::TopicEventResponse;
|
||||
|
||||
/// ListInputBindingsResponse is the message including the list of input bindings.
|
||||
pub type ListInputBindingsResponse = runtime::v1::ListInputBindingsResponse;
|
||||
|
||||
/// BindingEventRequest represents input bindings event.
|
||||
pub type BindingEventRequest = runtime::v1::BindingEventRequest;
|
||||
|
||||
/// BindingEventResponse includes operations to save state or
|
||||
/// send data to output bindings optionally.
|
||||
pub type BindingEventResponse = runtime::v1::BindingEventResponse;
|
||||
|
||||
impl ListTopicSubscriptionsResponse {
|
||||
/// Create `ListTopicSubscriptionsResponse` with a topic.
|
||||
pub fn topic(pubsub_name: String, topic: String) -> Self {
|
||||
let topic_subscription = TopicSubscription::new(pubsub_name, topic, None);
|
||||
|
||||
Self {
|
||||
subscriptions: vec![topic_subscription],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TopicSubscription {
|
||||
/// Create a new `TopicSubscription` for a given topic.
|
||||
pub fn new(
|
||||
pubsub_name: String,
|
||||
topic: String,
|
||||
metadata: Option<HashMap<String, String>>,
|
||||
) -> Self {
|
||||
let mut topic_subscription = TopicSubscription {
|
||||
pubsub_name,
|
||||
topic,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if let Some(metadata) = metadata {
|
||||
topic_subscription.metadata = metadata;
|
||||
}
|
||||
|
||||
topic_subscription
|
||||
}
|
||||
}
|
||||
|
||||
impl ListInputBindingsResponse {
|
||||
pub fn binding(binding_name: String) -> Self {
|
||||
Self {
|
||||
bindings: vec![binding_name],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AppCallbackService {
|
||||
handlers: Vec<Handler>,
|
||||
}
|
||||
|
||||
pub struct Handler {
|
||||
pub pub_sub_name: String,
|
||||
pub topic: String,
|
||||
pub handler: Box<dyn HandlerMethod>,
|
||||
}
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl AppCallback for AppCallbackService {
|
||||
async fn on_invoke(
|
||||
&self,
|
||||
_request: Request<common::v1::InvokeRequest>,
|
||||
) -> Result<Response<common::v1::InvokeResponse>, Status> {
|
||||
Ok(Response::new(InvokeResponse::default()))
|
||||
}
|
||||
|
||||
async fn list_topic_subscriptions(
|
||||
&self,
|
||||
_request: Request<()>,
|
||||
) -> Result<Response<runtime::v1::ListTopicSubscriptionsResponse>, Status> {
|
||||
let topics = self
|
||||
.handlers
|
||||
.iter()
|
||||
.fold(Vec::new(), |mut topics, handler| {
|
||||
topics.push(TopicSubscription::new(
|
||||
handler.pub_sub_name.clone(),
|
||||
handler.topic.clone(),
|
||||
None,
|
||||
));
|
||||
topics
|
||||
});
|
||||
Ok(Response::new(ListTopicSubscriptionsResponse {
|
||||
subscriptions: topics,
|
||||
}))
|
||||
}
|
||||
|
||||
async fn on_topic_event(
|
||||
&self,
|
||||
request: Request<runtime::v1::TopicEventRequest>,
|
||||
) -> Result<Response<runtime::v1::TopicEventResponse>, Status> {
|
||||
let request_inner = request.into_inner();
|
||||
let pub_sub_name = request_inner.pubsub_name.clone();
|
||||
let topic_name = request_inner.topic.clone();
|
||||
let handler = self
|
||||
.handlers
|
||||
.iter()
|
||||
.find(|x| x.pub_sub_name == pub_sub_name && x.topic == topic_name);
|
||||
if let Some(handler) = handler {
|
||||
return handler.handler.handler(request_inner).await;
|
||||
}
|
||||
Err(Status::new(Code::Internal, "Handler Not Found"))
|
||||
}
|
||||
|
||||
async fn list_input_bindings(
|
||||
&self,
|
||||
_request: Request<()>,
|
||||
) -> Result<Response<runtime::v1::ListInputBindingsResponse>, Status> {
|
||||
Ok(Response::new(ListInputBindingsResponse::default()))
|
||||
}
|
||||
|
||||
async fn on_binding_event(
|
||||
&self,
|
||||
_request: Request<runtime::v1::BindingEventRequest>,
|
||||
) -> Result<Response<runtime::v1::BindingEventResponse>, Status> {
|
||||
Ok(Response::new(BindingEventResponse::default()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AppCallbackService {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl AppCallbackService {
|
||||
pub fn new() -> AppCallbackService {
|
||||
AppCallbackService { handlers: vec![] }
|
||||
}
|
||||
|
||||
pub fn add_handler(&mut self, handler: Handler) {
|
||||
self.handlers.push(handler)
|
||||
}
|
||||
}
|
||||
|
||||
#[tonic::async_trait]
|
||||
pub trait HandlerMethod: Send + Sync + 'static {
|
||||
async fn handler(
|
||||
&self,
|
||||
request: runtime::v1::TopicEventRequest,
|
||||
) -> Result<Response<runtime::v1::TopicEventResponse>, Status>;
|
||||
}
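For orientation, a minimal sketch of plugging a topic handler into `AppCallbackService`; the handler struct, pubsub component name, and topic are illustrative, not part of the SDK:

```rust
// Hypothetical handler that logs each event and acknowledges it.
struct PrintHandler;

#[tonic::async_trait]
impl HandlerMethod for PrintHandler {
    async fn handler(
        &self,
        request: runtime::v1::TopicEventRequest,
    ) -> Result<Response<runtime::v1::TopicEventResponse>, Status> {
        println!("{} bytes received on topic {}", request.data.len(), request.topic);
        Ok(Response::new(TopicEventResponse::default()))
    }
}

fn build_service() -> AppCallbackService {
    let mut service = AppCallbackService::new();
    service.add_handler(Handler {
        pub_sub_name: "pubsub".to_string(), // illustrative component name
        topic: "orders".to_string(),        // illustrative topic
        handler: Box::new(PrintHandler),
    });
    service
}
```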
|
File diff suppressed because it is too large
|
@@ -0,0 +1,3 @@
|
|||
# dapr
|
||||
|
||||
These are compiled by running the proto-gen crate.
|
|
@@ -0,0 +1,272 @@
|
|||
// This file is @generated by prost-build.
|
||||
/// HTTPExtension includes HTTP verb and querystring
|
||||
/// when Dapr runtime delivers HTTP content.
|
||||
///
|
||||
/// For example, when a caller calls the http invoke api
|
||||
/// `POST <http://localhost:3500/v1.0/invoke/<app_id>/method/<method>?query1=value1&query2=value2>`
|
||||
///
|
||||
/// Dapr runtime will parse POST as a verb and extract the querystring into a querystring map.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HttpExtension {
|
||||
/// Required. HTTP verb.
|
||||
#[prost(enumeration = "http_extension::Verb", tag = "1")]
|
||||
pub verb: i32,
|
||||
/// Optional. querystring represents an encoded HTTP url query string in the following format: name=value&name2=value2
|
||||
#[prost(string, tag = "2")]
|
||||
pub querystring: ::prost::alloc::string::String,
|
||||
}
|
||||
/// Nested message and enum types in `HTTPExtension`.
|
||||
pub mod http_extension {
|
||||
/// Type of HTTP 1.1 Methods
|
||||
/// RFC 7231: <https://tools.ietf.org/html/rfc7231#page-24>
|
||||
/// RFC 5789: <https://datatracker.ietf.org/doc/html/rfc5789>
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
::prost::Enumeration
|
||||
)]
|
||||
#[repr(i32)]
|
||||
pub enum Verb {
|
||||
None = 0,
|
||||
Get = 1,
|
||||
Head = 2,
|
||||
Post = 3,
|
||||
Put = 4,
|
||||
Delete = 5,
|
||||
Connect = 6,
|
||||
Options = 7,
|
||||
Trace = 8,
|
||||
Patch = 9,
|
||||
}
|
||||
impl Verb {
|
||||
/// String value of the enum field names used in the ProtoBuf definition.
|
||||
///
|
||||
/// The values are not transformed in any way and thus are considered stable
|
||||
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
|
||||
pub fn as_str_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::None => "NONE",
|
||||
Self::Get => "GET",
|
||||
Self::Head => "HEAD",
|
||||
Self::Post => "POST",
|
||||
Self::Put => "PUT",
|
||||
Self::Delete => "DELETE",
|
||||
Self::Connect => "CONNECT",
|
||||
Self::Options => "OPTIONS",
|
||||
Self::Trace => "TRACE",
|
||||
Self::Patch => "PATCH",
|
||||
}
|
||||
}
|
||||
/// Creates an enum from field names used in the ProtoBuf definition.
|
||||
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
|
||||
match value {
|
||||
"NONE" => Some(Self::None),
|
||||
"GET" => Some(Self::Get),
|
||||
"HEAD" => Some(Self::Head),
|
||||
"POST" => Some(Self::Post),
|
||||
"PUT" => Some(Self::Put),
|
||||
"DELETE" => Some(Self::Delete),
|
||||
"CONNECT" => Some(Self::Connect),
|
||||
"OPTIONS" => Some(Self::Options),
|
||||
"TRACE" => Some(Self::Trace),
|
||||
"PATCH" => Some(Self::Patch),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
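The `as_str_name`/`from_str_name` pair round-trips between the Rust enum and the proto field names; a few illustrative assertions:

```rust
use http_extension::Verb;

assert_eq!(Verb::Post.as_str_name(), "POST");
assert_eq!(Verb::from_str_name("POST"), Some(Verb::Post));
assert_eq!(Verb::from_str_name("BREW"), None); // unknown names map to None
```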
|
||||
/// InvokeRequest is the message to invoke a method with the data.
|
||||
/// This message is used in InvokeService of Dapr gRPC Service and OnInvoke
|
||||
/// of AppCallback gRPC service.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct InvokeRequest {
|
||||
/// Required. method is a method name which will be invoked by caller.
|
||||
#[prost(string, tag = "1")]
|
||||
pub method: ::prost::alloc::string::String,
|
||||
/// Required in unary RPCs. Bytes value or Protobuf message which caller sent.
|
||||
/// Dapr treats Any.value as bytes type if Any.type_url is unset.
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub data: ::core::option::Option<::prost_types::Any>,
|
||||
/// The type of data content.
|
||||
///
|
||||
/// This field is required if data delivers http request body
|
||||
/// Otherwise, this is optional.
|
||||
#[prost(string, tag = "3")]
|
||||
pub content_type: ::prost::alloc::string::String,
|
||||
/// HTTP specific fields if request conveys http-compatible request.
|
||||
///
|
||||
/// This field is required for http-compatible request. Otherwise,
|
||||
/// this field is optional.
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub http_extension: ::core::option::Option<HttpExtension>,
|
||||
}
|
||||
/// InvokeResponse is the response message including data and its content type
|
||||
/// from app callback.
|
||||
/// This message is used in InvokeService of Dapr gRPC Service and OnInvoke
|
||||
/// of AppCallback gRPC service.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct InvokeResponse {
|
||||
/// Required in unary RPCs. The content body of InvokeService response.
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub data: ::core::option::Option<::prost_types::Any>,
|
||||
/// Required. The type of data content.
|
||||
#[prost(string, tag = "2")]
|
||||
pub content_type: ::prost::alloc::string::String,
|
||||
}
|
||||
/// Chunk of data sent in a streaming request or response.
|
||||
/// This is used in requests including InternalInvokeRequestStream.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct StreamPayload {
|
||||
/// Data sent in the chunk.
|
||||
/// The amount of data included in each chunk is up to the discretion of the sender, and can be empty.
|
||||
/// Additionally, the amount of data doesn't need to be fixed and subsequent messages can send more, or less, data.
|
||||
/// Receivers must not make assumptions about the number of bytes they'll receive in each chunk.
|
||||
#[prost(bytes = "vec", tag = "1")]
|
||||
pub data: ::prost::alloc::vec::Vec<u8>,
|
||||
/// Sequence number. This is a counter that starts from 0 and increments by 1 on each chunk sent.
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub seq: u64,
|
||||
}
|
||||
/// StateItem represents state key, value, and additional options to save state.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct StateItem {
|
||||
/// Required. The state key
|
||||
#[prost(string, tag = "1")]
|
||||
pub key: ::prost::alloc::string::String,
|
||||
/// Required. The state data for key
|
||||
#[prost(bytes = "vec", tag = "2")]
|
||||
pub value: ::prost::alloc::vec::Vec<u8>,
|
||||
/// The entity tag which represents the specific version of data.
|
||||
/// The exact ETag format is defined by the corresponding data store.
|
||||
#[prost(message, optional, tag = "3")]
|
||||
pub etag: ::core::option::Option<Etag>,
|
||||
/// The metadata which will be passed to state store component.
|
||||
#[prost(map = "string, string", tag = "4")]
|
||||
pub metadata: ::std::collections::HashMap<
|
||||
::prost::alloc::string::String,
|
||||
::prost::alloc::string::String,
|
||||
>,
|
||||
/// Options for concurrency and consistency to save the state.
|
||||
#[prost(message, optional, tag = "5")]
|
||||
pub options: ::core::option::Option<StateOptions>,
|
||||
}
|
||||
/// Etag represents a state item version
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct Etag {
|
||||
/// value sets the etag value
|
||||
#[prost(string, tag = "1")]
|
||||
pub value: ::prost::alloc::string::String,
|
||||
}
|
||||
/// StateOptions configures concurrency and consistency for state operations
|
||||
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
|
||||
pub struct StateOptions {
|
||||
#[prost(enumeration = "state_options::StateConcurrency", tag = "1")]
|
||||
pub concurrency: i32,
|
||||
#[prost(enumeration = "state_options::StateConsistency", tag = "2")]
|
||||
pub consistency: i32,
|
||||
}
|
||||
/// Nested message and enum types in `StateOptions`.
|
||||
pub mod state_options {
|
||||
/// Enum describing the supported concurrency for state.
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
::prost::Enumeration
|
||||
)]
|
||||
#[repr(i32)]
|
||||
pub enum StateConcurrency {
|
||||
ConcurrencyUnspecified = 0,
|
||||
ConcurrencyFirstWrite = 1,
|
||||
ConcurrencyLastWrite = 2,
|
||||
}
|
||||
impl StateConcurrency {
|
||||
/// String value of the enum field names used in the ProtoBuf definition.
|
||||
///
|
||||
/// The values are not transformed in any way and thus are considered stable
|
||||
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
|
||||
pub fn as_str_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::ConcurrencyUnspecified => "CONCURRENCY_UNSPECIFIED",
|
||||
Self::ConcurrencyFirstWrite => "CONCURRENCY_FIRST_WRITE",
|
||||
Self::ConcurrencyLastWrite => "CONCURRENCY_LAST_WRITE",
|
||||
}
|
||||
}
|
||||
/// Creates an enum from field names used in the ProtoBuf definition.
|
||||
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
|
||||
match value {
|
||||
"CONCURRENCY_UNSPECIFIED" => Some(Self::ConcurrencyUnspecified),
|
||||
"CONCURRENCY_FIRST_WRITE" => Some(Self::ConcurrencyFirstWrite),
|
||||
"CONCURRENCY_LAST_WRITE" => Some(Self::ConcurrencyLastWrite),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Enum describing the supported consistency for state.
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Hash,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
::prost::Enumeration
|
||||
)]
|
||||
#[repr(i32)]
|
||||
pub enum StateConsistency {
|
||||
ConsistencyUnspecified = 0,
|
||||
ConsistencyEventual = 1,
|
||||
ConsistencyStrong = 2,
|
||||
}
|
||||
impl StateConsistency {
|
||||
/// String value of the enum field names used in the ProtoBuf definition.
|
||||
///
|
||||
/// The values are not transformed in any way and thus are considered stable
|
||||
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
|
||||
pub fn as_str_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::ConsistencyUnspecified => "CONSISTENCY_UNSPECIFIED",
|
||||
Self::ConsistencyEventual => "CONSISTENCY_EVENTUAL",
|
||||
Self::ConsistencyStrong => "CONSISTENCY_STRONG",
|
||||
}
|
||||
}
|
||||
/// Creates an enum from field names used in the ProtoBuf definition.
|
||||
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
|
||||
match value {
|
||||
"CONSISTENCY_UNSPECIFIED" => Some(Self::ConsistencyUnspecified),
|
||||
"CONSISTENCY_EVENTUAL" => Some(Self::ConsistencyEventual),
|
||||
"CONSISTENCY_STRONG" => Some(Self::ConsistencyStrong),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
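Since prost stores enum-typed fields as `i32`, setting concurrency and consistency on a `StateItem` takes an explicit cast; a small illustrative construction (key and payload are made up):

```rust
let item = StateItem {
    key: "order-1".to_string(),
    value: br#"{"qty":3}"#.to_vec(),
    etag: None,
    metadata: Default::default(),
    options: Some(StateOptions {
        concurrency: state_options::StateConcurrency::ConcurrencyLastWrite as i32,
        consistency: state_options::StateConsistency::ConsistencyStrong as i32,
    }),
};
```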
|
||||
/// ConfigurationItem represents all the configuration with its name(key).
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct ConfigurationItem {
|
||||
/// Required. The value of configuration item.
|
||||
#[prost(string, tag = "1")]
|
||||
pub value: ::prost::alloc::string::String,
|
||||
/// Version is response only and cannot be fetched. Store is not expected to keep all versions available
|
||||
#[prost(string, tag = "2")]
|
||||
pub version: ::prost::alloc::string::String,
|
||||
/// the metadata which will be passed to/from configuration store component.
|
||||
#[prost(map = "string, string", tag = "3")]
|
||||
pub metadata: ::std::collections::HashMap<
|
||||
::prost::alloc::string::String,
|
||||
::prost::alloc::string::String,
|
||||
>,
|
||||
}
|
File diff suppressed because it is too large
Binary file not shown.
|
@@ -1,4 +1,4 @@
|
|||
use std::{convert::From, fmt, fmt::Display};
|
||||
use std::{convert::From, env::VarError, fmt, fmt::Display, num::ParseIntError};
|
||||
|
||||
use tonic::{transport::Error as TonicError, Status as TonicStatus};
|
||||
|
||||
|
@@ -6,16 +6,31 @@ use tonic::{transport::Error as TonicError, Status as TonicStatus};
|
|||
pub enum Error {
|
||||
TransportError,
|
||||
GrpcError(GrpcError),
|
||||
ParseIntError,
|
||||
VarError,
|
||||
SerializationError,
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
write!(f, "{self:?}")
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl From<ParseIntError> for Error {
|
||||
fn from(_error: ParseIntError) -> Self {
|
||||
Error::ParseIntError
|
||||
}
|
||||
}
|
||||
|
||||
impl From<VarError> for Error {
|
||||
fn from(_error: VarError) -> Self {
|
||||
Error::VarError
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TonicError> for Error {
|
||||
fn from(_error: TonicError) -> Self {
|
||||
Error::TransportError
|
||||
|
@@ -24,17 +39,17 @@ impl From<TonicError> for Error {
|
|||
|
||||
impl From<TonicStatus> for Error {
|
||||
fn from(error: TonicStatus) -> Self {
|
||||
Error::GrpcError(GrpcError { status: error })
|
||||
Error::GrpcError(GrpcError { _status: error })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GrpcError {
|
||||
status: TonicStatus,
|
||||
_status: TonicStatus,
|
||||
}
|
||||
|
||||
impl Display for GrpcError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
write!(f, "{self:?}")
|
||||
}
|
||||
}
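With the `From<VarError>` and `From<ParseIntError>` impls added above, `?` folds both standard-library failures into the SDK error type. A hedged sketch; the helper function and the env-var lookup are illustrative, not SDK API:

```rust
fn sidecar_grpc_port() -> Result<u16, Error> {
    let raw = std::env::var("DAPR_GRPC_PORT")?; // VarError      -> Error::VarError
    let port = raw.parse::<u16>()?;             // ParseIntError -> Error::ParseIntError
    Ok(port)
}
```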
|
|
@@ -0,0 +1,32 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
|
||||
pub use serde;
|
||||
pub use serde_json;
|
||||
|
||||
pub use client::Client;
|
||||
|
||||
/// Module containing the Dapr Callback SDK.
|
||||
pub mod appcallback;
|
||||
/// Module containing the 'Client' implementation.
|
||||
pub mod client;
|
||||
|
||||
/// Module importing the Dapr runtime implementation.
|
||||
pub mod dapr {
|
||||
#![allow(clippy::large_enum_variant)]
|
||||
pub mod proto {
|
||||
pub mod common {
|
||||
pub mod v1 {
|
||||
include!("dapr/dapr.proto.common.v1.rs");
|
||||
}
|
||||
}
|
||||
pub mod runtime {
|
||||
pub mod v1 {
|
||||
include!("dapr/dapr.proto.runtime.v1.rs");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Module defining the error implementations.
|
||||
pub mod error;
|
||||
/// Module containing the 'Server' implementation.
|
||||
pub mod server;
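Given this module tree, downstream code can reach both the hand-written client and the generated protobuf types. The crate name `dapr` in these imports is an assumption about how the library is consumed:

```rust
use dapr::Client;                                     // re-exported at the crate root
use dapr::dapr::proto::common::v1::StateItem;         // generated common types
use dapr::dapr::proto::runtime::v1::GetStateRequest;  // generated runtime types
```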
|
|
@@ -0,0 +1,237 @@
|
|||
use crate::client::TonicClient;
|
||||
use crate::dapr::proto::runtime::v1 as dapr_v1;
|
||||
use crate::error::Error as DaprError;
|
||||
use prost_types::Any;
|
||||
use std::collections::HashMap;
|
||||
use std::time::Duration;
|
||||
use tonic::transport::Channel as TonicChannel;
|
||||
|
||||
pub type GrpcDaprClient = dapr_v1::dapr_client::DaprClient<TonicChannel>;
|
||||
|
||||
pub enum ActorStateOperation {
|
||||
Upsert { key: String, value: Option<Vec<u8>> },
|
||||
Delete { key: String },
|
||||
}
|
||||
|
||||
impl From<ActorStateOperation> for TransactionalActorStateOperation {
|
||||
fn from(val: ActorStateOperation) -> Self {
|
||||
match val {
|
||||
ActorStateOperation::Upsert { key, value } => TransactionalActorStateOperation {
|
||||
operation_type: "upsert".to_string(),
|
||||
key,
|
||||
value: value.map(|v| Any {
|
||||
type_url: "type.googleapis.com/bytes".to_string(),
|
||||
value: v,
|
||||
}),
|
||||
metadata: HashMap::new(),
|
||||
},
|
||||
ActorStateOperation::Delete { key } => TransactionalActorStateOperation {
|
||||
operation_type: "delete".to_string(),
|
||||
key,
|
||||
value: None,
|
||||
metadata: HashMap::new(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A client for interacting with the Dapr runtime within the scope of an actor.
|
||||
///
|
||||
/// Hosts methods for interacting with the Dapr sidecar specific to the actor instance.
|
||||
#[derive(Clone)]
|
||||
pub struct ActorContextClient {
|
||||
client: TonicClient,
|
||||
actor_type: String,
|
||||
actor_id: String,
|
||||
}
|
||||
|
||||
impl ActorContextClient {
|
||||
pub fn new(client: TonicClient, actor_type: &str, actor_id: &str) -> Self {
|
||||
ActorContextClient {
|
||||
client,
|
||||
actor_type: actor_type.to_string(),
|
||||
actor_id: actor_id.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves a keyed state value within the scope of this instance of the actor.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `key` - The key of the state to retrieve.
|
||||
pub async fn get_actor_state<K>(&mut self, key: K) -> Result<GetActorStateResponse, DaprError>
|
||||
where
|
||||
K: Into<String>,
|
||||
{
|
||||
Ok(self
|
||||
.client
|
||||
.get_actor_state(GetActorStateRequest {
|
||||
actor_type: self.actor_type.to_string(),
|
||||
actor_id: self.actor_id.to_string(),
|
||||
key: key.into(),
|
||||
})
|
||||
.await?
|
||||
.into_inner())
|
||||
}
|
||||
|
||||
/// Saves a state value within the scope of this instance of the actor.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `operations` - A list of [ActorStateOperation] to perform on the state.
|
||||
pub async fn execute_actor_state_transaction(
|
||||
&mut self,
|
||||
operations: Vec<ActorStateOperation>,
|
||||
) -> Result<(), DaprError> {
|
||||
self.client
|
||||
.execute_actor_state_transaction(ExecuteActorStateTransactionRequest {
|
||||
actor_type: self.actor_type.to_string(),
|
||||
actor_id: self.actor_id.to_string(),
|
||||
operations: operations.into_iter().map(|o| o.into()).collect(),
|
||||
})
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Registers a reminder with the Dapr runtime.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `name` - The name of the reminder.
|
||||
/// * `due_time` - The time at which the reminder should first be invoked.
|
||||
/// * `period` - The time interval between invocations of the reminder.
|
||||
/// * `data` - The data to pass to the reminder when it is invoked.
|
||||
/// * `ttl` - The time to live for the reminder.
|
||||
pub async fn register_actor_reminder<I>(
|
||||
&mut self,
|
||||
name: I,
|
||||
due_time: Option<Duration>,
|
||||
period: Option<Duration>,
|
||||
data: Vec<u8>,
|
||||
ttl: Option<Duration>,
|
||||
) -> Result<(), DaprError>
|
||||
where
|
||||
I: Into<String>,
|
||||
{
|
||||
self.client
|
||||
.register_actor_reminder(RegisterActorReminderRequest {
|
||||
actor_type: self.actor_type.to_string(),
|
||||
actor_id: self.actor_id.to_string(),
|
||||
name: name.into(),
|
||||
due_time: match due_time {
|
||||
None => "".to_string(),
|
||||
Some(t) => chrono::Duration::from_std(t).unwrap().to_string(),
|
||||
},
|
||||
period: match period {
|
||||
None => "".to_string(),
|
||||
Some(t) => chrono::Duration::from_std(t).unwrap().to_string(),
|
||||
},
|
||||
data,
|
||||
ttl: match ttl {
|
||||
None => "".to_string(),
|
||||
Some(t) => chrono::Duration::from_std(t).unwrap().to_string(),
|
||||
},
|
||||
})
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Unregisters a reminder with the Dapr runtime.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `name` - The name of the reminder to unregister.
|
||||
pub async fn unregister_actor_reminder<I>(&mut self, name: I) -> Result<(), DaprError>
|
||||
where
|
||||
I: Into<String>,
|
||||
{
|
||||
self.client
|
||||
.unregister_actor_reminder(UnregisterActorReminderRequest {
|
||||
actor_type: self.actor_type.to_string(),
|
||||
actor_id: self.actor_id.to_string(),
|
||||
name: name.into(),
|
||||
})
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Registers a timer with the Dapr runtime.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `name` - The name of the timer.
|
||||
/// * `due_time` - The time at which the timer should first be invoked.
|
||||
/// * `period` - The time interval between invocations of the timer.
|
||||
/// * `data` - The data to pass to the timer when it is invoked.
|
||||
/// * `callback` - The callback name to include in the invocation.
|
||||
/// * `ttl` - The time to live for the timer.
|
||||
pub async fn register_actor_timer<I>(
|
||||
&mut self,
|
||||
name: I,
|
||||
due_time: Option<Duration>,
|
||||
period: Option<Duration>,
|
||||
data: Vec<u8>,
|
||||
callback: Option<String>,
|
||||
ttl: Option<Duration>,
|
||||
) -> Result<(), DaprError>
|
||||
where
|
||||
I: Into<String>,
|
||||
{
|
||||
self.client
|
||||
.register_actor_timer(RegisterActorTimerRequest {
|
||||
actor_type: self.actor_type.to_string(),
|
||||
actor_id: self.actor_id.to_string(),
|
||||
name: name.into(),
|
||||
due_time: match due_time {
|
||||
None => "".to_string(),
|
||||
Some(t) => chrono::Duration::from_std(t).unwrap().to_string(),
|
||||
},
|
||||
period: match period {
|
||||
None => "".to_string(),
|
||||
Some(t) => chrono::Duration::from_std(t).unwrap().to_string(),
|
||||
},
|
||||
data,
|
||||
callback: callback.unwrap_or_default(),
|
||||
ttl: match ttl {
|
||||
None => "".to_string(),
|
||||
Some(t) => chrono::Duration::from_std(t).unwrap().to_string(),
|
||||
},
|
||||
})
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Unregisters a timer with the Dapr runtime.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `name` - The name of the timer to unregister.
|
||||
pub async fn unregister_actor_timer<I>(&mut self, name: I) -> Result<(), DaprError>
|
||||
where
|
||||
I: Into<String>,
|
||||
{
|
||||
self.client
|
||||
.unregister_actor_timer(UnregisterActorTimerRequest {
|
||||
actor_type: self.actor_type.to_string(),
|
||||
actor_id: self.actor_id.to_string(),
|
||||
name: name.into(),
|
||||
})
|
||||
.await?
|
||||
.into_inner();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub type GetActorStateRequest = dapr_v1::GetActorStateRequest;
|
||||
|
||||
pub type GetActorStateResponse = dapr_v1::GetActorStateResponse;
|
||||
|
||||
pub type ExecuteActorStateTransactionRequest = dapr_v1::ExecuteActorStateTransactionRequest;
|
||||
|
||||
pub type TransactionalActorStateOperation = dapr_v1::TransactionalActorStateOperation;
|
||||
|
||||
pub type RegisterActorTimerRequest = dapr_v1::RegisterActorTimerRequest;
|
||||
|
||||
pub type RegisterActorReminderRequest = dapr_v1::RegisterActorReminderRequest;
|
||||
|
||||
pub type UnregisterActorTimerRequest = dapr_v1::UnregisterActorTimerRequest;
|
||||
|
||||
pub type UnregisterActorReminderRequest = dapr_v1::UnregisterActorReminderRequest;
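A short sketch of how an actor implementation might use this client to register a reminder and upsert a piece of actor state; the durations, reminder name, key, and payload are illustrative:

```rust
use std::time::Duration;

async fn schedule_refresh(client: &mut ActorContextClient) -> Result<(), crate::error::Error> {
    // Remind this actor instance every five minutes, starting after 30 seconds.
    client
        .register_actor_reminder(
            "refresh",
            Some(Duration::from_secs(30)),  // due_time
            Some(Duration::from_secs(300)), // period
            Vec::new(),                     // no payload
            None,                           // no ttl
        )
        .await?;

    // Persist a value scoped to this actor instance.
    client
        .execute_actor_state_transaction(vec![ActorStateOperation::Upsert {
            key: "last_refresh".to_string(),
            value: Some(b"pending".to_vec()),
        }])
        .await
}
```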
|
|
@@ -0,0 +1,80 @@
|
|||
use async_trait::async_trait;
|
||||
use axum::{extract::rejection::PathRejection, http::StatusCode, response::IntoResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{error::Error, fmt::Display, sync::Arc};
|
||||
|
||||
use self::context_client::ActorContextClient;
|
||||
|
||||
pub use axum;
|
||||
|
||||
pub mod context_client;
|
||||
pub mod runtime;
|
||||
|
||||
pub type ActorFactory = Box<dyn Fn(&str, &str, ActorContextClient) -> Arc<dyn Actor> + Send + Sync>;
|
||||
|
||||
#[async_trait]
|
||||
pub trait Actor: Send + Sync {
|
||||
async fn on_activate(&self) -> Result<(), ActorError>;
|
||||
async fn on_deactivate(&self) -> Result<(), ActorError>;
|
||||
async fn on_reminder(&self, _reminder_name: &str, _data: Vec<u8>) -> Result<(), ActorError>;
|
||||
async fn on_timer(&self, _timer_name: &str, _data: Vec<u8>) -> Result<(), ActorError>;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ActorError {
|
||||
NotRegistered,
|
||||
CorruptedState,
|
||||
MethodNotFound,
|
||||
ActorNotFound,
|
||||
MethodError(Box<dyn Error>),
|
||||
SerializationError(),
|
||||
}
|
||||
|
||||
impl Display for ActorError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ActorError::NotRegistered => write!(f, "Actor not registered"),
|
||||
ActorError::CorruptedState => write!(f, "Actor state corrupted"),
|
||||
ActorError::MethodNotFound => write!(f, "Method not found"),
|
||||
ActorError::ActorNotFound => write!(f, "Actor not found"),
|
||||
ActorError::MethodError(e) => write!(f, "Method error: {e}"),
|
||||
ActorError::SerializationError() => write!(f, "Serialization error"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoResponse for ActorError {
|
||||
fn into_response(self) -> axum::response::Response {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
axum::Json(self.to_string()),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct ActorPath {
|
||||
pub actor_id: String,
|
||||
}
|
||||
|
||||
pub enum ActorRejection {
|
||||
ActorError(String),
|
||||
Path(PathRejection),
|
||||
}
|
||||
|
||||
impl IntoResponse for ActorRejection {
|
||||
fn into_response(self) -> axum::response::Response {
|
||||
match self {
|
||||
ActorRejection::ActorError(e) => {
|
||||
(StatusCode::INTERNAL_SERVER_ERROR, axum::Json(e)).into_response()
|
||||
}
|
||||
ActorRejection::Path(e) => {
|
||||
(StatusCode::BAD_REQUEST, axum::Json(e.body_text())).into_response()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
|
@@ -0,0 +1,332 @@
|
|||
use axum::{handler::Handler, routing::put, Router};
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::client::TonicClient;
|
||||
|
||||
use super::{context_client::ActorContextClient, Actor, ActorError, ActorFactory};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ActorState {
|
||||
pub actor_type: String,
|
||||
pub runtime: Arc<ActorRuntime>,
|
||||
}
|
||||
|
||||
type MethodRegistrationMap =
|
||||
HashMap<String, Box<dyn (FnOnce(Router, Arc<ActorRuntime>) -> Router) + Send + Sync>>;
|
||||
|
||||
/// Describes the registration of an actor type, including the methods that can be invoked on it and the factory to create instances of it.
|
||||
/// # Example:
|
||||
/// ```ignore
|
||||
/// # use std::sync::Arc;
|
||||
/// # use dapr::server::actor::{context_client::ActorContextClient, Actor, ActorError, ActorFactory, runtime::ActorTypeRegistration};
|
||||
/// # use dapr::server::utils::DaprJson;
|
||||
/// # use dapr::actor;
|
||||
/// # use axum::{Json, Router};
|
||||
/// # use serde::{Deserialize, Serialize};
|
||||
/// # #[dapr::actor]
|
||||
/// # struct MyActor {
|
||||
/// # id: String,
|
||||
/// # client: ActorContextClient,
|
||||
/// # }
|
||||
/// #
|
||||
/// # #[derive(Serialize, Deserialize)]
|
||||
/// # pub struct MyRequest {
|
||||
/// # pub name: String,
|
||||
/// # }
|
||||
/// #
|
||||
/// # #[derive(Serialize, Deserialize)]
|
||||
/// # pub struct MyResponse {
|
||||
/// # pub available: bool,
|
||||
/// # }
|
||||
/// #
|
||||
/// # impl MyActor {
|
||||
/// # async fn do_stuff(&self, DaprJson(req): DaprJson<MyRequest>) -> Json<MyResponse> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn do_other_stuff(&self, DaprJson(req): DaprJson<MyRequest>) -> Json<MyResponse> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// #
|
||||
/// #
|
||||
/// # #[async_trait::async_trait]
|
||||
/// # impl Actor for MyActor {
|
||||
/// # async fn on_activate(&self) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_deactivate(&self) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_reminder(&self, reminder_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_timer(&self, timer_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # }
|
||||
///
|
||||
/// # async fn main_async() {
|
||||
/// let mut dapr_server = dapr::server::DaprHttpServer::new().await;
|
||||
///
|
||||
/// dapr_server.register_actor(ActorTypeRegistration::new::<MyActor>("MyActor", Box::new(|_actor_type, actor_id, context| {
|
||||
/// Arc::new(MyActor {
|
||||
/// id: actor_id.to_string(),
|
||||
/// client: context,
|
||||
/// })}))
|
||||
/// .register_method("do_stuff", MyActor::do_stuff)
|
||||
/// .register_method("do_other_stuff", MyActor::do_other_stuff))
|
||||
/// .await;
|
||||
/// # }
|
||||
/// ```
|
||||
pub struct ActorTypeRegistration {
|
||||
name: String,
|
||||
factory: ActorFactory,
|
||||
method_registrations: MethodRegistrationMap,
|
||||
}
|
||||
|
||||
impl ActorTypeRegistration {
|
||||
pub fn new<TActor>(name: &str, factory: ActorFactory) -> Self
|
||||
where
|
||||
TActor: Actor + Send + Sync + 'static,
|
||||
{
|
||||
ActorTypeRegistration {
|
||||
name: name.to_string(),
|
||||
factory,
|
||||
method_registrations: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Registers a method on the actor type to be exposed to actor clients.
|
||||
///
|
||||
/// # Arguments:
|
||||
/// * `method_name` - The name of the method to be registered. This name will be used by actor clients to invoke the method.
|
||||
/// * `handler` - The handler function to be invoked when the method is called.
|
||||
/// Can be any valid [Axum handler](https://docs.rs/axum/latest/axum/handler/index.html),
|
||||
/// use [Axum extractors](https://docs.rs/axum/latest/axum/extract/index.html) to access the incoming request and return an [`impl IntoResponse`](https://docs.rs/axum/latest/axum/response/trait.IntoResponse.html).
|
||||
/// Use the `DaprJson` extractor to deserialize the request from Json coming from a Dapr sidecar.
|
||||
/// # Example:
|
||||
/// ```ignore
|
||||
/// # use std::sync::Arc;
|
||||
/// # use dapr::server::actor::{context_client::ActorContextClient, Actor, ActorError, ActorFactory, runtime::ActorTypeRegistration};
|
||||
/// # use dapr::server::utils::DaprJson;
|
||||
/// # use dapr::actor;
|
||||
/// # use axum::{Json, Router};
|
||||
/// # use serde::{Deserialize, Serialize};
|
||||
/// # #[dapr::actor]
|
||||
/// # struct MyActor {
|
||||
/// # id: String,
|
||||
/// # client: ActorContextClient,
|
||||
/// # }
|
||||
/// #
|
||||
/// # #[async_trait::async_trait]
|
||||
/// # impl Actor for MyActor {
|
||||
/// # async fn on_activate(&self) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_deactivate(&self) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_reminder(&self, reminder_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_timer(&self, timer_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ##[derive(Serialize, Deserialize)]
|
||||
/// pub struct MyRequest {
|
||||
/// pub name: String,
|
||||
/// }
|
||||
///
|
||||
///##[derive(Serialize, Deserialize)]
|
||||
///pub struct MyResponse {
|
||||
/// pub available: bool,
|
||||
///}
|
||||
///
|
||||
///impl MyActor {
|
||||
/// fn do_stuff(&self, DaprJson(data): DaprJson<MyRequest>) -> Json<MyResponse> {
|
||||
/// println!("doing stuff with {}", data.name);
|
||||
/// Json(MyResponse {
|
||||
/// available: true
|
||||
/// })
|
||||
/// }
|
||||
///}
|
||||
///
|
||||
/// # async fn main_async() {
|
||||
/// let mut dapr_server = dapr::server::DaprHttpServer::new().await;
|
||||
///
|
||||
/// dapr_server.register_actor(ActorTypeRegistration::new::<MyActor>("MyActor", Box::new(|_actor_type, actor_id, context| {
|
||||
/// Arc::new(MyActor {
|
||||
/// id: actor_id.to_string(),
|
||||
/// client: context,
|
||||
/// })}))
|
||||
/// .register_method("do_stuff", MyActor::do_stuff))
|
||||
/// .await;
|
||||
/// # }
|
||||
/// ```
|
||||
pub fn register_method<T>(
|
||||
mut self,
|
||||
method_name: &str,
|
||||
handler: impl Handler<T, ActorState> + Sync,
|
||||
) -> Self
|
||||
where
|
||||
T: 'static,
|
||||
{
|
||||
let actor_type = self.name.clone();
|
||||
let method_path = format!("/actors/{actor_type}/:actor_id/method/{method_name}");
|
||||
|
||||
let reg_func = move |router: Router, runtime: Arc<ActorRuntime>| {
|
||||
router.route(
|
||||
&method_path,
|
||||
put(handler).with_state(ActorState {
|
||||
actor_type,
|
||||
runtime,
|
||||
}),
|
||||
)
|
||||
};
|
||||
|
||||
self.method_registrations
|
||||
.insert(method_name.to_string(), Box::new(reg_func));
|
||||
self
|
||||
}
|
||||
|
||||
fn create_actor(&self, actor_id: &str, client: TonicClient) -> Arc<dyn Actor> {
|
||||
let client = ActorContextClient::new(client, &self.name, actor_id);
|
||||
|
||||
(self.factory)(&self.name, actor_id, client) as _
|
||||
}
|
||||
}
|
||||
|
||||
type ActiveActorMap = Arc<RwLock<HashMap<(String, String), Arc<dyn Actor>>>>;
|
||||
type ActorRegistrationMap = Arc<RwLock<HashMap<String, ActorTypeRegistration>>>;
|
||||
|
||||
pub struct ActorRuntime {
|
||||
dapr_client: TonicClient,
|
||||
|
||||
registered_actors_types: ActorRegistrationMap,
|
||||
active_actors: ActiveActorMap,
|
||||
}
|
||||
|
||||
impl ActorRuntime {
|
||||
pub fn new(dapr_client: TonicClient) -> Self {
|
||||
ActorRuntime {
|
||||
dapr_client,
|
||||
registered_actors_types: Arc::new(RwLock::new(HashMap::new())),
|
||||
active_actors: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Registers an actor type to be exposed to actor clients.
|
||||
/// # Arguments:
|
||||
/// * `registration` - The [ActorTypeRegistration] that describes the actor implementation.
|
||||
pub async fn register_actor(&self, registration: ActorTypeRegistration) {
|
||||
let name = registration.name.clone();
|
||||
let mut g = self.registered_actors_types.write().await;
|
||||
g.insert(name.clone(), registration);
|
||||
log::info!("registered actor {name}");
|
||||
}
|
||||
|
||||
pub async fn configure_method_routes(
|
||||
&self,
|
||||
router: Router,
|
||||
runtime: Arc<ActorRuntime>,
|
||||
) -> Router {
|
||||
let mut router = router;
|
||||
let mut types = self.registered_actors_types.write().await;
|
||||
|
||||
for (_, registration) in types.iter_mut() {
|
||||
for (_, reg_func) in registration.method_registrations.drain() {
|
||||
router = reg_func(router, runtime.clone());
|
||||
}
|
||||
}
|
||||
router
|
||||
}
|
||||
|
||||
pub async fn deactivate_actor(&self, name: &str, id: &str) -> Result<(), ActorError> {
|
||||
let mut actors = self.active_actors.write().await;
|
||||
|
||||
let actor = match actors.remove(&(name.to_string(), id.to_string())) {
|
||||
Some(actor_ref) => actor_ref,
|
||||
None => return Err(ActorError::ActorNotFound),
|
||||
};
|
||||
drop(actors);
|
||||
actor.on_deactivate().await?;
|
||||
drop(actor);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn deactivate_all(&self) {
|
||||
let mut actors = self.active_actors.write().await;
|
||||
|
||||
for actor in actors.values() {
|
||||
_ = actor.on_deactivate().await;
|
||||
}
|
||||
actors.clear();
|
||||
}
|
||||
|
||||
pub async fn invoke_reminder(
|
||||
&self,
|
||||
name: &str,
|
||||
id: &str,
|
||||
reminder_name: &str,
|
||||
data: Vec<u8>,
|
||||
) -> Result<(), ActorError> {
|
||||
let actor = self.get_or_create_actor(name, id).await?;
|
||||
actor.on_reminder(reminder_name, data).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn invoke_timer(
|
||||
&self,
|
||||
name: &str,
|
||||
id: &str,
|
||||
timer_name: &str,
|
||||
data: Vec<u8>,
|
||||
) -> Result<(), ActorError> {
|
||||
let actor = self.get_or_create_actor(name, id).await?;
|
||||
actor.on_timer(timer_name, data).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn list_registered_actors(&self) -> Vec<String> {
|
||||
let types = self.registered_actors_types.read().await;
|
||||
|
||||
types.keys().map(|k| k.to_string()).collect()
|
||||
}
|
||||
|
||||
pub async fn get_or_create_actor(
|
||||
&self,
|
||||
actor_type: &str,
|
||||
id: &str,
|
||||
) -> Result<Arc<dyn Actor>, ActorError> {
|
||||
let actors = self.active_actors.read().await;
|
||||
match actors.get(&(actor_type.to_string(), id.to_string())) {
|
||||
Some(actor_ref) => Ok(actor_ref.clone()),
|
||||
None => {
|
||||
drop(actors);
|
||||
self.activate_actor(actor_type, id).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn activate_actor(
|
||||
&self,
|
||||
actor_type: &str,
|
||||
id: &str,
|
||||
) -> Result<Arc<dyn Actor>, ActorError> {
|
||||
let types = self.registered_actors_types.read().await;
|
||||
let actor = match types.get(actor_type) {
|
||||
Some(f) => f.create_actor(id, self.dapr_client.clone()),
|
||||
None => Err(ActorError::NotRegistered)?,
|
||||
};
|
||||
|
||||
actor.on_activate().await?;
|
||||
|
||||
let actor_key = (actor_type.to_string(), id.to_string());
|
||||
let mut actors = self.active_actors.write().await;
|
||||
actors.insert(actor_key, actor.clone());
|
||||
|
||||
Ok(actor)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,249 @@
|
|||
#[cfg(test)]
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use axum::{Json, Router};
|
||||
use axum_test::TestServer;
|
||||
use dapr::server::{
|
||||
actor::{runtime::ActorTypeRegistration, Actor, ActorError},
|
||||
DaprHttpServer,
|
||||
};
|
||||
use dapr_macros::actor;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use tokio::{net::TcpListener, sync::Mutex};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct MyResponse {
|
||||
pub actor_id: String,
|
||||
pub name: String,
|
||||
pub available: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct MyRequest {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[actor]
|
||||
struct MyActor {
|
||||
id: String,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Actor for MyActor {
|
||||
async fn on_activate(&self) -> Result<(), ActorError> {
|
||||
TEST_STATE.increment_on_activate(&self.id).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_deactivate(&self) -> Result<(), ActorError> {
|
||||
TEST_STATE.increment_on_deactivate(&self.id).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_reminder(&self, _reminder_name: &str, _data: Vec<u8>) -> Result<(), ActorError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_timer(&self, _timer_name: &str, _data: Vec<u8>) -> Result<(), ActorError> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl MyActor {
|
||||
async fn do_stuff(&self, Json(req): Json<MyRequest>) -> Json<MyResponse> {
|
||||
Json(MyResponse {
|
||||
actor_id: self.id.clone(),
|
||||
name: req.name,
|
||||
available: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_actor_invoke() {
|
||||
let dapr_port = get_available_port().await.unwrap();
|
||||
|
||||
let fake_sidecar = tokio::spawn(async move {
|
||||
let sidecar = Router::new();
|
||||
let address = format!("127.0.0.1:{dapr_port}");
|
||||
let listener = TcpListener::bind(address).await.unwrap();
|
||||
_ = axum::serve(listener, sidecar.into_make_service()).await;
|
||||
});
|
||||
tokio::task::yield_now().await;
|
||||
|
||||
let mut dapr_server = DaprHttpServer::with_dapr_port(dapr_port).await;
|
||||
|
||||
dapr_server
|
||||
.register_actor(
|
||||
ActorTypeRegistration::new::<MyActor>(
|
||||
"MyActor",
|
||||
Box::new(|_actor_type, actor_id, _context| {
|
||||
Arc::new(MyActor {
|
||||
id: actor_id.to_string(),
|
||||
})
|
||||
}),
|
||||
)
|
||||
.register_method("do_stuff", MyActor::do_stuff),
|
||||
)
|
||||
.await;
|
||||
|
||||
let actor_id = Uuid::new_v4().to_string();
|
||||
|
||||
let app = dapr_server.build_test_router().await;
|
||||
let server = TestServer::new(app.into_make_service()).unwrap();
|
||||
|
||||
let invoke_resp = server
|
||||
.put(&format!("/actors/MyActor/{actor_id}/method/do_stuff"))
|
||||
.json(&json!({ "name": "foo" }))
|
||||
.await;
|
||||
invoke_resp.assert_status_ok();
|
||||
|
||||
invoke_resp.assert_json(&MyResponse {
|
||||
actor_id: actor_id.clone(),
|
||||
name: "foo".to_string(),
|
||||
available: true,
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
TEST_STATE
|
||||
.get_actor_state(&actor_id)
|
||||
.await
|
||||
.unwrap()
|
||||
.on_activate,
|
||||
1
|
||||
);
|
||||
|
||||
let invoke_resp2 = server
|
||||
.put(&format!("/actors/MyActor/{actor_id}/method/do_stuff"))
|
||||
.json(&json!({ "name": "foo" }))
|
||||
.await;
|
||||
invoke_resp2.assert_status_ok();
|
||||
|
||||
assert_eq!(
|
||||
TEST_STATE
|
||||
.get_actor_state(&actor_id)
|
||||
.await
|
||||
.unwrap()
|
||||
.on_activate,
|
||||
1
|
||||
);
|
||||
|
||||
fake_sidecar.abort();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_actor_deactivate() {
|
||||
let dapr_port = get_available_port().await.unwrap();
|
||||
|
||||
let fake_sidecar = tokio::spawn(async move {
|
||||
let sidecar = Router::new();
|
||||
let address = format!("127.0.0.1:{dapr_port}");
|
||||
let listener = TcpListener::bind(address).await.unwrap();
|
||||
_ = axum::serve(listener, sidecar.into_make_service()).await;
|
||||
});
|
||||
tokio::task::yield_now().await;
|
||||
|
||||
let mut dapr_server = DaprHttpServer::with_dapr_port(dapr_port).await;
|
||||
|
||||
dapr_server
|
||||
.register_actor(
|
||||
ActorTypeRegistration::new::<MyActor>(
|
||||
"MyActor",
|
||||
Box::new(|_actor_type, actor_id, _context| {
|
||||
Arc::new(MyActor {
|
||||
id: actor_id.to_string(),
|
||||
})
|
||||
}),
|
||||
)
|
||||
.register_method("do_stuff", MyActor::do_stuff),
|
||||
)
|
||||
.await;
|
||||
|
||||
let app = dapr_server.build_test_router().await;
|
||||
let server = TestServer::new(app.into_make_service()).unwrap();
|
||||
|
||||
let actor_id = Uuid::new_v4().to_string();
|
||||
|
||||
let invoke_resp = server
|
||||
.put(&format!("/actors/MyActor/{actor_id}/method/do_stuff"))
|
||||
.json(&json!({ "name": "foo" }))
|
||||
.await;
|
||||
invoke_resp.assert_status_ok();
|
||||
|
||||
let deactivate_resp1 = server.delete(&format!("/actors/MyActor/{actor_id}")).await;
|
||||
deactivate_resp1.assert_status_ok();
|
||||
|
||||
let deactivate_resp2 = server.delete(&format!("/actors/MyActor/{actor_id}")).await;
|
||||
deactivate_resp2.assert_status_not_found();
|
||||
|
||||
assert_eq!(
|
||||
TEST_STATE
|
||||
.get_actor_state(&actor_id)
|
||||
.await
|
||||
.unwrap()
|
||||
.on_deactivate,
|
||||
1
|
||||
);
|
||||
|
||||
fake_sidecar.abort();
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct TestActorState {
|
||||
pub on_activate: u32,
|
||||
pub on_deactivate: u32,
|
||||
}
|
||||
|
||||
struct TestState {
|
||||
actors: Arc<Mutex<HashMap<String, TestActorState>>>,
|
||||
}
|
||||
|
||||
impl TestState {
|
||||
pub fn new() -> Self {
|
||||
TestState {
|
||||
actors: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_actor_state(&self, actor_id: &str) -> Option<TestActorState> {
|
||||
let actors = self.actors.lock().await;
|
||||
actors.get(actor_id).cloned()
|
||||
}
|
||||
|
||||
pub async fn increment_on_activate(&self, actor_id: &str) {
|
||||
let mut actors = self.actors.lock().await;
|
||||
let actor_state = actors
|
||||
.entry(actor_id.to_string())
|
||||
.or_insert(TestActorState {
|
||||
on_activate: 0,
|
||||
on_deactivate: 0,
|
||||
});
|
||||
actor_state.on_activate += 1;
|
||||
}
|
||||
|
||||
pub async fn increment_on_deactivate(&self, actor_id: &str) {
|
||||
let mut actors = self.actors.lock().await;
|
||||
let actor_state = actors
|
||||
.entry(actor_id.to_string())
|
||||
.or_insert(TestActorState {
|
||||
on_activate: 0,
|
||||
on_deactivate: 0,
|
||||
});
|
||||
actor_state.on_deactivate += 1;
|
||||
}
|
||||
}
|
||||
|
||||
static TEST_STATE: Lazy<TestState> = Lazy::new(TestState::new);
|
||||
|
||||
async fn get_available_port() -> Option<u16> {
|
||||
for port in 8000..9000 {
|
||||
if TcpListener::bind(format!("127.0.0.1:{port}")).await.is_ok() {
|
||||
return Some(port);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
|
@ -0,0 +1,89 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use tonic::{Code, Request, Response, Status};
|
||||
|
||||
use crate::dapr::proto::runtime;
|
||||
use crate::dapr::proto::runtime::v1::app_callback_alpha_server::AppCallbackAlpha;
|
||||
|
||||
pub struct AppCallbackServiceAlpha {
|
||||
pub job_handlers: HashMap<String, Box<dyn JobHandlerMethod + Send + Sync + 'static>>,
|
||||
}
|
||||
|
||||
impl AppCallbackServiceAlpha {
|
||||
pub fn new() -> Self {
|
||||
AppCallbackServiceAlpha {
|
||||
job_handlers: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_job_handler(&mut self, job_name: String, handler: Box<dyn JobHandlerMethod>) {
|
||||
self.job_handlers.insert(job_name, handler);
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for AppCallbackServiceAlpha {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl AppCallbackAlpha for AppCallbackServiceAlpha {
|
||||
async fn on_bulk_topic_event_alpha1(
|
||||
&self,
|
||||
_request: Request<runtime::v1::TopicEventBulkRequest>,
|
||||
) -> Result<Response<runtime::v1::TopicEventBulkResponse>, Status> {
|
||||
Err(Status::unavailable("unimplemented"))
|
||||
}
|
||||
|
||||
async fn on_job_event_alpha1(
|
||||
&self,
|
||||
request: Request<runtime::v1::JobEventRequest>,
|
||||
) -> Result<Response<runtime::v1::JobEventResponse>, Status> {
|
||||
let request_inner = request.into_inner();
|
||||
let job_name = request_inner
|
||||
.method
|
||||
.strip_prefix("job/")
|
||||
.unwrap()
|
||||
.to_string();
|
||||
|
||||
if let Some(handler) = self.job_handlers.get(&job_name) {
|
||||
let handle_response = handler.handler(request_inner).await;
|
||||
handle_response.map(Response::new)
|
||||
} else {
|
||||
Err(Status::new(Code::Internal, "Job Handler Not Found"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! add_job_handler_alpha {
|
||||
($app_callback_service:expr, $handler_name:ident, $handler_fn:expr) => {
|
||||
pub struct $handler_name {}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl JobHandlerMethod for $handler_name {
|
||||
async fn handler(&self, request: JobEventRequest) -> Result<JobEventResponse, Status> {
|
||||
$handler_fn(request).await
|
||||
}
|
||||
}
|
||||
|
||||
impl $handler_name {
|
||||
pub fn new() -> Self {
|
||||
$handler_name {}
|
||||
}
|
||||
}
|
||||
|
||||
        let handler_name = stringify!($handler_name).to_string();
|
||||
|
||||
$app_callback_service.add_job_handler(handler_name, Box::new($handler_name::new()));
|
||||
};
|
||||
}
|
||||
|
||||
#[tonic::async_trait]
|
||||
pub trait JobHandlerMethod: Send + Sync + 'static {
|
||||
async fn handler(
|
||||
&self,
|
||||
request: runtime::v1::JobEventRequest,
|
||||
) -> Result<runtime::v1::JobEventResponse, Status>;
|
||||
}
|
|
@ -0,0 +1,327 @@
|
|||
use axum::{
|
||||
extract::{OriginalUri, Path, State},
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
routing::{delete, get, put},
|
||||
Json, Router,
|
||||
};
|
||||
use futures::{Future, FutureExt};
|
||||
use std::{pin::Pin, sync::Arc};
|
||||
use tokio::net::TcpListener;
|
||||
|
||||
use super::super::client::TonicClient;
|
||||
use super::actor::runtime::{ActorRuntime, ActorTypeRegistration};
|
||||
|
||||
/// The Dapr HTTP server.
|
||||
///
|
||||
/// Supports HTTP callbacks from the Dapr sidecar.
|
||||
///
|
||||
/// # Example:
|
||||
/// ```ignore
|
||||
/// # use std::sync::Arc;
|
||||
/// # use dapr::server::actor::{context_client::ActorContextClient, Actor, ActorError, ActorFactory, runtime::ActorTypeRegistration};
|
||||
/// # use dapr::server::utils::DaprJson;
|
||||
/// # use dapr::actor;
|
||||
/// # use axum::{Json, Router};
|
||||
/// # use serde::{Deserialize, Serialize};
|
||||
/// # #[actor]
|
||||
/// # struct MyActor {
|
||||
/// # id: String,
|
||||
/// # client: ActorContextClient,
|
||||
/// # }
|
||||
/// #
|
||||
/// # #[async_trait::async_trait]
|
||||
/// # impl Actor for MyActor {
|
||||
/// # async fn on_activate(&self) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_deactivate(&self) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_reminder(&self, reminder_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # async fn on_timer(&self, timer_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
/// # todo!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ##[derive(Serialize, Deserialize)]
|
||||
/// pub struct MyRequest {
|
||||
/// pub name: String,
|
||||
/// }
|
||||
///
|
||||
/// ##[derive(Serialize, Deserialize)]
/// pub struct MyResponse {
/// pub available: bool,
/// }
///
/// impl MyActor {
|
||||
/// fn do_stuff(&self, DaprJson(data): DaprJson<MyRequest>) -> Json<MyResponse> {
|
||||
/// println!("doing stuff with {}", data.name);
|
||||
/// Json(MyResponse {
|
||||
/// available: true
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
/// # async fn main_async() {
|
||||
/// let mut dapr_server = dapr::server::DaprHttpServer::new().await;
|
||||
///
|
||||
/// dapr_server.register_actor(ActorTypeRegistration::new::<MyActor>("MyActor", Box::new(|_actor_type, actor_id, context| {
|
||||
/// Arc::new(MyActor {
|
||||
/// id: actor_id.to_string(),
|
||||
/// client: context,
|
||||
/// })}))
|
||||
/// .register_method("do_stuff", MyActor::do_stuff))
|
||||
/// .await;
|
||||
///
|
||||
/// dapr_server.start(None).await;
|
||||
/// # }
|
||||
/// ```
|
||||
pub struct DaprHttpServer {
|
||||
actor_runtime: Arc<ActorRuntime>,
|
||||
shutdown_signal: Option<Pin<Box<dyn Future<Output = ()> + Send>>>,
|
||||
}
|
||||
|
||||
impl DaprHttpServer {
|
||||
/// Creates a new instance of the Dapr HTTP server with default options.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function panics if the Dapr Sidecar cannot be reached!
|
||||
/// For a non-panicking version that allows you to handle any errors yourself, see:
|
||||
/// [DaprHttpServer::try_new_with_dapr_port]
|
||||
pub async fn new() -> Self {
|
||||
let dapr_port: u16 = std::env::var("DAPR_GRPC_PORT")
|
||||
.unwrap_or("3501".into())
|
||||
.parse()
|
||||
.unwrap();
|
||||
Self::with_dapr_port(dapr_port).await
|
||||
}
|
||||
|
||||
/// Creates a new instance of the Dapr HTTP server that connects to the Dapr sidecar on the
|
||||
/// given dapr_port.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function panics if the Dapr Sidecar cannot be reached!
|
||||
/// For a non-panicking version that allows you to handle any errors yourself, see:
|
||||
/// [DaprHttpServer::try_new_with_dapr_port]
|
||||
pub async fn with_dapr_port(dapr_port: u16) -> Self {
|
||||
match Self::try_new_with_dapr_port(dapr_port).await {
|
||||
Ok(c) => c,
|
||||
Err(err) => panic!("failed to connect to dapr: {err}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new instance of the Dapr HTTP server that connects to the Dapr sidecar on the
|
||||
/// given dapr_port.
|
||||
///
|
||||
/// In contrast to the other functions that create a DaprHttpServer, this function does
|
||||
/// not panic, but instead returns a Result.
|
||||
pub async fn try_new_with_dapr_port(
|
||||
dapr_port: u16,
|
||||
) -> Result<Self, Box<dyn std::error::Error>> {
|
||||
let dapr_addr = format!("https://127.0.0.1:{dapr_port}");
|
||||
|
||||
let cc = TonicClient::connect(dapr_addr).await?;
|
||||
let rt = ActorRuntime::new(cc);
|
||||
|
||||
Ok(DaprHttpServer {
|
||||
actor_runtime: Arc::new(rt),
|
||||
shutdown_signal: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn with_graceful_shutdown<F>(self, signal: F) -> Self
|
||||
where
|
||||
F: Future<Output = ()> + Send + 'static,
|
||||
{
|
||||
DaprHttpServer {
|
||||
shutdown_signal: Some(signal.boxed()),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
/// Registers an actor type with the Dapr runtime.
|
||||
///
|
||||
/// # Arguments:
|
||||
/// * `registration` - The [ActorTypeRegistration] struct, which carries the methods that can be invoked on the actor and the factory used to create instances of it.
|
||||
pub async fn register_actor(&self, registration: ActorTypeRegistration) {
|
||||
self.actor_runtime.register_actor(registration).await;
|
||||
}
|
||||
|
||||
/// Starts the Dapr HTTP server.
|
||||
///
|
||||
/// # Arguments:
|
||||
/// * `port` - The port to listen on. If not specified, the APP_PORT environment variable will be used. If that is not specified, 8080 will be used.
|
||||
pub async fn start(&mut self, port: Option<u16>) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let app = self.build_router().await;
|
||||
|
||||
let default_port: u16 = std::env::var("APP_PORT")
|
||||
.unwrap_or(String::from("8080"))
|
||||
.parse()
|
||||
.unwrap_or(8080);
|
||||
|
||||
let address = format!("127.0.0.1:{}", port.unwrap_or(default_port));
|
||||
let listener = TcpListener::bind(address).await?;
|
||||
|
||||
let server = axum::serve(listener, app.into_make_service());
|
||||
|
||||
let final_result = match self.shutdown_signal.take() {
|
||||
Some(signal) => {
|
||||
server
|
||||
.with_graceful_shutdown(async move {
|
||||
signal.await;
|
||||
})
|
||||
.await
|
||||
}
|
||||
None => server.await,
|
||||
};
|
||||
|
||||
self.actor_runtime.deactivate_all().await;
|
||||
|
||||
Ok(final_result?)
|
||||
}
|
||||
|
||||
pub async fn build_test_router(&mut self) -> Router {
|
||||
self.build_router().await
|
||||
}
|
||||
|
||||
async fn build_router(&mut self) -> Router {
|
||||
let rt = self.actor_runtime.clone();
|
||||
|
||||
let app = Router::new()
|
||||
.route("/healthz", get(health_check))
|
||||
.route(
|
||||
"/dapr/config",
|
||||
get(registered_actors).with_state(rt.clone()),
|
||||
)
|
||||
.route(
|
||||
"/actors/:actor_type/:actor_id",
|
||||
delete(deactivate_actor).with_state(rt.clone()),
|
||||
)
|
||||
.route(
|
||||
"/actors/:actor_type/:actor_id/method/remind/:reminder_name",
|
||||
put(invoke_reminder).with_state(rt.clone()),
|
||||
)
|
||||
.route(
|
||||
"/actors/:actor_type/:actor_id/method/timer/:timer_name",
|
||||
put(invoke_timer).with_state(rt.clone()),
|
||||
)
|
||||
.fallback(fallback_handler);
|
||||
|
||||
self.actor_runtime
|
||||
.configure_method_routes(app, rt.clone())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
async fn fallback_handler(OriginalUri(uri): OriginalUri) -> impl IntoResponse {
|
||||
log::warn!("Returning 404 for request: {uri}");
|
||||
(
|
||||
StatusCode::NOT_FOUND,
|
||||
format!("The URI '{uri}' could not be found!"),
|
||||
)
|
||||
}
|
||||
|
||||
async fn health_check() -> impl IntoResponse {
|
||||
    log::debug!("received health check request");
|
||||
StatusCode::OK
|
||||
}
|
||||
|
||||
async fn registered_actors(State(runtime): State<Arc<ActorRuntime>>) -> impl IntoResponse {
|
||||
log::debug!("daprd requested registered actors");
|
||||
let ra = runtime.list_registered_actors().await;
|
||||
let result = super::models::RegisteredActorsResponse { entities: ra };
|
||||
|
||||
Json(result)
|
||||
}
|
||||
|
||||
async fn deactivate_actor(
|
||||
State(runtime): State<Arc<ActorRuntime>>,
|
||||
Path((actor_type, actor_id)): Path<(String, String)>,
|
||||
) -> impl IntoResponse {
|
||||
match runtime.deactivate_actor(&actor_type, &actor_id).await {
|
||||
Ok(_) => StatusCode::OK,
|
||||
Err(err) => {
|
||||
log::error!("invoke_actor: {err:?}");
|
||||
match err {
|
||||
super::actor::ActorError::ActorNotFound => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
log::error!("deactivate_actor: {err:?}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn invoke_reminder(
|
||||
State(runtime): State<Arc<ActorRuntime>>,
|
||||
Path((actor_type, actor_id, reminder_name)): Path<(String, String, String)>,
|
||||
Json(payload): Json<ReminderPayload>,
|
||||
) -> impl IntoResponse {
|
||||
log::debug!("invoke_reminder: {actor_type} {actor_id} {reminder_name} {payload:?}");
|
||||
|
||||
match runtime
|
||||
.invoke_reminder(
|
||||
&actor_type,
|
||||
&actor_id,
|
||||
&reminder_name,
|
||||
payload.data.unwrap_or_default().into_bytes(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(_output) => StatusCode::OK,
|
||||
Err(err) => {
|
||||
log::error!("invoke_actor: {err:?}");
|
||||
match err {
|
||||
super::actor::ActorError::ActorNotFound => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
log::error!("invoke_reminder: {err:?}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn invoke_timer(
|
||||
State(runtime): State<Arc<ActorRuntime>>,
|
||||
Path((actor_type, actor_id, timer_name)): Path<(String, String, String)>,
|
||||
Json(payload): Json<TimerPayload>,
|
||||
) -> impl IntoResponse {
|
||||
log::debug!("invoke_timer: {actor_type} {actor_id} {timer_name}, {payload:?}");
|
||||
|
||||
match runtime
|
||||
.invoke_timer(
|
||||
&actor_type,
|
||||
&actor_id,
|
||||
&timer_name,
|
||||
payload.data.unwrap_or_default().into_bytes(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(_output) => StatusCode::OK,
|
||||
Err(err) => {
|
||||
log::error!("invoke_actor: {err:?}");
|
||||
match err {
|
||||
super::actor::ActorError::ActorNotFound => StatusCode::NOT_FOUND,
|
||||
_ => {
|
||||
log::error!("invoke_timer: {err:?}");
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize, Debug)]
|
||||
struct ReminderPayload {
|
||||
data: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize, Debug)]
|
||||
struct TimerPayload {
|
||||
data: Option<String>,
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
pub use http::DaprHttpServer;
|
||||
|
||||
#[macro_use]
|
||||
pub mod actor;
|
||||
pub mod appcallbackalpha;
|
||||
mod http;
|
||||
mod models;
|
||||
pub mod utils;
|
|
@ -0,0 +1,6 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct RegisteredActorsResponse {
|
||||
pub entities: Vec<String>,
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
use async_trait::async_trait;
|
||||
use axum::{
|
||||
body::Body,
|
||||
extract::FromRequest,
|
||||
http::{Request, StatusCode},
|
||||
response::IntoResponse,
|
||||
};
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
/// Workaround for Dapr's JSON serialization not correctly setting the Content-Type header.
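///
/// Use it in actor method signatures in place of `axum::Json`, for example:
/// `async fn do_stuff(&self, DaprJson(req): DaprJson<MyRequest>) -> Json<MyResponse>`.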
|
||||
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
pub struct DaprJson<T>(pub T);
|
||||
|
||||
pub enum JsonRejection {
|
||||
JsonError(String),
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<T, S> FromRequest<S> for DaprJson<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = JsonRejection;
|
||||
|
||||
async fn from_request(req: Request<Body>, state: &S) -> Result<Self, Self::Rejection> {
|
||||
let bytes = match axum::body::Bytes::from_request(req, state).await {
|
||||
Ok(bytes) => bytes,
|
||||
Err(e) => {
|
||||
log::error!("Error getting bytes: {e}");
|
||||
return Err(JsonRejection::JsonError(e.to_string()));
|
||||
}
|
||||
};
|
||||
let value = match serde_json::from_slice::<T>(&bytes) {
|
||||
Ok(value) => value,
|
||||
Err(e) => {
|
||||
log::error!("Error deserializing JSON: {e}");
|
||||
return Err(JsonRejection::JsonError(e.to_string()));
|
||||
}
|
||||
};
|
||||
|
||||
Ok(DaprJson(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoResponse for JsonRejection {
|
||||
fn into_response(self) -> axum::response::Response {
|
||||
match self {
|
||||
JsonRejection::JsonError(e) => (StatusCode::BAD_REQUEST, axum::Json(e)).into_response(),
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
# Dapr Rust SDK documentation
|
||||
|
||||
This page covers how the documentation is structured for the Dapr Rust SDK.
|
||||
|
||||
## Dapr Docs
|
||||
|
||||
All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [Rust SDK](https://docs.dapr.io/developing-applications/sdks/rust/). Head over there if you want to read the docs.
|
||||
|
||||
### Rust SDK docs source
|
||||
|
||||
Although the docs site code and content are in the [docs repo](https://github.com/dapr/docs), the Rust SDK content and images are within the `content` and `static` directories, respectively.
|
||||
|
||||
This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for.
|
||||
|
||||
## Writing Rust SDK docs
|
||||
|
||||
To get up and running to write Rust SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your
|
||||
environment. It will clone both the docs repo and this repo, so you can make changes and see them rendered within the site instantly, as well as commit and PR into this repo.
|
||||
|
||||
Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc.
|
||||
|
||||
## Docs architecture
|
||||
|
||||
The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is set up as a git submodule so that when the docs repo is cloned and initialized, the Rust repo, along with the docs, is cloned as well.
|
||||
|
||||
Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/rust` and `static/go` directories, respectively. Thus, all the content within this repo is folded into the main docs site.
|
|
@ -0,0 +1,38 @@
|
|||
---
|
||||
type: docs
|
||||
title: "Contributing to the Rust SDK"
|
||||
linkTitle: "Rust SDK"
|
||||
weight: 3000
|
||||
description: Guidelines for contributing to the Dapr Rust SDK
|
||||
---
|
||||
|
||||
When contributing to the [Rust SDK](https://github.com/dapr/rust-sdk) the following rules and best-practices should be followed.
|
||||
|
||||
## Examples
|
||||
|
||||
The `examples` directory contains code samples for users to run to try out specific functionality of the various Rust SDK packages and extensions. It also hosts component examples used for validation. When writing new or updating existing samples, keep in mind:
|
||||
|
||||
- All examples should be runnable on Windows, Linux, and macOS. While Rust code is consistent among operating systems aside from minor OS-feature gating, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}})
|
||||
- Contain steps to download/install any required prerequisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine.
|
||||
- Examples should pass validation, include mechanical markdown steps, and be added to the validation workflow [TBA](#)
|
||||
|
||||
## Docs
|
||||
|
||||
The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs keep in mind:
|
||||
|
||||
- All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these.
|
||||
- All files and directories should be prefixed with `rust-` to ensure all file/directory names are globally unique across all Dapr documentation.
|
||||
|
||||
## Update Protobufs
|
||||
|
||||
To pull the protobufs from the `dapr/dapr` repo you can run the script in the repo root like so:
|
||||
|
||||
```bash
|
||||
./update-protos.sh
|
||||
```
|
||||
|
||||
By default, the script fetches the latest proto updates from the master branch of the Dapr repository. If you need to choose a specific release or version, use the `-v` flag:
|
||||
|
||||
```bash
|
||||
./update-protos.sh -v v1.13.0
|
||||
```
|
|
@ -0,0 +1,27 @@
|
|||
---
|
||||
type: docs
|
||||
title: "Dapr Rust SDK"
|
||||
linkTitle: "Rust"
|
||||
weight: 1000
|
||||
description: Rust SDK packages for developing Dapr applications
|
||||
no_list: true
|
||||
cascade:
|
||||
github_repo: https://github.com/dapr/rust-sdk
|
||||
github_subdir: daprdocs/content/en/rust-sdk-docs
|
||||
path_base_for_github_subdir: content/en/developing-applications/sdks/rust/
|
||||
github_branch: main
|
||||
---
|
||||
|
||||
{{% alert title="Note" color="primary" %}}
|
||||
The Dapr Rust-SDK is currently in Alpha. Work is underway to bring it to a stable release and will likely involve breaking changes.
|
||||
{{% /alert %}}
|
||||
|
||||
A client library to help build Dapr applications using Rust. This client is targeting support for all public Dapr APIs while focusing on idiomatic Rust experiences and developer productivity.
|
||||
|
||||
{{< cardpane >}}
|
||||
{{< card title="**Client**">}}
|
||||
Use the Rust Client SDK for invoking public Dapr APIs
|
||||
|
||||
[**Learn more about the Rust Client SDK**]({{% ref rust-client %}})
|
||||
{{< /card >}}
|
||||
{{< /cardpane >}}
|
|
@ -0,0 +1,128 @@
|
|||
---
|
||||
type: docs
|
||||
title: "Getting started with the Dapr client Rust SDK"
|
||||
linkTitle: "Client"
|
||||
weight: 20000
|
||||
description: How to get up and running with the Dapr Rust SDK
|
||||
no_list: true
|
||||
---
|
||||
|
||||
The Dapr client package allows you to interact with other Dapr applications from
|
||||
a Rust application.
|
||||
|
||||
{{% alert title="Note" color="primary" %}}
|
||||
The Dapr Rust-SDK is currently in Alpha. Work is underway to bring it to a
|
||||
stable release and will likely involve breaking changes.
|
||||
{{% /alert %}}
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed
|
||||
- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}})
|
||||
- [Rust installed](https://www.rust-lang.org/tools/install)
|
||||
|
||||
## Import the client package
|
||||
|
||||
Add Dapr to your `Cargo.toml`
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
# Other dependencies
|
||||
dapr = "0.16.0"
|
||||
```
|
||||
|
||||
You can either reference `dapr::Client` or bind the full path to a new name as follows:
|
||||
|
||||
```rust
|
||||
use dapr::Client as DaprClient;
|
||||
```
|
||||
|
||||
## Instantiating the Dapr client
|
||||
|
||||
```rust
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
```
|
||||
|
||||
Alternatively, if you would like to specify a custom port, you can use the `connect_with_port` method:
|
||||
|
||||
```rust
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect_with_port(addr, "3500".to_string()).await?;
|
||||
```
|
||||
|
||||
## Building blocks
|
||||
|
||||
The Rust SDK allows you to interface with the
|
||||
[Dapr building blocks]({{% ref building-blocks %}}).
|
||||
|
||||
### Service Invocation (gRPC)
|
||||
|
||||
To invoke a specific method on another service running with a Dapr sidecar, the
|
||||
Dapr client provides two options:
|
||||
|
||||
Invoke a (gRPC) service
|
||||
|
||||
```rust
|
||||
let response = client
|
||||
.invoke_service("service-to-invoke", "method-to-invoke", Some(data))
|
||||
.await
|
||||
.unwrap();
|
||||
```
|
||||
|
||||
For a full guide on service invocation, visit
|
||||
[How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}).
|
||||
|
||||
### State Management
|
||||
|
||||
The Dapr client provides access to these state management methods: `save_state`, `get_state`, and `delete_state`, which can be used like so:
|
||||
|
||||
```rust
|
||||
let store_name = String::from("statestore");
|
||||
|
||||
let key = String::from("hello");
|
||||
let val = String::from("world").into_bytes();
|
||||
|
||||
// save key-value pair in the state store
|
||||
client
|
||||
.save_state(store_name, key, val, None, None, None)
|
||||
.await?;
|
||||
|
||||
let get_response = client
|
||||
.get_state("statestore", "hello", None)
|
||||
.await?;
|
||||
|
||||
// delete a value from the state store
|
||||
client
|
||||
.delete_state("statestore", "hello", None)
|
||||
.await?;
|
||||
```
|
||||
|
||||
Multiple states can be sent with the `save_bulk_states` method.
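
A hedged sketch of a bulk save is shown below. The exact item type accepted by `save_bulk_states` is defined by the SDK, so the simple key/value pairs here are an assumption; check the crate docs for the precise signature.

```rust
// Assumption: bulk items are built from (key, value) pairs; the SDK's real
// item type may carry extra fields such as etag, options, or metadata.
let items = vec![
    ("key1".to_string(), String::from("one").into_bytes()),
    ("key2".to_string(), String::from("two").into_bytes()),
];

// Save all items against the same state store in a single call.
client.save_bulk_states("statestore", items).await?;
```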
|
||||
|
||||
For a full guide on state management, visit
|
||||
[How-To: Save & get state]({{% ref howto-get-save-state.md %}}).
|
||||
|
||||
### Publish Messages
|
||||
|
||||
To publish data onto a topic, the Dapr client provides a simple method:
|
||||
|
||||
```rust
|
||||
let pubsub_name = "pubsub-name".to_string();
|
||||
let pubsub_topic = "topic-name".to_string();
|
||||
let pubsub_content_type = "text/plain".to_string();
|
||||
|
||||
let data = "content".to_string().into_bytes();
|
||||
client
|
||||
.publish_event(pubsub_name, pubsub_topic, pubsub_content_type, data, None)
|
||||
.await?;
|
||||
```
|
||||
|
||||
For a full guide on pub/sub, visit
|
||||
[How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}).
|
||||
|
||||
## Related links
|
||||
|
||||
[Rust SDK Examples](https://github.com/dapr/rust-sdk/tree/master/examples)
|
|
@ -0,0 +1,95 @@
|
|||
[package]
|
||||
name = "examples"
|
||||
authors.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
publish = false
|
||||
version = "0.0.1"
|
||||
repository.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
async-trait = { workspace = true }
|
||||
dapr = { path = "../dapr" }
|
||||
dapr-macros = { path = "../dapr-macros" }
|
||||
env_logger = "0.11"
|
||||
log = "0.4"
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["full"] }
|
||||
tokio-stream = { workspace = true }
|
||||
tonic = { workspace = true }
|
||||
prost = { workspace = true }
|
||||
prost-types = { workspace = true }
|
||||
|
||||
[[example]]
|
||||
name = "actors-client"
|
||||
path = "src/actors/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "actors-server"
|
||||
path = "src/actors/server.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bindings-input"
|
||||
path = "src/bindings/input.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bindings-output"
|
||||
path = "src/bindings/output.rs"
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
path = "src/client/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "configuration"
|
||||
path = "src/configuration/main.rs"
|
||||
|
||||
[[example]]
|
||||
name = "conversation"
|
||||
path = "src/conversation/main.rs"
|
||||
|
||||
[[example]]
|
||||
name = "crypto"
|
||||
path = "src/crypto/main.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-client"
|
||||
path = "src/invoke/grpc/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-server"
|
||||
path = "src/invoke/grpc/server.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-proxying-client"
|
||||
path = "src/invoke/grpc-proxying/client.rs"
|
||||
|
||||
[[example]]
|
||||
name = "invoke-grpc-proxying-server"
|
||||
path = "src/invoke/grpc-proxying/server.rs"
|
||||
|
||||
[[example]]
|
||||
name = "jobs"
|
||||
path = "src/jobs/jobs.rs"
|
||||
|
||||
[[example]]
|
||||
name = "pubsub-publisher"
|
||||
path = "src/pubsub/publisher.rs"
|
||||
|
||||
[[example]]
|
||||
name = "pubsub-subscriber"
|
||||
path = "src/pubsub/subscriber.rs"
|
||||
|
||||
[[example]]
|
||||
name = "query-state-1"
|
||||
path = "src/query_state/query1.rs"
|
||||
|
||||
[[example]]
|
||||
name = "query-state-2"
|
||||
path = "src/query_state/query2.rs"
|
||||
|
||||
[[example]]
|
||||
name = "secrets-bulk"
|
||||
path = "src/secrets-bulk/app.rs"
|
|
@ -2,9 +2,9 @@
|
|||
|
||||
These examples demonstrate how to use the Dapr Rust SDK.
|
||||
|
||||
* [client](./client)
|
||||
* [client](src/client)
|
||||
* Simple dapr client example that saves, gets, and deletes state from the state stores
|
||||
* [pubsub](./pubsub)
|
||||
* [pubsub](src/pubsub)
|
||||
* Publishes and subscribes to events
|
||||
|
||||
## Adding new examples
|
||||
|
|
|
@ -1,24 +0,0 @@
|
|||
Before you run the example make sure local redis state store is running by executing:
|
||||
```
|
||||
docker ps
|
||||
```
|
||||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
```
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
2. Run the example with dapr using the following command:
|
||||
|
||||
```
|
||||
dapr run --app-id=rustapp --dapr-grpc-port 3500 cargo run -- --example client
|
||||
```
|
||||
|
||||
If everything went well you should see the following output along with dapr logs:
|
||||
```
|
||||
Successfully saved!
|
||||
Value is "world"
|
||||
Deleted value: []
|
||||
```
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
syntax = "proto3";
|
||||
package helloworld;
|
||||
|
||||
service Greeter {
|
||||
rpc SayHello (HelloRequest) returns (HelloReply);
|
||||
}
|
||||
|
||||
message HelloRequest {
|
||||
string name = 1;
|
||||
}
|
||||
|
||||
message HelloReply {
|
||||
string message = 1;
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
# Pub/Sub Example
|
||||
|
||||
This is a simple example that demonstrates Dapr's pub/sub capabilities. To implement pub/sub in your rust application, you need to implement `AppCallback` server for subscribing to events. Specifically, the following two methods need to be implemented for pub/sub to work:
|
||||
|
||||
1. `list_topic_subscriptions` - Dapr runtime calls this method to get list of topics the application is subscribed to.
|
||||
2. `on_topic_event` - Defines how the application handles the topic event.
|
||||
|
||||
> **Note:** Make sure to use latest version of proto bindings.
|
||||
|
||||
## Running
|
||||
|
||||
> Before you run the example make sure local redis state store is running by executing:
|
||||
> ```
|
||||
> docker ps
|
||||
> ```
|
||||
|
||||
To run this example:
|
||||
|
||||
1. Start Subscriber (expose gRPC server receiver on port 50051):
|
||||
```bash
|
||||
dapr run --app-id rust-subscriber --app-protocol grpc --app-port 50051 cargo run -- --example subscriber
|
||||
```
|
||||
|
||||
2. Start Publisher:
|
||||
```bash
|
||||
dapr run --app-id python-publisher --app-protocol grpc cargo run -- --example publisher
|
||||
```
|
|
@ -1,48 +0,0 @@
|
|||
use std::{collections::HashMap, thread, time::Duration};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
thread::sleep(Duration::from_secs(2));
|
||||
|
||||
// Get the Dapr port and create a connection
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;
|
||||
let addr = format!("https://127.0.0.1:{}", port);
|
||||
|
||||
// Create the client
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
|
||||
// name of the pubsub component
|
||||
let pubsub_name = "pubsub".to_string();
|
||||
|
||||
// content type of the pubsub data
|
||||
let data_content_type = "text/plain".to_string();
|
||||
|
||||
// topic to publish message to
|
||||
let topic = "A".to_string();
|
||||
|
||||
for count in 0..100 {
|
||||
// message metadata
|
||||
let mut metadata = HashMap::<String, String>::new();
|
||||
metadata.insert("count".to_string(), count.to_string());
|
||||
|
||||
// message
|
||||
let message = format!("{} => hello from rust!", &count).into_bytes();
|
||||
|
||||
client
|
||||
.publish_event(
|
||||
&pubsub_name,
|
||||
&topic,
|
||||
&data_content_type,
|
||||
message,
|
||||
Some(metadata),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// sleep for 2 secs to simulate delay b/w two events
|
||||
tokio::time::sleep(Duration::from_secs(2)).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,145 @@
|
|||
# Actor Example
|
||||
|
||||
This example demonstrates the Dapr actor framework. To author an actor,
|
||||
|
||||
1. Create a struct decorated with the `#[dapr::actor]` macro to house your custom actor methods that map to [Axum handlers](https://docs.rs/axum/latest/axum/handler/index.html). Use [Axum extractors](https://docs.rs/axum/latest/axum/extract/index.html) to access the incoming request and return an [`impl IntoResponse`](https://docs.rs/axum/latest/axum/response/trait.IntoResponse.html).
Use the `DaprJson` extractor to deserialize the request from JSON coming from the Dapr sidecar.
|
||||
```rust
|
||||
#[dapr::actor]
|
||||
struct MyActor {
|
||||
id: String,
|
||||
client: ActorContextClient
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct MyRequest {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct MyResponse {
|
||||
pub available: bool,
|
||||
}
|
||||
|
||||
impl MyActor {
|
||||
fn do_stuff(&self, DaprJson(data): DaprJson<MyRequest>) -> Json<MyResponse> {
|
||||
println!("doing stuff with {}", data.name);
|
||||
Json(MyResponse {
|
||||
available: true
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
There are many ways to write your actor method signature using Axum handlers, and you also have access to the actor instance via `self`. Here is a simple example:
|
||||
```rust
|
||||
pub async fn method_2(&self) -> impl IntoResponse {
|
||||
StatusCode::OK
|
||||
}
|
||||
```
|
||||
1. Implement the `Actor` trait. This trait exposes the following methods:
|
||||
- `on_activate` - Called when an actor is activated on a host
|
||||
- `on_deactivate` - Called when an actor is deactivated on a host
|
||||
- `on_reminder` - Called when a reminder is received from the Dapr sidecar
|
||||
- `on_timer` - Called when a timer is received from the Dapr sidecar
|
||||
|
||||
|
||||
```rust
|
||||
#[async_trait]
|
||||
impl Actor for MyActor {
|
||||
|
||||
async fn on_activate(&self) -> Result<(), ActorError> {
|
||||
println!("on_activate {}", self.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_deactivate(&self) -> Result<(), ActorError> {
|
||||
println!("on_deactivate");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
```
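
The example above omits the reminder and timer callbacks from the list; they follow the same pattern and live in the same `impl Actor for MyActor` block (mirroring `src/actors/server.rs`):

```rust
async fn on_reminder(&self, reminder_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
    println!("on_reminder {} with {} bytes", reminder_name, data.len());
    Ok(())
}

async fn on_timer(&self, timer_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
    println!("on_timer {} with {} bytes", timer_name, data.len());
    Ok(())
}
```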
|
||||
|
||||
1. An actor host requires an HTTP server to receive callbacks from the Dapr sidecar. The `DaprHttpServer` object implements this functionality and also encapsulates the actor runtime to service any hosted actors. Use the `register_actor` method to register an actor type to be serviced; this method takes an `ActorTypeRegistration`, which specifies
|
||||
- The actor type name (used by actor clients), and the concrete struct
|
||||
- A factory to construct a new instance of that actor type when one is required to be activated by the runtime. The parameters passed to the factory will be the actor type, actor ID, and a Dapr client for managing state, timers and reminders for the actor.
|
||||
- The methods that you would like to expose to external clients.
|
||||
|
||||
```rust
|
||||
let mut dapr_server = dapr::server::DaprHttpServer::new().await;
|
||||
|
||||
dapr_server.register_actor(ActorTypeRegistration::new::<MyActor>("MyActor",
    Box::new(|_actor_type, actor_id, client| Arc::new(MyActor{
        id: actor_id.to_string(),
        client
    })))
    .register_method("do_stuff", MyActor::do_stuff)
    .register_method("do_other_stuff", MyActor::do_other_stuff))
    .await;
|
||||
|
||||
dapr_server.start(None).await?;
|
||||
```
|
||||
|
||||
|
||||
## Running
|
||||
|
||||
> Before you run the example make sure local redis state store is running by executing:
|
||||
> ```
|
||||
> docker ps
|
||||
> ```
|
||||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run this example (using the multi-app run):
|
||||
|
||||
|
||||
<!-- STEP
|
||||
name: Run Multi-App
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- 'dapr::server::actor::runtime] registered actor MyActor'
|
||||
- 'Request for actor_type: MyActor, actor_id: a1'
|
||||
- '== APP - actor-server == on_activate a1'
|
||||
- '== APP - actor-server == doing stuff with test'
|
||||
- '== APP - actor-server == get_actor_state GetActorStateResponse { data: []'
|
||||
- '== APP - actor-client == Response: Ok('
|
||||
- '== APP - actor-client == MyResponse {'
|
||||
- '== APP - actor-client == available: true,'
|
||||
- '== APP - actor-client == },'
|
||||
- '== APP - actor-client == )'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
### What the multi-app run will achieve:
|
||||
|
||||
1. Start actor host (expose HTTP server receiver on port 50051):
|
||||
```bash
|
||||
dapr run --app-id actor-host --app-protocol http --app-port 50051 cargo run -- --example actors-server
|
||||
```
|
||||
|
||||
2. Start actor client:
|
||||
```bash
|
||||
dapr run --app-id actor-client --dapr-grpc-port 3502 cargo run -- --example actors-client
|
||||
|
||||
```
|
|
@ -0,0 +1,36 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct MyResponse {
|
||||
pub available: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct MyRequest {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
tokio::time::sleep(std::time::Duration::new(2, 0)).await;
|
||||
|
||||
// Define the Dapr address
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
||||
// Create the client
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
|
||||
let data = MyRequest {
|
||||
name: "test".to_string(),
|
||||
};
|
||||
|
||||
let resp: Result<MyResponse, dapr::error::Error> = client
|
||||
.invoke_actor("MyActor", "a1", "do_stuff", data, None)
|
||||
.await;
|
||||
|
||||
println!("Response: {resp:#?}");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
version: 1
|
||||
common:
|
||||
resourcesPath: ./resources/
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: actor-server
|
||||
appDirPath: ./
|
||||
appProtocol: http
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "actors-server"]
|
||||
- appID: actor-client
|
||||
appDirPath: ./
|
||||
daprGRPCPort: 3502
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "actors-client"]
|
|
@ -0,0 +1,14 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: statestore
|
||||
spec:
|
||||
type: state.redis
|
||||
version: v1
|
||||
metadata:
|
||||
- name: redisHost
|
||||
value: localhost:6379
|
||||
- name: redisPassword
|
||||
value: ""
|
||||
- name: actorStateStore
|
||||
value: "true"
|
|
@ -0,0 +1,86 @@
|
|||
use async_trait::async_trait;
|
||||
use dapr::server::{
|
||||
actor::{
|
||||
axum::Json, context_client::ActorContextClient, runtime::ActorTypeRegistration, Actor,
|
||||
ActorError,
|
||||
},
|
||||
utils::DaprJson,
|
||||
};
|
||||
use dapr_macros::actor;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{str::from_utf8, sync::Arc};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct MyResponse {
|
||||
pub available: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct MyRequest {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[actor]
|
||||
struct MyActor {
|
||||
id: String,
|
||||
client: ActorContextClient,
|
||||
}
|
||||
|
||||
impl MyActor {
|
||||
async fn do_stuff(&self, DaprJson(req): DaprJson<MyRequest>) -> Json<MyResponse> {
|
||||
println!("doing stuff with {}", req.name);
|
||||
let mut dapr = self.client.clone();
|
||||
let r = dapr.get_actor_state("key1").await.unwrap();
|
||||
println!("get_actor_state {r:?}");
|
||||
Json(MyResponse { available: true })
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Actor for MyActor {
|
||||
async fn on_activate(&self) -> Result<(), ActorError> {
|
||||
println!("on_activate {}", self.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_deactivate(&self) -> Result<(), ActorError> {
|
||||
println!("on_deactivate");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_reminder(&self, reminder_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
println!("on_reminder {} {:?}", reminder_name, from_utf8(&data));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn on_timer(&self, timer_name: &str, data: Vec<u8>) -> Result<(), ActorError> {
|
||||
println!("on_timer {} {:?}", timer_name, from_utf8(&data));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||
let mut dapr_server = dapr::server::DaprHttpServer::new().await;
|
||||
|
||||
dapr_server
|
||||
.register_actor(
|
||||
ActorTypeRegistration::new::<MyActor>(
|
||||
"MyActor",
|
||||
Box::new(|_actor_type, actor_id, context| {
|
||||
Arc::new(MyActor {
|
||||
id: actor_id.to_string(),
|
||||
client: context,
|
||||
})
|
||||
}),
|
||||
)
|
||||
.register_method("do_stuff", MyActor::do_stuff)
|
||||
.register_method("do_stuff2", MyActor::do_stuff),
|
||||
)
|
||||
.await;
|
||||
|
||||
dapr_server.start(None).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,83 @@
|
|||
# Input and Output Bindings Example
|
||||
|
||||
This is a simple example that demonstrates Dapr's binding capabilities. To implement input bindings in your Rust application, you need to implement the `AppCallback` server for subscribing to events. Specifically, the following two methods need to be implemented for input bindings to work:
|
||||
|
||||
1. `list_input_bindings` - Dapr runtime calls this method to get the list of bindings the application is subscribed to.
|
||||
2. `on_binding_event` - Defines how the application handles the input binding event.
|
||||
|
||||
> **Note:** Make sure to use the latest version of the proto bindings.
|
||||
|
||||
In order to have both examples working with the same binding configuration, Kafka is used here. If you don't have it available, you can switch to any binding that supports both input and output from [this list](https://docs.dapr.io/reference/components-reference/supported-bindings/)
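
A condensed sketch of those two callbacks, mirroring the full example in `src/bindings/input.rs`:

```rust
async fn list_input_bindings(
    &self,
    _request: Request<()>,
) -> Result<Response<ListInputBindingsResponse>, Status> {
    // Tell the Dapr runtime which bindings this app listens to.
    Ok(Response::new(ListInputBindingsResponse {
        bindings: vec![String::from("binding-example")],
    }))
}

async fn on_binding_event(
    &self,
    request: Request<BindingEventRequest>,
) -> Result<Response<BindingEventResponse>, Status> {
    // Handle the delivered event; here the payload is just printed.
    let r = request.into_inner();
    println!("Binding Name: {}", r.name);
    println!("Message: {}", String::from_utf8_lossy(&r.data));
    Ok(Response::new(BindingEventResponse::default()))
}
```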
|
||||
|
||||
|
||||
## Running
|
||||
|
||||
1. To run the example we need to first build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run a Kafka container
|
||||
|
||||
<!-- STEP
|
||||
name: Run kafka instance
|
||||
background: true
|
||||
sleep: 60
|
||||
timeout_seconds: 120
|
||||
expected_return_code:
|
||||
expected_stderr_lines:
|
||||
-->
|
||||
|
||||
```bash
|
||||
docker run -p 9092:9092 apache/kafka:3.7.1
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
3. Run the multi-app run template (`dapr.yaml`)
|
||||
|
||||
<!-- STEP
|
||||
name: Run Multi-app Run
|
||||
output_match_mode: substring
|
||||
match_order: sequential
|
||||
expected_stdout_lines:
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 0 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 1 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 2 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 3 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 4 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 5 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 6 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 7 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 8 => hello from rust!'
|
||||
- '== APP - rust-input-b == Binding Name: binding-example'
|
||||
- '== APP - rust-input-b == Message: 9 => hello from rust!'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
|
@ -0,0 +1,23 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: binding-example
|
||||
spec:
|
||||
type: bindings.kafka
|
||||
metadata:
|
||||
- name: direction
|
||||
value: "input, output"
|
||||
# Kafka broker connection setting
|
||||
- name: brokers
|
||||
value: localhost:9092
|
||||
# consumer configuration: topic and consumer group
|
||||
- name: topics
|
||||
value: sample
|
||||
- name: consumerGroup
|
||||
value: group1
|
||||
# publisher configuration: topic
|
||||
- name: publishTopic
|
||||
value: sample
|
||||
- name: authType
|
||||
value: "none"
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
version: 1
|
||||
common:
|
||||
resourcesPath: ./components/
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: rust-input-b
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "bindings-input"]
|
||||
- appID: rust-output-b
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "bindings-output"]
|
|
@ -1,8 +1,10 @@
|
|||
use tonic::{transport::Server, Request, Response, Status};
|
||||
|
||||
use dapr::{
|
||||
appcallback::*,
|
||||
dapr::dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer},
|
||||
use dapr::dapr::proto::common::v1::{InvokeRequest, InvokeResponse};
|
||||
use dapr::dapr::proto::runtime::v1::{
|
||||
app_callback_server::{AppCallback, AppCallbackServer},
|
||||
BindingEventRequest, BindingEventResponse, ListInputBindingsResponse,
|
||||
ListTopicSubscriptionsResponse, TopicEventRequest, TopicEventResponse,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
|
@ -19,51 +21,50 @@ impl AppCallback for AppCallbackService {
|
|||
}
|
||||
|
||||
/// Lists all topics subscribed by this app.
|
||||
///
|
||||
/// NOTE: Dapr runtime will call this method to get
|
||||
/// the list of topics the app wants to subscribe to.
|
||||
/// In this example, the app is subscribing to topic `A`.
|
||||
async fn list_topic_subscriptions(
|
||||
&self,
|
||||
_request: Request<()>,
|
||||
) -> Result<Response<ListTopicSubscriptionsResponse>, Status> {
|
||||
let topic = "A".to_string();
|
||||
let pubsub_name = "pubsub".to_string();
|
||||
|
||||
let list_subscriptions = ListTopicSubscriptionsResponse::topic(pubsub_name, topic);
|
||||
|
||||
Ok(Response::new(list_subscriptions))
|
||||
Ok(Response::new(ListTopicSubscriptionsResponse::default()))
|
||||
}
|
||||
|
||||
/// Subscribes events from Pubsub.
|
||||
async fn on_topic_event(
|
||||
&self,
|
||||
request: Request<TopicEventRequest>,
|
||||
_request: Request<TopicEventRequest>,
|
||||
) -> Result<Response<TopicEventResponse>, Status> {
|
||||
let r = request.into_inner();
|
||||
let data = &r.data;
|
||||
let data_content_type = &r.data_content_type;
|
||||
|
||||
let message = String::from_utf8_lossy(&data);
|
||||
println!("Message: {}", &message);
|
||||
println!("Content-Type: {}", &data_content_type);
|
||||
|
||||
Ok(Response::new(TopicEventResponse::default()))
|
||||
}
|
||||
|
||||
/// Lists all input bindings subscribed by this app.
|
||||
/// NOTE: Dapr runtime will call this method to get
|
||||
/// the list of bindings the app wants to subscribe to.
|
||||
/// In this example, the app is subscribing to a local pubsub binding named "binding-example"
|
||||
|
||||
async fn list_input_bindings(
|
||||
&self,
|
||||
_request: Request<()>,
|
||||
) -> Result<Response<ListInputBindingsResponse>, Status> {
|
||||
Ok(Response::new(ListInputBindingsResponse::default()))
|
||||
let list_bindings = ListInputBindingsResponse {
|
||||
bindings: vec![String::from("binding-example")],
|
||||
};
|
||||
|
||||
Ok(Response::new(list_bindings))
|
||||
}
|
||||
|
||||
/// Listens events from the input bindings.
|
||||
async fn on_binding_event(
|
||||
&self,
|
||||
_request: Request<BindingEventRequest>,
|
||||
request: Request<BindingEventRequest>,
|
||||
) -> Result<Response<BindingEventResponse>, Status> {
|
||||
let r = request.into_inner();
|
||||
let name = &r.name;
|
||||
let data = &r.data;
|
||||
|
||||
let message = String::from_utf8_lossy(data);
|
||||
println!("Binding Name: {}", &name);
|
||||
println!("Message: {}", &message);
|
||||
|
||||
Ok(Response::new(BindingEventResponse::default()))
|
||||
}
|
||||
}
|
||||
|
@ -74,7 +75,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
|
||||
let callback_service = AppCallbackService::default();
|
||||
|
||||
println!("AppCallback server listening on: {}", addr);
|
||||
println!("AppCallback server listening on: {addr}");
|
||||
|
||||
// Create a gRPC server with the callback_service.
|
||||
Server::builder()
|
|
@ -0,0 +1,35 @@
|
|||
use std::{collections::HashMap, time::Duration};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
tokio::time::sleep(Duration::from_secs(2)).await;
|
||||
|
||||
// Get the Dapr port and create a connection
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
||||
// Create the client
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
|
||||
// name of the component
|
||||
let binding_name = "binding-example";
|
||||
|
||||
for count in 0..10 {
|
||||
// message metadata
|
||||
let mut metadata = HashMap::<String, String>::new();
|
||||
metadata.insert("count".to_string(), count.to_string());
|
||||
|
||||
// message
|
||||
let message = format!("{} => hello from rust!", &count).into_bytes();
|
||||
|
||||
client
|
||||
.invoke_binding(binding_name, message, "create", Some(metadata))
|
||||
.await?;
|
||||
|
||||
// sleep for 500ms to simulate delay b/w two events
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,47 @@
|
|||
Before you run the example, make sure the local Redis state store is running by executing:

```bash
|
||||
docker ps
|
||||
```
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example with dapr using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Run client example
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- '== APP == Successfully saved!'
|
||||
- '== APP == Value is "world"'
|
||||
- '== APP == Deleted value: []'
|
||||
background: true
|
||||
sleep: 15
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run --app-id=rustapp --dapr-grpc-port 3500 --resources-path ./resources cargo run -- --example client
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
If everything went well you should see the following output along with dapr logs:
|
||||
```
|
||||
Successfully saved!
|
||||
Value is "world"
|
||||
Deleted value: []
|
||||
```
|
||||
|
|
@ -2,11 +2,10 @@
|
|||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
std::thread::sleep(std::time::Duration::new(2, 0));
|
||||
tokio::time::sleep(std::time::Duration::new(2, 0)).await;
|
||||
|
||||
// Get the Dapr port and create a connection
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;
|
||||
let addr = format!("https://127.0.0.1:{}", port);
|
||||
// Set the Dapr address
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
||||
// Create the client
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
|
@ -18,7 +17,9 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|||
let store_name = String::from("statestore");
|
||||
|
||||
// save key-value pair in the state store
|
||||
client.save_state(store_name, vec![(key, val)]).await?;
|
||||
client
|
||||
.save_state(store_name, key, val, None, None, None)
|
||||
.await?;
|
||||
|
||||
println!("Successfully saved!");
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: statestore
|
||||
spec:
|
||||
type: state.redis
|
||||
version: v1
|
||||
metadata:
|
||||
- name: redisHost
|
||||
value: localhost:6379
|
||||
- name: redisPassword
|
||||
value: ""
|
|
@ -0,0 +1,12 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: configstore
|
||||
spec:
|
||||
type: configuration.redis
|
||||
version: v1
|
||||
metadata:
|
||||
- name: redisHost
|
||||
value: localhost:6379
|
||||
- name: redisPassword
|
||||
value: ""
|
|
@ -0,0 +1,88 @@
|
|||
Before you run the example, make sure the local Redis state store is running by executing:
|
||||
```bash
|
||||
docker ps
|
||||
```
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Insert the key `hello` with the value `world` into Redis using the following command:
|
||||
|
||||
|
||||
<!-- STEP
|
||||
name: Insert test configuration item
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- 'OK'
|
||||
background: false
|
||||
sleep: 5
|
||||
timeout_seconds: 5
|
||||
-->
|
||||
|
||||
```bash
|
||||
docker exec dapr_redis redis-cli MSET hello "world"
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
3. Run the example with dapr using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Run configuration app
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- '== APP == Configuration value: ConfigurationItem { value: "world"'
|
||||
- '== APP == App subscribed to config changes with subscription id:'
|
||||
- '== APP == Configuration value: {"hello": ConfigurationItem { value: "world2"'
|
||||
- '== APP == App unsubscribed from config changes'
|
||||
background: true
|
||||
sleep: 15
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run --app-id=rustapp --resources-path ../components --dapr-grpc-port 3500 -- cargo run --example configuration
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
4. Change the value of the key `hello` in redis using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Update test configuration item
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- 'OK'
|
||||
background: true
|
||||
sleep: 5
|
||||
timeout_seconds: 5
|
||||
-->
|
||||
|
||||
```bash
|
||||
docker exec dapr_redis redis-cli MSET hello "world2"
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
|
||||
If everything went well you should see the following output along with dapr logs:
|
||||
```
|
||||
Configuration value: ConfigurationItem { value: "world", version: "", metadata: {} }
|
||||
App subscribed to config changes with subscription id: "d383169a-0893-4c64-adde-fc3145b56d07"
|
||||
Configuration value: {"hello": ConfigurationItem { value: "world2", version: "", metadata: {} }}
|
||||
App unsubscribed from config changes
|
||||
```
|
||||
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
use tokio_stream::StreamExt;
|
||||
|
||||
const CONFIGSTORE_NAME: &str = "configstore";
|
||||
type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// TODO: Handle this issue in the sdk
|
||||
// Introduce delay so that dapr grpc port is assigned before app tries to connect
|
||||
tokio::time::sleep(std::time::Duration::new(2, 0)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
||||
// Create the client
|
||||
let mut client = DaprClient::connect(addr).await?;
|
||||
|
||||
let key = String::from("hello");
|
||||
|
||||
// get key-value pair in the state store
|
||||
let response = client
|
||||
.get_configuration(CONFIGSTORE_NAME, vec![&key], None)
|
||||
.await?;
|
||||
let val = response.items.get("hello").unwrap();
|
||||
println!("Configuration value: {val:?}");
|
||||
|
||||
// Subscribe for configuration changes
|
||||
let mut stream = client
|
||||
.subscribe_configuration(CONFIGSTORE_NAME, vec![&key], None)
|
||||
.await?;
|
||||
|
||||
let mut subscription_id = String::new();
|
||||
while let Some(result) = stream.next().await {
|
||||
let subscribe = result.unwrap();
|
||||
if subscribe.items.is_empty() {
|
||||
// Update the subscription_id
|
||||
subscription_id = subscribe.id.clone();
|
||||
println!("App subscribed to config changes with subscription id: {subscription_id:?} ");
|
||||
continue;
|
||||
}
|
||||
println!("Configuration value: {:?}", subscribe.items);
|
||||
unsubscribe(&mut client, &subscription_id).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Function to unsubscribe from configuration updates and exit the app
|
||||
async fn unsubscribe(client: &mut DaprClient, subscription_id: &str) {
|
||||
match client
|
||||
.unsubscribe_configuration(CONFIGSTORE_NAME, subscription_id)
|
||||
.await
|
||||
{
|
||||
Ok(_) => println!("App unsubscribed from config changes"),
|
||||
Err(e) => println!("Error unsubscribing from config updates: {e}"),
|
||||
}
|
||||
std::process::exit(0);
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
# Dapr Conversation Example with the Rust-SDK
|
||||
|
||||
This example uses the echo component to send a request; the component's response is the exact message it received.
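
The core of the example is a single `converse_alpha1` call. The sketch below condenses it from the full source later in this diff, using the same builders and the `echo` component configured under `./config`:

```rust
use dapr::client::{ConversationInputBuilder, ConversationRequestBuilder};

type DaprClient = dapr::Client<dapr::client::TonicClient>;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to the Dapr sidecar on the gRPC port this example uses.
    let mut client =
        DaprClient::connect_with_port("https://127.0.0.1".to_string(), "3500".to_string()).await?;

    // Build a single-input request against the "echo" conversation component.
    let input = ConversationInputBuilder::new("hello world").build();
    let request = ConversationRequestBuilder::new("echo", vec![input]).build();

    // The echo component returns the message unchanged.
    let response = client.converse_alpha1(request).await?;
    println!("conversation output: {:?}", response.outputs[0].result);

    Ok(())
}
```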
|
||||
|
||||
## Steps
|
||||
|
||||
### Prepare
|
||||
|
||||
- Dapr installed
|
||||
|
||||
### Run Conversation Example
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example using the Dapr CLI
|
||||
|
||||
<!-- STEP
|
||||
name: Run Conversation
|
||||
output_match_mode: substring
|
||||
expected_stdout_lines:
|
||||
- 'conversation input: "hello world"'
|
||||
- 'conversation output: "hello world"'
|
||||
|
||||
background: true
|
||||
sleep: 15
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run --app-id=conversation --resources-path ./config --dapr-grpc-port 3500 -- cargo run --example conversation
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
## Result
|
||||
|
||||
```
|
||||
conversation input: "hello world"
conversation output: "hello world"
|
||||
```
|
|
@ -0,0 +1,7 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: echo
|
||||
spec:
|
||||
type: conversation.echo
|
||||
version: v1
|
|
@ -0,0 +1,30 @@
|
|||
use dapr::client::{ConversationInputBuilder, ConversationRequestBuilder};
|
||||
use std::time::Duration;
|
||||
|
||||
type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Sleep to allow for the server to become available
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let address = "https://127.0.0.1".to_string();
|
||||
let port = "3500".to_string();
|
||||
|
||||
let mut client = DaprClient::connect_with_port(address, port).await?;
|
||||
|
||||
let input = ConversationInputBuilder::new("hello world").build();
|
||||
|
||||
let conversation_component = "echo";
|
||||
|
||||
let request =
|
||||
ConversationRequestBuilder::new(conversation_component, vec![input.clone()]).build();
|
||||
|
||||
println!("conversation input: {:?}", input.content);
|
||||
|
||||
let response = client.converse_alpha1(request).await?;
|
||||
|
||||
println!("conversation output: {:?}", response.outputs[0].result);
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
# Crypto Example
|
||||
|
||||
This is a simple example that demonstrates Dapr's Cryptography capabilities.
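
At its core, the example encrypts a payload with the `localstorage` crypto component and then decrypts it again. The sketch below condenses that round trip from the full source later in this diff; the key file name refers to the keys generated in step 2:

```rust
use dapr::client::ReaderStream;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let addr = "https://127.0.0.1".to_string();
    let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;

    // Encrypt an in-memory buffer with the "localstorage" component, wrapping
    // the data-encryption key with the RSA key generated in step 2 below.
    let encrypted = client
        .encrypt(
            ReaderStream::new("Test".as_bytes()),
            dapr::client::EncryptRequestOptions {
                component_name: "localstorage".to_string(),
                key_name: "rsa-private-key.pem".to_string(),
                key_wrap_algorithm: "RSA".to_string(),
                data_encryption_cipher: "aes-gcm".to_string(),
                omit_decryption_key_name: false,
                decryption_key_name: "rsa-private-key.pem".to_string(),
            },
        )
        .await
        .unwrap();

    // Decrypt it again with the same component and key.
    let decrypted = client
        .decrypt(
            encrypted,
            dapr::client::DecryptRequestOptions {
                component_name: "localstorage".to_string(),
                key_name: "rsa-private-key.pem".to_string(),
            },
        )
        .await
        .unwrap();

    assert_eq!(String::from_utf8(decrypted).unwrap(), "Test");
    println!("Successfully Decrypted String");
    Ok(())
}
```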
|
||||
|
||||
> **Note:** Make sure to use latest version of proto bindings.
|
||||
|
||||
## Running
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Generate keys in the `examples/crypto/keys` directory:
|
||||
|
||||
<!-- STEP
|
||||
name: Generate keys
|
||||
background: false
|
||||
sleep: 5
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
mkdir -p keys
|
||||
# Generate a private RSA key, 4096-bit keys
|
||||
openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:4096 -out keys/rsa-private-key.pem
|
||||
# Generate a 256-bit key for AES
|
||||
openssl rand -out keys/symmetric-key-256 32
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
3. Run the multi-app run template:
|
||||
|
||||
<!-- STEP
|
||||
name: Run multi-app
|
||||
output_match_mode: substring
|
||||
match_order: none
|
||||
expected_stdout_lines:
|
||||
- '== APP - crypto-example == Successfully Decrypted String'
|
||||
- '== APP - crypto-example == Successfully Decrypted Image'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
4. Stop with `ctrl + c`
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Component
|
||||
metadata:
|
||||
name: localstorage
|
||||
spec:
|
||||
type: crypto.dapr.localstorage
|
||||
version: v1
|
||||
metadata:
|
||||
- name: path
|
||||
# Path is relative to the folder where the example is located
|
||||
value: ./keys
|
|
@ -0,0 +1,10 @@
|
|||
version: 1
|
||||
common:
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: crypto-example
|
||||
appDirPath: ./
|
||||
daprGRPCPort: 35002
|
||||
logLevel: debug
|
||||
command: [ "cargo", "run", "--example", "crypto" ]
|
||||
resourcesPath: ./components
|
|
@ -0,0 +1,80 @@
|
|||
use std::fs;
|
||||
|
||||
use tokio::fs::File;
|
||||
use tokio::time::sleep;
|
||||
|
||||
use dapr::client::ReaderStream;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
sleep(std::time::Duration::new(2, 0)).await;
|
||||
let addr = "https://127.0.0.1".to_string();
|
||||
|
||||
let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?;
|
||||
|
||||
let encrypted = client
|
||||
.encrypt(
|
||||
ReaderStream::new("Test".as_bytes()),
|
||||
dapr::client::EncryptRequestOptions {
|
||||
component_name: "localstorage".to_string(),
|
||||
key_name: "rsa-private-key.pem".to_string(),
|
||||
key_wrap_algorithm: "RSA".to_string(),
|
||||
data_encryption_cipher: "aes-gcm".to_string(),
|
||||
omit_decryption_key_name: false,
|
||||
decryption_key_name: "rsa-private-key.pem".to_string(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let decrypted = client
|
||||
.decrypt(
|
||||
encrypted,
|
||||
dapr::client::DecryptRequestOptions {
|
||||
component_name: "localstorage".to_string(),
|
||||
key_name: "rsa-private-key.pem".to_string(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(String::from_utf8(decrypted).unwrap().as_str(), "Test");
|
||||
|
||||
println!("Successfully Decrypted String");
|
||||
|
||||
let image = File::open("./image.png").await.unwrap();
|
||||
|
||||
let encrypted = client
|
||||
.encrypt(
|
||||
ReaderStream::new(image),
|
||||
dapr::client::EncryptRequestOptions {
|
||||
component_name: "localstorage".to_string(),
|
||||
key_name: "rsa-private-key.pem".to_string(),
|
||||
key_wrap_algorithm: "RSA".to_string(),
|
||||
data_encryption_cipher: "aes-gcm".to_string(),
|
||||
omit_decryption_key_name: false,
|
||||
decryption_key_name: "rsa-private-key.pem".to_string(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let decrypted = client
|
||||
.decrypt(
|
||||
encrypted,
|
||||
dapr::client::DecryptRequestOptions {
|
||||
component_name: "localstorage".to_string(),
|
||||
key_name: "rsa-private-key.pem".to_string(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let image = fs::read("./image.png").unwrap();
|
||||
|
||||
assert_eq!(decrypted, image);
|
||||
|
||||
println!("Successfully Decrypted Image");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,59 @@
|
|||
Before you run the example, make sure the local Redis state store is running by executing:

```bash
|
||||
docker ps
|
||||
```
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example with dapr using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Run Multi-app
|
||||
output_match_mode: substring
|
||||
match_order: none
|
||||
expected_stdout_lines:
|
||||
- '== APP - invoke-grpc-server == AppCallback server listening on: [::]:50051'
|
||||
- '== APP - invoke-grpc-client == Response: HelloReply {'
|
||||
- '== APP - invoke-grpc-client == message: "Hello Test!",'
|
||||
- '== APP - invoke-grpc-client == }'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
What the multi-run step effectively runs for you:
|
||||
1. Runs the invoke-grpc-server:
|
||||
```bash
|
||||
dapr run --app-id=invoke-grpc-server --app-protocol grpc --app-port 50051 -- cargo run --example invoke-grpc-proxying-server
|
||||
```
|
||||
|
||||
2. Runs the invoke-grpc-client:
|
||||
```bash
|
||||
dapr run --app-id=invoke-grpc-client -- cargo run --example invoke-grpc-proxying-client
|
||||
```
|
||||
|
||||
If everything went well you should see the following output along with dapr logs:
|
||||
```
|
||||
Response: HelloReply {
|
||||
message: "Hello Test!",
|
||||
}
|
||||
```
|
|
@ -0,0 +1,37 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use hello_world::{greeter_client::GreeterClient, HelloRequest};
|
||||
|
||||
use tonic::metadata::MetadataValue;
|
||||
|
||||
pub mod hello_world {
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Sleep to allow for the server to become available
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Get the Dapr port and create a connection
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT").unwrap().parse().unwrap();
|
||||
let address = format!("https://127.0.0.1:{port}");
|
||||
|
||||
let mut client = GreeterClient::connect(address).await?;
|
||||
|
||||
let request = HelloRequest {
|
||||
name: "Test".to_string(),
|
||||
};
|
||||
let mut request = tonic::Request::new(request);
|
||||
request.metadata_mut().append(
|
||||
"dapr-app-id",
|
||||
MetadataValue::from_static("invoke-grpc-server"),
|
||||
);
|
||||
|
||||
let response = client.say_hello(request).await.unwrap();
|
||||
let hello_reply = response.into_inner();
|
||||
|
||||
println!("Response: {hello_reply:#?}");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
version: 1
|
||||
common:
|
||||
resourcesPath: ./resources/
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: invoke-grpc-server
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "invoke-grpc-proxying-server"]
|
||||
- appID: invoke-grpc-client
|
||||
appDirPath: ./
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "invoke-grpc-proxying-client"]
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Resiliency
|
||||
metadata:
|
||||
name: myresiliency
|
||||
spec:
|
||||
policies:
|
||||
retries:
|
||||
DaprBuiltInInitializationRetries:
|
||||
policy: constant
|
||||
maxInterval: 5s
|
||||
maxRetries: 10
|
|
@ -0,0 +1,44 @@
|
|||
use crate::hello_world::greeter_server::{Greeter, GreeterServer};
|
||||
use crate::hello_world::{HelloReply, HelloRequest};
|
||||
use tonic::{transport::Server, Request, Response, Status};
|
||||
|
||||
pub mod hello_world {
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct GreeterService {}
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl Greeter for GreeterService {
|
||||
async fn say_hello(
|
||||
&self,
|
||||
request: Request<HelloRequest>,
|
||||
) -> Result<Response<HelloReply>, Status> {
|
||||
let req = request.into_inner();
|
||||
|
||||
let name = req.name;
|
||||
|
||||
let response = HelloReply {
|
||||
message: format!("Hello {name}!"),
|
||||
};
|
||||
|
||||
Ok(Response::new(response))
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let server_address = "[::]:50051".parse().unwrap();
|
||||
|
||||
let greeter_service = GreeterService::default();
|
||||
|
||||
println!("AppCallback server listening on: {server_address}");
|
||||
// Create a gRPC server with the callback_service.
|
||||
Server::builder()
|
||||
.add_service(GreeterServer::new(greeter_service))
|
||||
.serve(server_address)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,106 @@
|
|||
Before you run the example, make sure the local Redis state store is running by executing:

```bash
|
||||
docker ps
|
||||
```
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the example with dapr using the following command to start the multi-app run:
|
||||
|
||||
<!-- STEP
|
||||
name: Run Multi-app
|
||||
output_match_mode: substring
|
||||
match_order: sequential
|
||||
expected_stdout_lines:
|
||||
- '== APP - invoke-grpc-server == Method: say_hello'
|
||||
- '== APP - invoke-grpc-server == Name: "Test"'
|
||||
- '== APP - invoke-grpc-client == Message: "Hello World!"'
|
||||
- '== APP - invoke-grpc-client == Response: InvokeResponse {'
|
||||
- '== APP - invoke-grpc-client == data: Some('
|
||||
- '== APP - invoke-grpc-client == Any {'
|
||||
- '== APP - invoke-grpc-client == type_url: "",'
|
||||
- '== APP - invoke-grpc-client == value: ['
|
||||
- '== APP - invoke-grpc-client == 10,'
|
||||
- '== APP - invoke-grpc-client == 12,'
|
||||
- '== APP - invoke-grpc-client == 72,'
|
||||
- '== APP - invoke-grpc-client == 101,'
|
||||
- '== APP - invoke-grpc-client == 108,'
|
||||
- '== APP - invoke-grpc-client == 108,'
|
||||
- '== APP - invoke-grpc-client == 111,'
|
||||
- '== APP - invoke-grpc-client == 32,'
|
||||
- '== APP - invoke-grpc-client == 87,'
|
||||
- '== APP - invoke-grpc-client == 111,'
|
||||
- '== APP - invoke-grpc-client == 114,'
|
||||
- '== APP - invoke-grpc-client == 108,'
|
||||
- '== APP - invoke-grpc-client == 100,'
|
||||
- '== APP - invoke-grpc-client == 33,'
|
||||
- '== APP - invoke-grpc-client == ],'
|
||||
- '== APP - invoke-grpc-client == },'
|
||||
- '== APP - invoke-grpc-client == ),'
|
||||
- '== APP - invoke-grpc-client == content_type: "application/json",'
|
||||
- '== APP - invoke-grpc-client == }'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
The multi-app run is the equivalent of running:
|
||||
1. The server application with dapr
|
||||
```bash
|
||||
dapr run --app-id=invoke-grpc-server --app-protocol grpc --app-port 50051 -- cargo run --example invoke-grpc-server
|
||||
```
|
||||
|
||||
2. The client application
|
||||
```bash
|
||||
dapr run --app-id=invoke-grpc-client -- cargo run --example invoke-grpc-client
|
||||
```
|
||||
|
||||
If everything went well you should see the following output along with dapr logs:
|
||||
```
|
||||
Message: "Hello World!"
|
||||
Response: InvokeResponse {
|
||||
data: Some(
|
||||
Any {
|
||||
type_url: "",
|
||||
value: [
|
||||
10,
|
||||
12,
|
||||
72,
|
||||
101,
|
||||
108,
|
||||
108,
|
||||
111,
|
||||
32,
|
||||
87,
|
||||
111,
|
||||
114,
|
||||
108,
|
||||
100,
|
||||
33,
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type: "application/json",
|
||||
}
|
||||
```
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
use crate::hello_world::HelloReply;
|
||||
use std::time::Duration;
|
||||
|
||||
use prost::Message;
|
||||
|
||||
pub mod hello_world {
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Sleep to allow for the server to become available
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Set the Dapr address
|
||||
let address = "https://127.0.0.1".to_string();
|
||||
|
||||
let mut client = DaprClient::connect(address).await?;
|
||||
|
||||
let request = hello_world::HelloRequest {
|
||||
name: "Test".to_string(),
|
||||
};
|
||||
let data = request.encode_to_vec();
|
||||
let data = prost_types::Any {
|
||||
type_url: "".to_string(),
|
||||
value: data,
|
||||
};
|
||||
|
||||
let response = client
|
||||
.invoke_service("invoke-grpc-server", "say_hello", Some(data))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
if let Some(any) = &response.data {
|
||||
let data = &any.value;
|
||||
let resp = HelloReply::decode(&data[..]).unwrap();
|
||||
println!("Message: {:#?}", &resp.message);
|
||||
};
|
||||
|
||||
println!("Response: {response:#?}");
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
version: 1
|
||||
common:
|
||||
resourcesPath: ./resources/
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: invoke-grpc-server
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "invoke-grpc-server"]
|
||||
- appID: invoke-grpc-client
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "invoke-grpc-client"]
|
|
@ -0,0 +1,11 @@
|
|||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Resiliency
|
||||
metadata:
|
||||
name: myresiliency
|
||||
spec:
|
||||
policies:
|
||||
retries:
|
||||
DaprBuiltInInitializationRetries:
|
||||
policy: constant
|
||||
maxInterval: 5s
|
||||
maxRetries: 10
|
|
@ -0,0 +1,108 @@
|
|||
use dapr::{
|
||||
appcallback::*,
|
||||
dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer},
|
||||
};
|
||||
use tonic::{transport::Server, Request, Response, Status};
|
||||
|
||||
use prost::Message;
|
||||
|
||||
use hello_world::{HelloReply, HelloRequest};
|
||||
|
||||
pub mod hello_world {
|
||||
include!("../protos/helloworld.rs");
|
||||
}
|
||||
|
||||
pub struct AppCallbackService {}
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl AppCallback for AppCallbackService {
|
||||
/// Invokes service method with InvokeRequest.
|
||||
async fn on_invoke(
|
||||
&self,
|
||||
request: Request<InvokeRequest>,
|
||||
) -> Result<Response<InvokeResponse>, Status> {
|
||||
let r = request.into_inner();
|
||||
|
||||
let method = &r.method;
|
||||
println!("Method: {method}");
|
||||
let data = &r.data;
|
||||
|
||||
if let Some(any) = data {
|
||||
let data = &any.value;
|
||||
let resp = HelloRequest::decode(&data[..]).unwrap();
|
||||
println!("Name: {:#?}", &resp.name);
|
||||
|
||||
let response = HelloReply {
|
||||
message: "Hello World!".to_string(),
|
||||
};
|
||||
let data = response.encode_to_vec();
|
||||
|
||||
let data = prost_types::Any {
|
||||
type_url: "".to_string(),
|
||||
value: data,
|
||||
};
|
||||
|
||||
let invoke_response = InvokeResponse {
|
||||
content_type: "application/json".to_string(),
|
||||
data: Some(data),
|
||||
};
|
||||
|
||||
return Ok(Response::new(invoke_response));
|
||||
};
|
||||
|
||||
Ok(Response::new(InvokeResponse::default()))
|
||||
}
|
||||
|
||||
/// Lists all topics subscribed by this app.
|
||||
///
|
||||
/// NOTE: Dapr runtime will call this method to get
|
||||
/// the list of topics the app wants to subscribe to.
|
||||
/// In this example, the app is subscribing to topic `A`.
|
||||
async fn list_topic_subscriptions(
|
||||
&self,
|
||||
_request: Request<()>,
|
||||
) -> Result<Response<ListTopicSubscriptionsResponse>, Status> {
|
||||
let list_subscriptions = ListTopicSubscriptionsResponse::default();
|
||||
Ok(Response::new(list_subscriptions))
|
||||
}
|
||||
|
||||
/// Subscribes events from Pubsub.
|
||||
async fn on_topic_event(
|
||||
&self,
|
||||
_request: Request<TopicEventRequest>,
|
||||
) -> Result<Response<TopicEventResponse>, Status> {
|
||||
Ok(Response::new(TopicEventResponse::default()))
|
||||
}
|
||||
|
||||
/// Lists all input bindings subscribed by this app.
|
||||
async fn list_input_bindings(
|
||||
&self,
|
||||
_request: Request<()>,
|
||||
) -> Result<Response<ListInputBindingsResponse>, Status> {
|
||||
Ok(Response::new(ListInputBindingsResponse::default()))
|
||||
}
|
||||
|
||||
/// Listens events from the input bindings.
|
||||
async fn on_binding_event(
|
||||
&self,
|
||||
_request: Request<BindingEventRequest>,
|
||||
) -> Result<Response<BindingEventResponse>, Status> {
|
||||
Ok(Response::new(BindingEventResponse::default()))
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let server_address = "[::]:50051".parse().unwrap();
|
||||
|
||||
let callback_service = AppCallbackService {};
|
||||
|
||||
println!("AppCallback server listening on: {server_address}");
|
||||
// Create a gRPC server with the callback_service.
|
||||
Server::builder()
|
||||
.add_service(AppCallbackServer::new(callback_service))
|
||||
.serve(server_address)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,300 @@
|
|||
// This file is @generated by prost-build.
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HelloRequest {
|
||||
#[prost(string, tag = "1")]
|
||||
pub name: ::prost::alloc::string::String,
|
||||
}
|
||||
#[derive(Clone, PartialEq, ::prost::Message)]
|
||||
pub struct HelloReply {
|
||||
#[prost(string, tag = "1")]
|
||||
pub message: ::prost::alloc::string::String,
|
||||
}
|
||||
/// Generated client implementations.
|
||||
pub mod greeter_client {
|
||||
#![allow(
|
||||
unused_variables,
|
||||
dead_code,
|
||||
missing_docs,
|
||||
clippy::wildcard_imports,
|
||||
clippy::let_unit_value,
|
||||
)]
|
||||
use tonic::codegen::*;
|
||||
use tonic::codegen::http::Uri;
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct GreeterClient<T> {
|
||||
inner: tonic::client::Grpc<T>,
|
||||
}
|
||||
impl GreeterClient<tonic::transport::Channel> {
|
||||
/// Attempt to create a new client by connecting to a given endpoint.
|
||||
pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
|
||||
where
|
||||
D: TryInto<tonic::transport::Endpoint>,
|
||||
D::Error: Into<StdError>,
|
||||
{
|
||||
let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
|
||||
Ok(Self::new(conn))
|
||||
}
|
||||
}
|
||||
impl<T> GreeterClient<T>
|
||||
where
|
||||
T: tonic::client::GrpcService<tonic::body::BoxBody>,
|
||||
T::Error: Into<StdError>,
|
||||
T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
|
||||
<T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
|
||||
{
|
||||
pub fn new(inner: T) -> Self {
|
||||
let inner = tonic::client::Grpc::new(inner);
|
||||
Self { inner }
|
||||
}
|
||||
pub fn with_origin(inner: T, origin: Uri) -> Self {
|
||||
let inner = tonic::client::Grpc::with_origin(inner, origin);
|
||||
Self { inner }
|
||||
}
|
||||
pub fn with_interceptor<F>(
|
||||
inner: T,
|
||||
interceptor: F,
|
||||
) -> GreeterClient<InterceptedService<T, F>>
|
||||
where
|
||||
F: tonic::service::Interceptor,
|
||||
T::ResponseBody: Default,
|
||||
T: tonic::codegen::Service<
|
||||
http::Request<tonic::body::BoxBody>,
|
||||
Response = http::Response<
|
||||
<T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
|
||||
>,
|
||||
>,
|
||||
<T as tonic::codegen::Service<
|
||||
http::Request<tonic::body::BoxBody>,
|
||||
>>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
|
||||
{
|
||||
GreeterClient::new(InterceptedService::new(inner, interceptor))
|
||||
}
|
||||
/// Compress requests with the given encoding.
|
||||
///
|
||||
/// This requires the server to support it otherwise it might respond with an
|
||||
/// error.
|
||||
#[must_use]
|
||||
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.inner = self.inner.send_compressed(encoding);
|
||||
self
|
||||
}
|
||||
/// Enable decompressing responses.
|
||||
#[must_use]
|
||||
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.inner = self.inner.accept_compressed(encoding);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of a decoded message.
|
||||
///
|
||||
/// Default: `4MB`
|
||||
#[must_use]
|
||||
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.inner = self.inner.max_decoding_message_size(limit);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of an encoded message.
|
||||
///
|
||||
/// Default: `usize::MAX`
|
||||
#[must_use]
|
||||
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.inner = self.inner.max_encoding_message_size(limit);
|
||||
self
|
||||
}
|
||||
pub async fn say_hello(
|
||||
&mut self,
|
||||
request: impl tonic::IntoRequest<super::HelloRequest>,
|
||||
) -> std::result::Result<tonic::Response<super::HelloReply>, tonic::Status> {
|
||||
self.inner
|
||||
.ready()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tonic::Status::unknown(
|
||||
format!("Service was not ready: {}", e.into()),
|
||||
)
|
||||
})?;
|
||||
let codec = tonic::codec::ProstCodec::default();
|
||||
let path = http::uri::PathAndQuery::from_static(
|
||||
"/helloworld.Greeter/SayHello",
|
||||
);
|
||||
let mut req = request.into_request();
|
||||
req.extensions_mut()
|
||||
.insert(GrpcMethod::new("helloworld.Greeter", "SayHello"));
|
||||
self.inner.unary(req, path, codec).await
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Generated server implementations.
|
||||
pub mod greeter_server {
|
||||
#![allow(
|
||||
unused_variables,
|
||||
dead_code,
|
||||
missing_docs,
|
||||
clippy::wildcard_imports,
|
||||
clippy::let_unit_value,
|
||||
)]
|
||||
use tonic::codegen::*;
|
||||
/// Generated trait containing gRPC methods that should be implemented for use with GreeterServer.
|
||||
#[async_trait]
|
||||
pub trait Greeter: std::marker::Send + std::marker::Sync + 'static {
|
||||
async fn say_hello(
|
||||
&self,
|
||||
request: tonic::Request<super::HelloRequest>,
|
||||
) -> std::result::Result<tonic::Response<super::HelloReply>, tonic::Status>;
|
||||
}
|
||||
#[derive(Debug)]
|
||||
pub struct GreeterServer<T> {
|
||||
inner: Arc<T>,
|
||||
accept_compression_encodings: EnabledCompressionEncodings,
|
||||
send_compression_encodings: EnabledCompressionEncodings,
|
||||
max_decoding_message_size: Option<usize>,
|
||||
max_encoding_message_size: Option<usize>,
|
||||
}
|
||||
impl<T> GreeterServer<T> {
|
||||
pub fn new(inner: T) -> Self {
|
||||
Self::from_arc(Arc::new(inner))
|
||||
}
|
||||
pub fn from_arc(inner: Arc<T>) -> Self {
|
||||
Self {
|
||||
inner,
|
||||
accept_compression_encodings: Default::default(),
|
||||
send_compression_encodings: Default::default(),
|
||||
max_decoding_message_size: None,
|
||||
max_encoding_message_size: None,
|
||||
}
|
||||
}
|
||||
pub fn with_interceptor<F>(
|
||||
inner: T,
|
||||
interceptor: F,
|
||||
) -> InterceptedService<Self, F>
|
||||
where
|
||||
F: tonic::service::Interceptor,
|
||||
{
|
||||
InterceptedService::new(Self::new(inner), interceptor)
|
||||
}
|
||||
/// Enable decompressing requests with the given encoding.
|
||||
#[must_use]
|
||||
pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.accept_compression_encodings.enable(encoding);
|
||||
self
|
||||
}
|
||||
/// Compress responses with the given encoding, if the client supports it.
|
||||
#[must_use]
|
||||
pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
|
||||
self.send_compression_encodings.enable(encoding);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of a decoded message.
|
||||
///
|
||||
/// Default: `4MB`
|
||||
#[must_use]
|
||||
pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.max_decoding_message_size = Some(limit);
|
||||
self
|
||||
}
|
||||
/// Limits the maximum size of an encoded message.
|
||||
///
|
||||
/// Default: `usize::MAX`
|
||||
#[must_use]
|
||||
pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
|
||||
self.max_encoding_message_size = Some(limit);
|
||||
self
|
||||
}
|
||||
}
|
||||
impl<T, B> tonic::codegen::Service<http::Request<B>> for GreeterServer<T>
|
||||
where
|
||||
T: Greeter,
|
||||
B: Body + std::marker::Send + 'static,
|
||||
B::Error: Into<StdError> + std::marker::Send + 'static,
|
||||
{
|
||||
type Response = http::Response<tonic::body::BoxBody>;
|
||||
type Error = std::convert::Infallible;
|
||||
type Future = BoxFuture<Self::Response, Self::Error>;
|
||||
fn poll_ready(
|
||||
&mut self,
|
||||
_cx: &mut Context<'_>,
|
||||
) -> Poll<std::result::Result<(), Self::Error>> {
|
||||
Poll::Ready(Ok(()))
|
||||
}
|
||||
fn call(&mut self, req: http::Request<B>) -> Self::Future {
|
||||
match req.uri().path() {
|
||||
"/helloworld.Greeter/SayHello" => {
|
||||
#[allow(non_camel_case_types)]
|
||||
struct SayHelloSvc<T: Greeter>(pub Arc<T>);
|
||||
impl<T: Greeter> tonic::server::UnaryService<super::HelloRequest>
|
||||
for SayHelloSvc<T> {
|
||||
type Response = super::HelloReply;
|
||||
type Future = BoxFuture<
|
||||
tonic::Response<Self::Response>,
|
||||
tonic::Status,
|
||||
>;
|
||||
fn call(
|
||||
&mut self,
|
||||
request: tonic::Request<super::HelloRequest>,
|
||||
) -> Self::Future {
|
||||
let inner = Arc::clone(&self.0);
|
||||
let fut = async move {
|
||||
<T as Greeter>::say_hello(&inner, request).await
|
||||
};
|
||||
Box::pin(fut)
|
||||
}
|
||||
}
|
||||
let accept_compression_encodings = self.accept_compression_encodings;
|
||||
let send_compression_encodings = self.send_compression_encodings;
|
||||
let max_decoding_message_size = self.max_decoding_message_size;
|
||||
let max_encoding_message_size = self.max_encoding_message_size;
|
||||
let inner = self.inner.clone();
|
||||
let fut = async move {
|
||||
let method = SayHelloSvc(inner);
|
||||
let codec = tonic::codec::ProstCodec::default();
|
||||
let mut grpc = tonic::server::Grpc::new(codec)
|
||||
.apply_compression_config(
|
||||
accept_compression_encodings,
|
||||
send_compression_encodings,
|
||||
)
|
||||
.apply_max_message_size_config(
|
||||
max_decoding_message_size,
|
||||
max_encoding_message_size,
|
||||
);
|
||||
let res = grpc.unary(method, req).await;
|
||||
Ok(res)
|
||||
};
|
||||
Box::pin(fut)
|
||||
}
|
||||
_ => {
|
||||
Box::pin(async move {
|
||||
let mut response = http::Response::new(empty_body());
|
||||
let headers = response.headers_mut();
|
||||
headers
|
||||
.insert(
|
||||
tonic::Status::GRPC_STATUS,
|
||||
(tonic::Code::Unimplemented as i32).into(),
|
||||
);
|
||||
headers
|
||||
.insert(
|
||||
http::header::CONTENT_TYPE,
|
||||
tonic::metadata::GRPC_CONTENT_TYPE,
|
||||
);
|
||||
Ok(response)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<T> Clone for GreeterServer<T> {
|
||||
fn clone(&self) -> Self {
|
||||
let inner = self.inner.clone();
|
||||
Self {
|
||||
inner,
|
||||
accept_compression_encodings: self.accept_compression_encodings,
|
||||
send_compression_encodings: self.send_compression_encodings,
|
||||
max_decoding_message_size: self.max_decoding_message_size,
|
||||
max_encoding_message_size: self.max_encoding_message_size,
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Generated gRPC service name
|
||||
pub const SERVICE_NAME: &str = "helloworld.Greeter";
|
||||
impl<T> tonic::server::NamedService for GreeterServer<T> {
|
||||
const NAME: &'static str = SERVICE_NAME;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
# Jobs Example
|
||||
|
||||
This is a simple example that demonstrates Dapr's job scheduling capabilities.
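
In short, the client schedules a named job and a callback server registered for that job name receives the triggers. A condensed sketch of the client-side calls, taken from the full example later in this diff:

```rust
use dapr::client::JobBuilder;

type DaprClient = dapr::Client<dapr::client::TonicClient>;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to the Dapr sidecar.
    let mut client = DaprClient::connect("https://127.0.0.1".to_string()).await?;

    // Schedule a job named "prod-db-backup" that fires every second.
    let job = JobBuilder::new("prod-db-backup")
        .with_schedule("@every 1s")
        .build();
    client.schedule_job_alpha1(job).await?;
    println!("job scheduled successfully");

    // Fetch it back, then delete it. The handler side registers a callback for
    // the same job name via `add_job_handler_alpha!` (see the full example below).
    let _get_resp = client.get_job_alpha1("prod-db-backup").await?;
    client.delete_job_alpha1("prod-db-backup").await?;
    println!("job deleted");

    Ok(())
}
```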
|
||||
|
||||
## Running
|
||||
|
||||
To run this example:
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the multi-app run template:
|
||||
|
||||
<!-- STEP
|
||||
name: Run multi-app
|
||||
output_match_mode: substring
|
||||
match_order: none
|
||||
expected_stdout_lines:
|
||||
- 'job scheduled successfully'
|
||||
- 'job received'
|
||||
- 'job received'
|
||||
- 'job received'
|
||||
- 'received job on ping_pong_handler'
|
||||
- 'received job on ping_pong_handler'
|
||||
- 'received job on ping_pong_handler'
|
||||
- 'received job on ping_pong_handler'
|
||||
- 'received job on ping_pong_handler'
|
||||
background: true
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
3. Stop with `ctrl + c`
|
|
@ -0,0 +1,11 @@
|
|||
version: 1
|
||||
common:
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: jobs-example
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
schedulerHostAddress: localhost
|
||||
command: [ "cargo", "run", "--example", "jobs" ]
|
|
@ -0,0 +1,149 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use dapr::client::JobBuilder;
|
||||
use dapr::dapr::proto::runtime::v1::{
|
||||
app_callback_alpha_server::AppCallbackAlphaServer, JobEventRequest, JobEventResponse,
|
||||
};
|
||||
use dapr::server::appcallbackalpha::{AppCallbackServiceAlpha, JobHandlerMethod};
|
||||
use dapr::{add_job_handler_alpha, serde_json};
|
||||
use prost_types::Any;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::time::sleep;
|
||||
use tonic::transport::Server;
|
||||
use tonic::Status;
|
||||
|
||||
type DaprClient = dapr::Client<dapr::client::TonicClient>;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
struct Backup {
|
||||
task: String,
|
||||
metadata: Option<Metadata>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
struct Metadata {
|
||||
db_name: String,
|
||||
backup_location: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
struct JsonAny {
|
||||
type_url: String,
|
||||
value: String,
|
||||
}
|
||||
|
||||
async fn ping_pong_handler(_request: JobEventRequest) -> Result<JobEventResponse, Status> {
|
||||
println!("received job on ping_pong_handler");
|
||||
|
||||
Ok(JobEventResponse::default())
|
||||
}
|
||||
async fn backup_job_handler(request: JobEventRequest) -> Result<JobEventResponse, Status> {
|
||||
// The logic for handling the backup job request
|
||||
|
||||
if request.data.is_some() {
|
||||
// Deserialize the decoded value into a Backup struct
|
||||
let backup_val: Backup = serde_json::from_slice(&request.data.unwrap().value).unwrap();
|
||||
|
||||
println!("job received: {backup_val:?}");
|
||||
}
|
||||
|
||||
Ok(JobEventResponse::default())
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
#[allow(non_camel_case_types)]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
tokio::spawn(async move {
|
||||
let server_addr = "127.0.0.1:50051".parse().unwrap();
|
||||
|
||||
println!("AppCallbackAlpha server listening on {server_addr}");
|
||||
|
||||
let mut alpha_callback_service = AppCallbackServiceAlpha::new();
|
||||
|
||||
let backup_job_handler_name = "prod-db-backup";
|
||||
add_job_handler_alpha!(
|
||||
alpha_callback_service,
|
||||
backup_job_handler_name,
|
||||
backup_job_handler
|
||||
);
|
||||
|
||||
let ping_pong_handler_name = "ping-pong";
|
||||
add_job_handler_alpha!(
|
||||
alpha_callback_service,
|
||||
ping_pong_handler_name,
|
||||
ping_pong_handler
|
||||
);
|
||||
|
||||
Server::builder()
|
||||
.add_service(AppCallbackAlphaServer::new(alpha_callback_service))
|
||||
.serve(server_addr)
|
||||
.await
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Client
|
||||
|
||||
let client_addr = "https://127.0.0.1".to_string();
|
||||
|
||||
let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?;
|
||||
let address = format!("{client_addr}:{port}");
|
||||
|
||||
println!("attempting to create a dapr client: {address}");
|
||||
|
||||
// Create the client
|
||||
let mut client = DaprClient::connect(client_addr).await?;
|
||||
|
||||
println!("client created");
|
||||
|
||||
// define job data in json
|
||||
let job = Backup {
|
||||
task: "db-backup".to_string(),
|
||||
metadata: Some(Metadata {
|
||||
db_name: "prod-db".to_string(),
|
||||
backup_location: "/path/to/backup".to_string(),
|
||||
}),
|
||||
};
|
||||
|
||||
let any = Any {
|
||||
type_url: "type.googleapis.com/io.dapr.RustTest".to_string(),
|
||||
value: serde_json::to_vec(&job).unwrap(),
|
||||
};
|
||||
|
||||
let job = JobBuilder::new("prod-db-backup")
|
||||
.with_schedule("@every 1s")
|
||||
.with_data(any)
|
||||
.build();
|
||||
|
||||
let _schedule_resp = client.schedule_job_alpha1(job).await?;
|
||||
|
||||
println!("job scheduled successfully");
|
||||
|
||||
sleep(Duration::from_secs(3)).await;
|
||||
|
||||
let get_resp = client.get_job_alpha1("prod-db-backup").await?;
|
||||
|
||||
let get_resp_backup: Backup =
|
||||
serde_json::from_slice(&get_resp.clone().job.unwrap().data.unwrap().value).unwrap();
|
||||
|
||||
println!("job retrieved: {get_resp_backup:?}");
|
||||
|
||||
let _delete_resp = client.delete_job_alpha1("prod-db-backup").await?;
|
||||
|
||||
println!("job deleted");
|
||||
|
||||
sleep(Duration::from_secs(5)).await;
|
||||
|
||||
// Second handler
|
||||
|
||||
let ping_pong_job = JobBuilder::new("ping-pong")
|
||||
.with_schedule("@every 1s")
|
||||
.with_repeats(5)
|
||||
.build();
|
||||
let _schedule_resp = client.schedule_job_alpha1(ping_pong_job).await?;
|
||||
|
||||
sleep(Duration::from_secs(10)).await;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -0,0 +1,144 @@
|
|||
# Pub/Sub Example
|
||||
|
||||
This is a simple example that demonstrates Dapr's pub/sub capabilities. To implement pub/sub in your Rust application, you need to implement the `AppCallback` server for subscribing to events. Specifically, the following two methods need to be implemented for pub/sub to work:
|
||||
|
||||
1. `list_topic_subscriptions` - The Dapr runtime calls this method to get the list of topics the application is subscribed to.
2. `on_topic_event` - Defines how the application handles a topic event; a minimal subscriber sketch follows this list.
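
The sketch below implements just those two methods. It is not the example's actual subscriber (which handles `Order` and `Refund` messages on topics `A` and `B`); the `pubsub` component name and topic `A` here are assumptions based on this example's output, and the remaining `AppCallback` methods are stubbed out:

```rust
use dapr::appcallback::*;
use dapr::dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer};
use tonic::{transport::Server, Request, Response, Status};

pub struct AppCallbackService;

#[tonic::async_trait]
impl AppCallback for AppCallbackService {
    /// Service invocation is not used by this sketch.
    async fn on_invoke(
        &self,
        _request: Request<InvokeRequest>,
    ) -> Result<Response<InvokeResponse>, Status> {
        Ok(Response::new(InvokeResponse::default()))
    }

    /// Tell the Dapr runtime which pubsub component and topic to subscribe to.
    async fn list_topic_subscriptions(
        &self,
        _request: Request<()>,
    ) -> Result<Response<ListTopicSubscriptionsResponse>, Status> {
        let subscriptions =
            ListTopicSubscriptionsResponse::topic("pubsub".to_string(), "A".to_string());
        Ok(Response::new(subscriptions))
    }

    /// Handle an event delivered on a subscribed topic.
    async fn on_topic_event(
        &self,
        request: Request<TopicEventRequest>,
    ) -> Result<Response<TopicEventResponse>, Status> {
        let event = request.into_inner();
        println!("Message: {}", String::from_utf8_lossy(&event.data));
        Ok(Response::new(TopicEventResponse::default()))
    }

    /// Input bindings are not used by this sketch.
    async fn list_input_bindings(
        &self,
        _request: Request<()>,
    ) -> Result<Response<ListInputBindingsResponse>, Status> {
        Ok(Response::new(ListInputBindingsResponse::default()))
    }

    async fn on_binding_event(
        &self,
        _request: Request<BindingEventRequest>,
    ) -> Result<Response<BindingEventResponse>, Status> {
        Ok(Response::new(BindingEventResponse::default()))
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Serve the callback service on the app port Dapr is configured with (50051).
    let addr = "[::]:50051".parse()?;
    Server::builder()
        .add_service(AppCallbackServer::new(AppCallbackService))
        .serve(addr)
        .await?;
    Ok(())
}
```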
|
||||
|
||||
> **Note:** Make sure to use latest version of proto bindings.
|
||||
|
||||
## Running
|
||||
|
||||
> Before you run the example, make sure the local Redis state store is running by executing:
|
||||
> ```
|
||||
> docker ps
|
||||
> ```
|
||||
|
||||
1. To run the example, we first need to build the examples using the following command:
|
||||
|
||||
<!-- STEP
|
||||
name: Build
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout: 60
|
||||
-->
|
||||
|
||||
```bash
|
||||
cargo build --examples
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
2. Run the multi-app run template:
|
||||
|
||||
<!-- STEP
|
||||
name: Run PubSub example
|
||||
output_match_mode: substring
|
||||
match_order: sequential
|
||||
expected_stdout_lines:
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 0,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 0",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 1,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 1",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 2,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 2",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 3,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 3",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 4,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 4",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 5,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 5",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 6,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 6",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 7,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 7",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 8,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 8",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic A - Order {'
|
||||
- '== APP - rust-subscriber == order_number: 9,'
|
||||
- '== APP - rust-subscriber == order_details: "Count is 9",'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 0,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 1,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 2,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 3,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 4,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 5,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 6,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 7,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 8,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-subscriber == Topic B - Refund {'
|
||||
- '== APP - rust-subscriber == order_number: 9,'
|
||||
- '== APP - rust-subscriber == refund_amount: 1200,'
|
||||
- '== APP - rust-subscriber == }'
|
||||
- '== APP - rust-publisher == messages published'
|
||||
background: false
|
||||
sleep: 30
|
||||
timeout_seconds: 30
|
||||
-->
|
||||
|
||||
|
||||
```bash
|
||||
dapr run -f .
|
||||
```
|
||||
|
||||
<!-- END_STEP -->
|
||||
|
||||
3. Stop with `ctrl + c`
|
||||
|
||||
### Running without multi-app
|
||||
|
||||
1. Run the subscriber with dapr
|
||||
```bash
|
||||
dapr run --app-id rust-subscriber --app-protocol grpc --app-port 50051 cargo run -- --example pubsub-subscriber
|
||||
```
|
||||
|
||||
2. Run the publisher with dapr
|
||||
```bash
|
||||
dapr run --app-id rust-publisher --app-protocol grpc cargo run -- --example pubsub-publisher
|
||||
```
|
|
@ -0,0 +1,16 @@
|
|||
version: 1
|
||||
common:
|
||||
resourcesPath: ./resources/
|
||||
daprdLogDestination: console
|
||||
apps:
|
||||
- appID: rust-subscriber
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
appPort: 50051
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "pubsub-subscriber"]
|
||||
- appID: rust-publisher
|
||||
appDirPath: ./
|
||||
appProtocol: grpc
|
||||
logLevel: debug
|
||||
command: ["cargo", "run", "--example", "pubsub-publisher"]
|
Some files were not shown because too many files have changed in this diff.