mirror of https://github.com/dapr/docs.git
Merge branch 'v1.11' into v1.11-release-patch-1
This commit is contained in:
commit
c347960ff7
|
@ -0,0 +1,118 @@
|
|||
import os
|
||||
from re import S
|
||||
import sys
|
||||
import json
|
||||
from bs4 import BeautifulSoup
|
||||
from algoliasearch.search_client import SearchClient
|
||||
|
||||
# Hostname of the published docs site; used to derive the relative path /
# objectID of each page from its og:url meta tag.
url = "docs.dapr.io"

# Root directory to scan: the first CLI argument (joined to the current
# working directory) when given, otherwise the current working directory.
starting_directory = (
    os.path.join(os.getcwd(), str(sys.argv[1])) if len(sys.argv) > 1 else os.getcwd()
)

# Algolia credentials and target index, supplied via environment variables.
ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID')
ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY')
ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME')

client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
index = client.init_index(ALGOLIA_INDEX_NAME)

# File names that are never indexed.
excluded_files = ["404.html"]

# Directory names that are never scanned.
# NOTE(review): the name is misspelled ("exluded"), but it is referenced
# elsewhere in this script, so it is kept as-is for compatibility.
exluded_directories = ["zh-hans"]

# Base rank per top-level docs section; lower values rank higher in search.
rankings = {
    "Getting started": 0,
    "Concepts": 100,
    "Developing applications": 200,
    "Operations": 300,
    "Reference": 400,
    "Contributing": 500,
    "Home": 600,
}
|
||||
|
||||
def scan_directory(directory: str, pages: list):
    """Recursively collect indexable HTML pages under *directory*.

    Appends the path of every ``.html`` file to *pages* (mutated in place),
    skipping:

    - directories whose base name is in the module-level
      ``exluded_directories`` list,
    - file names listed in ``excluded_files``,
    - pages containing the ``<!-- DISABLE_ALGOLIA -->`` marker.

    Args:
        directory: Root directory to scan.
        pages: List that receives the paths of pages to index.
    """
    if os.path.basename(directory) in exluded_directories:
        print(f'Skipping directory: {directory}')
        return
    for file in os.listdir(directory):
        path = os.path.join(directory, file)
        if os.path.isfile(path):
            if file.endswith(".html") and file not in excluded_files:
                # Use a context manager so the handle is closed deterministically
                # (the original open(...).read() left the file open until GC).
                with open(path, encoding="utf8") as f:
                    content = f.read()
                if '<!-- DISABLE_ALGOLIA -->' not in content:
                    print(f'Indexing: {path}')
                    pages.append(path)
                else:
                    print(f'Skipping hidden page: {path}')
        else:
            scan_directory(path, pages)
|
||||
|
||||
def parse_file(path: str):
    """Parse one rendered HTML page into an Algolia record dict.

    Extracts from the page's ``<meta>`` tags:

    - ``description``  -> ``lvl2`` / ``hierarchy.lvl1``
    - ``og:title``     -> ``lvl0`` / ``hierarchy.lvl0`` / ``hierarchy.lvl2``
    - ``og:url``       -> ``url``; the portion after the module-level ``url``
      hostname becomes ``path`` and ``objectID``

    The first breadcrumb item determines the section rank (via the
    module-level ``rankings`` table) and the breadcrumb depth becomes the
    subrank. All paragraph text is concatenated into ``text``.

    Args:
        path: Filesystem path of the HTML file to parse.

    Returns:
        dict: The Algolia record. ``objectID`` is present only when the page
        has an ``og:url`` meta tag containing the docs hostname.
    """
    data = {}
    data["hierarchy"] = {}
    data["rank"] = 999
    data["subrank"] = 99
    data["type"] = "lvl2"
    data["lvl0"] = ""
    data["lvl1"] = ""
    data["lvl2"] = ""
    data["lvl3"] = ""
    subrank = 0
    # errors='ignore' tolerates stray bytes in generated HTML.
    with open(path, "r", errors='ignore') as file:
        content = file.read()
    soup = BeautifulSoup(content, "html.parser")
    for meta in soup.find_all("meta"):
        if meta.get("name") == "description":
            data["lvl2"] = meta.get("content")
            data["hierarchy"]["lvl1"] = meta.get("content")
        elif meta.get("property") == "og:title":
            data["lvl0"] = meta.get("content")
            data["hierarchy"]["lvl0"] = meta.get("content")
            data["hierarchy"]["lvl2"] = meta.get("content")
        elif meta.get("property") == "og:url":
            # NOTE(review): split(url)[1] raises IndexError if og:url does not
            # contain the docs hostname — presumably guaranteed by the site
            # build; confirm before reusing this script elsewhere.
            data["url"] = meta.get("content")
            data["path"] = meta.get("content").split(url)[1]
            data["objectID"] = meta.get("content").split(url)[1]
    breadcrumbs = soup.find_all("li", class_="breadcrumb-item")
    # find_all always returns a list, so len() cannot fail; the original
    # try/except-with-bare-except around this was dead error handling.
    subrank = len(breadcrumbs)
    data["subrank"] = subrank
    for bc in breadcrumbs:
        section = bc.text.strip()
        data["lvl1"] = section
        data["hierarchy"]["lvl0"] = section
        try:
            data["rank"] = rankings[section] + subrank
        except KeyError:
            # Was a bare except; only a missing rankings key is expected here.
            print(f"Rank not found for section {section}")
            data["rank"] = 998
        break  # only the first breadcrumb determines the section rank
    # Join once instead of repeated string concatenation (quadratic).
    paragraphs = [p.text for p in soup.find_all("p") if p.text != ""]
    data["text"] = "".join(paragraphs)
    return data
|
||||
|
||||
def index_payload(payload):
    """Replace the entire contents of the Algolia index with *payload*.

    Args:
        payload: Iterable of record dicts produced by ``parse_file``.
    """
    # replace_all_objects swaps in the new records; wait() blocks until the
    # indexing task has completed on the Algolia side.
    index.replace_all_objects(payload).wait()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Collect every indexable HTML page under the starting directory.
    pages = []
    scan_directory(starting_directory, pages)
    # Parse each page, keeping only records that resolved an objectID
    # (i.e. pages that had an og:url containing the docs hostname).
    records = (parse_file(page) for page in pages)
    payload = [record for record in records if "objectID" in record]
    index_payload(payload)
|
|
@ -1,6 +1,7 @@
|
|||
name: Azure Static Web App Root
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- v1.11
|
||||
|
@ -9,35 +10,66 @@ on:
|
|||
branches:
|
||||
- v1.11
|
||||
|
||||
concurrency:
|
||||
# Cancel the previously triggered build for only PR build.
|
||||
group: website-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build_and_deploy_job:
|
||||
if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed')
|
||||
name: Build Hugo Website
|
||||
if: github.event.action != 'closed'
|
||||
runs-on: ubuntu-latest
|
||||
name: Build and Deploy Job
|
||||
env:
|
||||
SWA_BASE: 'proud-bay-0e9e0e81e'
|
||||
HUGO_ENV: production
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout docs repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '14'
|
||||
- name: Setup Hugo
|
||||
uses: peaceiris/actions-hugo@v2.5.0
|
||||
with:
|
||||
hugo-version: 0.102.3
|
||||
extended: true
|
||||
- name: Setup Docsy
|
||||
run: cd daprdocs && git submodule update --init --recursive && sudo npm install -D --save autoprefixer && sudo npm install -D --save postcss-cli
|
||||
- name: Build And Deploy
|
||||
id: builddeploy
|
||||
run: |
|
||||
cd daprdocs
|
||||
git submodule update --init --recursive
|
||||
sudo npm install -D --save autoprefixer
|
||||
sudo npm install -D --save postcss-cli
|
||||
- name: Build Hugo Website
|
||||
run: |
|
||||
cd daprdocs
|
||||
git config --global --add safe.directory /github/workspace
|
||||
if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then
|
||||
STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/"
|
||||
fi
|
||||
hugo ${STAGING_URL+-b "$STAGING_URL"}
|
||||
- name: Deploy docs site
|
||||
uses: Azure/static-web-apps-deploy@v1
|
||||
env:
|
||||
HUGO_ENV: production
|
||||
HUGO_VERSION: "0.100.2"
|
||||
with:
|
||||
azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
|
||||
skip_deploy_on_missing_secrets: true
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }} # Used for Github integrations (i.e. PR comments)
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
action: "upload"
|
||||
app_location: "/daprdocs"
|
||||
app_build_command: "git config --global --add safe.directory /github/workspace && hugo"
|
||||
output_location: "public"
|
||||
skip_api_build: true
|
||||
app_location: "daprdocs/public/"
|
||||
api_location: "daprdocs/public/"
|
||||
output_location: ""
|
||||
skip_app_build: true
|
||||
skip_deploy_on_missing_secrets: true
|
||||
- name: Upload Hugo artifacts
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: hugo_build
|
||||
path: ./daprdocs/public/
|
||||
if-no-files-found: error
|
||||
|
||||
close_pull_request_job:
|
||||
close_staging_site:
|
||||
if: github.event_name == 'pull_request' && github.event.action == 'closed'
|
||||
runs-on: ubuntu-latest
|
||||
name: Close Pull Request Job
|
||||
|
@ -48,3 +80,30 @@ jobs:
|
|||
with:
|
||||
azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
|
||||
action: "close"
|
||||
skip_deploy_on_missing_secrets: true
|
||||
|
||||
algolia_index:
|
||||
name: Index site for Algolia
|
||||
if: github.event_name == 'push'
|
||||
needs: ['build_and_deploy_job']
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
|
||||
ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }}
|
||||
ALGOLIA_INDEX_NAME: daprdocs
|
||||
steps:
|
||||
- name: Checkout docs repo
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: false
|
||||
- name: Download Hugo artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: hugo_build
|
||||
path: site/
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade bs4
|
||||
pip install --upgrade 'algoliasearch>=2.0,<3.0'
|
||||
- name: Index site
|
||||
run: python ./.github/scripts/algolia.py ./site
|
||||
|
|
|
@ -1,38 +1,12 @@
|
|||
// Code formatting.
|
||||
|
||||
.copy-code-button {
|
||||
color: #272822;
|
||||
background-color: #FFF;
|
||||
border-color: #0D2192;
|
||||
border: 2px solid;
|
||||
border-radius: 3px 3px 0px 0px;
|
||||
|
||||
/* right-align */
|
||||
display: block;
|
||||
margin-left: auto;
|
||||
margin-right: 0;
|
||||
|
||||
margin-bottom: -2px;
|
||||
padding: 3px 8px;
|
||||
font-size: 0.8em;
|
||||
.highlight .copy-icon {
|
||||
position: absolute;
|
||||
right: 20px;
|
||||
top: 18px;
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
.copy-code-button:hover {
|
||||
cursor: pointer;
|
||||
background-color: #F2F2F2;
|
||||
}
|
||||
|
||||
.copy-code-button:focus {
|
||||
/* Avoid an ugly focus outline on click in Chrome,
|
||||
but darken the button for accessibility.
|
||||
See https://stackoverflow.com/a/25298082/1481479 */
|
||||
background-color: #E6E6E6;
|
||||
outline: 0;
|
||||
}
|
||||
|
||||
.copy-code-button:active {
|
||||
background-color: #D9D9D9;
|
||||
}
|
||||
|
||||
.highlight pre {
|
||||
/* Avoid pushing up the copy buttons. */
|
||||
|
@ -40,25 +14,31 @@
|
|||
}
|
||||
|
||||
.td-content {
|
||||
// Highlighted code.
|
||||
|
||||
// Highlighted code.
|
||||
.highlight {
|
||||
@extend .card;
|
||||
|
||||
|
||||
margin: 0rem 0;
|
||||
padding: 0rem;
|
||||
|
||||
margin-bottom: 2rem;
|
||||
|
||||
max-width: 100%;
|
||||
|
||||
|
||||
border: none;
|
||||
|
||||
pre {
|
||||
margin: 0;
|
||||
padding: 1rem;
|
||||
border-radius: 10px;
|
||||
}
|
||||
}
|
||||
|
||||
// Inline code
|
||||
p code, li > code, table code {
|
||||
p code,
|
||||
li>code,
|
||||
table code {
|
||||
color: inherit;
|
||||
padding: 0.2em 0.4em;
|
||||
margin: 0;
|
||||
|
@ -78,11 +58,11 @@
|
|||
word-wrap: normal;
|
||||
background-color: $gray-100;
|
||||
padding: $spacer;
|
||||
|
||||
|
||||
max-width: 100%;
|
||||
|
||||
> code {
|
||||
background-color: inherit !important;
|
||||
>code {
|
||||
background-color: inherit !important;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-size: 100%;
|
||||
|
|
|
@ -25,13 +25,14 @@ The diagram below is an example of how dead letter topics work. First a message
|
|||
The following YAML shows how to configure a subscription with a dead letter topic named `poisonMessages` for messages consumed from the `orders` topic. This subscription is scoped to an app with a `checkout` ID.
|
||||
|
||||
```yaml
|
||||
apiVersion: dapr.io/v1alpha1
|
||||
apiVersion: dapr.io/v2alpha1
|
||||
kind: Subscription
|
||||
metadata:
|
||||
name: order
|
||||
spec:
|
||||
topic: orders
|
||||
route: /checkout
|
||||
routes:
|
||||
default: /checkout
|
||||
pubsubname: pubsub
|
||||
deadLetterTopic: poisonMessages
|
||||
scopes:
|
||||
|
@ -86,13 +87,16 @@ spec:
|
|||
Remember to now configure a subscription to handle the dead letter topics. For example, you can create another declarative subscription to receive these on the same or a different application. The example below shows the checkout application subscribing to the `poisonMessages` topic with another subscription and sending these to be handled by the `/failedmessages` endpoint.
|
||||
|
||||
```yaml
|
||||
apiVersion: dapr.io/v1alpha1
|
||||
apiVersion: dapr.io/v2alpha1
|
||||
kind: Subscription
|
||||
metadata:
|
||||
name: deadlettertopics
|
||||
spec:
|
||||
topic: poisonMessages
|
||||
route: /failedMessages
|
||||
routes:
|
||||
rules:
|
||||
- match:
|
||||
path: /failedMessages
|
||||
pubsubname: pubsub
|
||||
scopes:
|
||||
- checkout
|
||||
|
|
|
@ -141,13 +141,14 @@ $app->start();
|
|||
Similarly, you can subscribe to raw events declaratively by adding the `rawPayload` metadata entry to your subscription specification.
|
||||
|
||||
```yaml
|
||||
apiVersion: dapr.io/v1alpha1
|
||||
apiVersion: dapr.io/v2alpha1
|
||||
kind: Subscription
|
||||
metadata:
|
||||
name: myevent-subscription
|
||||
spec:
|
||||
topic: deathStarStatus
|
||||
route: /dsstatus
|
||||
routes:
|
||||
default: /dsstatus
|
||||
pubsubname: pubsub
|
||||
metadata:
|
||||
rawPayload: "true"
|
||||
|
|
|
@ -22,13 +22,14 @@ The examples below demonstrate pub/sub messaging between a `checkout` app and an
|
|||
You can subscribe declaratively to a topic using an external component file. This example uses a YAML component file named `subscription.yaml`:
|
||||
|
||||
```yaml
|
||||
apiVersion: dapr.io/v1alpha1
|
||||
apiVersion: dapr.io/v2alpha1
|
||||
kind: Subscription
|
||||
metadata:
|
||||
name: order
|
||||
spec:
|
||||
topic: orders
|
||||
route: /checkout
|
||||
routes:
|
||||
default: /checkout
|
||||
pubsubname: pubsub
|
||||
scopes:
|
||||
- orderprocessing
|
||||
|
|
|
@ -47,7 +47,7 @@ The diagram below is an overview of how Dapr's service invocation works when inv
|
|||
## Using an HTTPEndpoint resource or FQDN URL for non-Dapr endpoints
|
||||
There are two ways to invoke a non-Dapr endpoint when communicating either to Dapr applications or non-Dapr applications. A Dapr application can invoke a non-Dapr endpoint by providing one of the following:
|
||||
|
||||
- A named `HTTPEndpoint` resource, including defining an `HTTPEndpoint` resource type. See the [HTTPEndpoint reference]({{< ref httpendpoints-reference.md >}}) guide for an example.
|
||||
- A named `HTTPEndpoint` resource, including defining an `HTTPEndpoint` resource type. See the [HTTPEndpoint reference]({{< ref httpendpoints-schema.md >}}) guide for an example.
|
||||
|
||||
```sh
|
||||
localhost:3500/v1.0/invoke/<HTTPEndpoint-name>/method/<my-method>
|
||||
|
@ -81,7 +81,7 @@ curl http://localhost:3602/v1.0/invoke/orderprocessor/method/checkout
|
|||
|
||||
## Related Links
|
||||
|
||||
- [HTTPEndpoint reference]({{< ref httpendpoints-reference.md >}})
|
||||
- [HTTPEndpoint reference]({{< ref httpendpoints-schema.md >}})
|
||||
- [Service invocation overview]({{< ref service-invocation-overview.md >}})
|
||||
- [Service invocation API specification]({{< ref service_invocation_api.md >}})
|
||||
|
||||
|
|
|
@ -16,9 +16,15 @@ When state TTL has native support in the state store component, Dapr forwards th
|
|||
|
||||
When a TTL is not specified, the default behavior of the state store is retained.
|
||||
|
||||
## Persisting state (ignoring an existing TTL)
|
||||
## Explicit persistence bypassing globally defined TTL
|
||||
|
||||
To explicitly persist a state (ignoring any TTLs set for the key), specify a `ttlInSeconds` value of `-1`.
|
||||
Persisting state applies to all state stores that let you specify a default TTL used for all data, either:
|
||||
- Setting a global TTL value via a Dapr component, or
|
||||
- When creating the state store outside of Dapr and setting a global TTL value.
|
||||
|
||||
When no specific TTL is specified, the data expires after that global TTL period of time. This is not facilitated by Dapr.
|
||||
|
||||
In addition, all state stores also support the option to _explicitly_ persist data. This means you can ignore the default database policy (which may have been set outside of Dapr or via a Dapr Component) to indefinitely retain a given database record. You can do this by setting `ttlInSeconds` to the value of `-1`. This value indicates to ignore any TTL value set.
|
||||
|
||||
## Supported components
|
||||
|
||||
|
|
|
@ -12,12 +12,6 @@ The workflow building block is currently in **alpha**.
|
|||
|
||||
Let's take a look at the Dapr [Workflow building block]({{< ref workflow >}}). In this Quickstart, you'll create a simple console application to demonstrate Dapr's workflow programming model and the workflow management APIs.
|
||||
|
||||
The `order-processor` console app starts and manages the lifecycle of the `OrderProcessingWorkflow` workflow that stores and retrieves data in a state store. The workflow consists of four workflow activities, or tasks:
|
||||
- `NotifyActivity`: Utilizes a logger to print out messages throughout the workflow
|
||||
- `ReserveInventoryActivity`: Checks the state store to ensure that there is enough inventory for the purchase
|
||||
- `ProcessPaymentActivity`: Processes and authorizes the payment
|
||||
- `UpdateInventoryActivity`: Removes the requested items from the state store and updates the store with the new remaining inventory value
|
||||
|
||||
In this guide, you'll:
|
||||
|
||||
- Run the `order-processor` application.
|
||||
|
@ -26,13 +20,19 @@ In this guide, you'll:
|
|||
|
||||
<img src="/images/workflow-quickstart-overview.png" width=800 style="padding-bottom:15px;">
|
||||
|
||||
Currently, you can experience the Dapr Workflow using the .NET SDK.
|
||||
|
||||
{{< tabs ".NET" "Python" >}}
|
||||
|
||||
<!-- .NET -->
|
||||
{{% codetab %}}
|
||||
|
||||
The `order-processor` console app starts and manages the lifecycle of an order processing workflow that stores and retrieves data in a state store. The workflow consists of four workflow activities, or tasks:
|
||||
- `NotifyActivity`: Utilizes a logger to print out messages throughout the workflow
|
||||
- `ReserveInventoryActivity`: Checks the state store to ensure that there is enough inventory for the purchase
|
||||
- `ProcessPaymentActivity`: Processes and authorizes the payment
|
||||
- `UpdateInventoryActivity`: Removes the requested items from the state store and updates the store with the new remaining inventory value
|
||||
|
||||
|
||||
### Step 1: Pre-requisites
|
||||
|
||||
For this example, you will need:
|
||||
|
@ -259,6 +259,16 @@ The `Activities` directory holds the four workflow activities used by the workfl
|
|||
<!-- Python -->
|
||||
{{% codetab %}}
|
||||
|
||||
The `order-processor` console app starts and manages the `order_processing_workflow`, which simulates purchasing items from a store. The workflow consists of five unique workflow activities, or tasks:
|
||||
|
||||
- `notify_activity`: Utilizes a logger to print out messages throughout the workflow. These messages notify you when:
|
||||
- You have insufficient inventory
|
||||
- Your payment couldn't be processed, etc.
|
||||
- `process_payment_activity`: Processes and authorizes the payment.
|
||||
- `verify_inventory_activity`: Checks the state store to ensure there is enough inventory present for purchase.
|
||||
- `update_inventory_activity`: Removes the requested items from the state store and updates the store with the new remaining inventory value.
|
||||
- `request_approval_activity`: Seeks approval from the manager if payment is greater than 50,000 USD.
|
||||
|
||||
### Step 1: Pre-requisites
|
||||
|
||||
For this example, you will need:
|
||||
|
|
|
@ -15,118 +15,120 @@ description: "Enable Dapr metrics and logs with Azure Monitor for Azure Kubernet
|
|||
|
||||
## Enable Prometheus metric scrape using config map
|
||||
|
||||
1. Make sure that omsagents are running
|
||||
1. Make sure that Azure Monitor Agents (AMA) are running.
|
||||
|
||||
```bash
|
||||
$ kubectl get pods -n kube-system
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
...
|
||||
omsagent-75qjs 1/1 Running 1 44h
|
||||
omsagent-c7c4t 1/1 Running 0 44h
|
||||
omsagent-rs-74f488997c-dshpx 1/1 Running 1 44h
|
||||
omsagent-smtk7 1/1 Running 1 44h
|
||||
...
|
||||
```
|
||||
```bash
|
||||
$ kubectl get pods -n kube-system
|
||||
NAME READY STATUS RESTARTS AGE
|
||||
...
|
||||
ama-logs-48kpv 2/2 Running 0 2d13h
|
||||
ama-logs-mx24c 2/2 Running 0 2d13h
|
||||
ama-logs-rs-f9bbb9898-vbt6k 1/1 Running 0 30h
|
||||
ama-logs-sm2mz 2/2 Running 0 2d13h
|
||||
ama-logs-z7p4c 2/2 Running 0 2d13h
|
||||
...
|
||||
```
|
||||
|
||||
2. Apply config map to enable Prometheus metrics endpoint scrape.
|
||||
1. Apply config map to enable Prometheus metrics endpoint scrape.
|
||||
|
||||
You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable prometheus metrics endpoint scrape.
|
||||
You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable Prometheus metrics endpoint scrape.
|
||||
|
||||
If you installed Dapr to the different namespace, you need to change the `monitor_kubernetes_pod_namespaces` array values. For example:
|
||||
If you installed Dapr to a different namespace, you need to change the `monitor_kubernetes_pod_namespaces` array values. For example:
|
||||
|
||||
```yaml
|
||||
...
|
||||
prometheus-data-collection-settings: |-
|
||||
[prometheus_data_collection_settings.cluster]
|
||||
interval = "1m"
|
||||
monitor_kubernetes_pods = true
|
||||
monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
|
||||
[prometheus_data_collection_settings.node]
|
||||
interval = "1m"
|
||||
...
|
||||
```
|
||||
```yaml
|
||||
...
|
||||
prometheus-data-collection-settings: |-
|
||||
[prometheus_data_collection_settings.cluster]
|
||||
interval = "1m"
|
||||
monitor_kubernetes_pods = true
|
||||
monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
|
||||
[prometheus_data_collection_settings.node]
|
||||
interval = "1m"
|
||||
...
|
||||
```
|
||||
|
||||
Apply config map:
|
||||
Apply config map:
|
||||
|
||||
```bash
|
||||
kubectl apply -f ./azm-config.map.yaml
|
||||
```
|
||||
```bash
|
||||
kubectl apply -f ./azm-config.map.yaml
|
||||
```
|
||||
|
||||
## Install Dapr with JSON formatted logs
|
||||
|
||||
1. Install Dapr with enabling JSON-formatted logs
|
||||
1. Install Dapr with enabling JSON-formatted logs.
|
||||
|
||||
```bash
|
||||
helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
|
||||
```
|
||||
```bash
|
||||
helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
|
||||
```
|
||||
|
||||
2. Enable JSON formatted log in Dapr sidecar and add Prometheus annotations.
|
||||
1. Enable JSON formatted log in Dapr sidecar and add Prometheus annotations.
|
||||
|
||||
> Note: OMS Agent scrapes the metrics only if replicaset has Prometheus annotations.
|
||||
> Note: The Azure Monitor Agents (AMA) only sends the metrics if the Prometheus annotations are set.
|
||||
|
||||
Add `dapr.io/log-as-json: "true"` annotation to your deployment yaml.
|
||||
Add `dapr.io/log-as-json: "true"` annotation to your deployment yaml.
|
||||
|
||||
Example:
|
||||
```yaml
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: pythonapp
|
||||
namespace: default
|
||||
labels:
|
||||
app: python
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: python
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: python
|
||||
annotations:
|
||||
dapr.io/enabled: "true"
|
||||
dapr.io/app-id: "pythonapp"
|
||||
dapr.io/log-as-json: "true"
|
||||
prometheus.io/scrape: "true"
|
||||
prometheus.io/port: "9090"
|
||||
prometheus.io/path: "/"
|
||||
Example:
|
||||
|
||||
...
|
||||
```
|
||||
```yaml
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: pythonapp
|
||||
namespace: default
|
||||
labels:
|
||||
app: python
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: python
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: python
|
||||
annotations:
|
||||
dapr.io/enabled: "true"
|
||||
dapr.io/app-id: "pythonapp"
|
||||
dapr.io/log-as-json: "true"
|
||||
prometheus.io/scrape: "true"
|
||||
prometheus.io/port: "9090"
|
||||
prometheus.io/path: "/"
|
||||
|
||||
...
|
||||
```
|
||||
|
||||
## Search metrics and logs with Azure Monitor
|
||||
|
||||
1. Go to Azure Monitor
|
||||
1. Go to Azure Monitor in the Azure portal.
|
||||
|
||||
2. Search Dapr logs
|
||||
1. Search Dapr **Logs**.
|
||||
|
||||
Here is an example query, to parse JSON formatted logs and query logs from dapr system processes.
|
||||
Here is an example query, to parse JSON formatted logs and query logs from Dapr system processes.
|
||||
|
||||
```
|
||||
ContainerLog
|
||||
| extend parsed=parse_json(LogEntry)
|
||||
| project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'],level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
|
||||
| where level != ""
|
||||
| sort by Time
|
||||
```
|
||||
```
|
||||
ContainerLog
|
||||
| extend parsed=parse_json(LogEntry)
|
||||
| project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'],level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
|
||||
| where level != ""
|
||||
| sort by Time
|
||||
```
|
||||
|
||||
3. Search metrics
|
||||
1. Search **Metrics**.
|
||||
|
||||
This query, queries process_resident_memory_bytes Prometheus metrics for Dapr system processes and renders timecharts
|
||||
This query, queries `process_resident_memory_bytes` Prometheus metrics for Dapr system processes and renders timecharts.
|
||||
|
||||
```
|
||||
InsightsMetrics
|
||||
| where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
|
||||
| extend tags=parse_json(Tags)
|
||||
| project TimeGenerated, Name, Val, app=tostring(tags['app'])
|
||||
| summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
|
||||
| where app startswith "dapr-"
|
||||
| render timechart
|
||||
```
|
||||
```
|
||||
InsightsMetrics
|
||||
| where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
|
||||
| extend tags=parse_json(Tags)
|
||||
| project TimeGenerated, Name, Val, app=tostring(tags['app'])
|
||||
| summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
|
||||
| where app startswith "dapr-"
|
||||
| render timechart
|
||||
```
|
||||
|
||||
# References
|
||||
## References
|
||||
|
||||
* [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
|
||||
* [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
|
||||
* [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)
|
||||
- [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
|
||||
- [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
|
||||
- [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)
|
||||
|
|
|
@ -179,7 +179,7 @@ Example:
|
|||
"topic": "newOrder",
|
||||
"route": "/orders",
|
||||
"metadata": {
|
||||
"rawPayload": "true",
|
||||
"rawPayload": "true"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
|
|
@ -39,6 +39,8 @@ spec:
|
|||
secretKeyRef:
|
||||
name: kafka-secrets
|
||||
key: saslPasswordSecret
|
||||
- name: saslMechanism
|
||||
value: "SHA-512"
|
||||
- name: initialOffset # Optional. Used for input bindings.
|
||||
value: "newest"
|
||||
- name: maxMessageBytes # Optional.
|
||||
|
@ -61,6 +63,7 @@ spec:
|
|||
| authType | Y | Input/Output | Configure or disable authentication. Supported values: `none`, `password`, `mtls`, or `oidc` | `"password"`, `"none"` |
|
||||
| saslUsername | N | Input/Output | The SASL username used for authentication. Only required if `authRequired` is set to `"true"`. | `"adminuser"` |
|
||||
| saslPassword | N | Input/Output | The SASL password used for authentication. Can be `secretKeyRef` to use a [secret reference]({{< ref component-secrets.md >}}). Only required if `authRequired` is set to `"true"`. | `""`, `"KeFg23!"` |
|
||||
| saslMechanism | N | Input/Output | The SASL authentication mechanism you'd like to use. Only required if `authtype` is set to `"password"`. If not provided, defaults to `PLAINTEXT`, which could cause a break for some services, like Amazon Managed Service for Kafka. | `"SHA-512", "SHA-256", "PLAINTEXT"` |
|
||||
| initialOffset | N | Input | The initial offset to use if no offset was previously committed. Should be "newest" or "oldest". Defaults to "newest". | `"oldest"` |
|
||||
| maxMessageBytes | N | Input/Output | The maximum size in bytes allowed for a single Kafka message. Defaults to 1024. | `2048` |
|
||||
| oidcTokenEndpoint | N | Input/Output | Full URL to an OAuth2 identity provider access token endpoint. Required when `authType` is set to `oidc` | "https://identity.example.com/v1/token" |
|
||||
|
|
|
@ -96,6 +96,8 @@ An HTTP 204 (No Content) and empty body is returned if successful.
|
|||
|
||||
You can get a record in Redis using the `get` operation. This gets a key that was previously set.
|
||||
|
||||
This takes an optional parameter `delete`, which is by default `false`. When it is set to `true`, this operation uses the `GETDEL` operation of Redis: it returns the `value` that was previously set and then deletes it.
|
||||
|
||||
#### Request
|
||||
|
||||
```json
|
||||
|
@ -120,6 +122,20 @@ You can get a record in Redis using the `get` operation. This gets a key that wa
|
|||
}
|
||||
```
|
||||
|
||||
#### Request with delete flag
|
||||
|
||||
```json
|
||||
{
|
||||
"operation": "get",
|
||||
"metadata": {
|
||||
"key": "key1",
|
||||
"delete": "true"
|
||||
},
|
||||
"data": {
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### delete
|
||||
|
||||
You can delete a record in Redis using the `delete` operation. Returns success whether the key exists or not.
|
||||
|
|
|
@ -62,6 +62,11 @@ spec:
|
|||
}
|
||||
```
|
||||
|
||||
{{% alert title="Warning" color="warning" %}}
|
||||
The above example uses secrets as plain strings. It is recommended to use a [secret store for the secrets]({{< ref component-secrets.md >}}). This component supports storing the `token` parameter and any other sensitive parameter and data as Kubernetes Secrets.
|
||||
{{% /alert %}}
|
||||
|
||||
|
||||
## Spec metadata fields
|
||||
|
||||
| Field | Required | Details | Example |
|
||||
|
|
|
@ -12,7 +12,7 @@ The following table lists the environment variables used by the Dapr runtime, CL
|
|||
| -------------------- | ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| APP_ID | Your application | The id for your application, used for service discovery |
|
||||
| APP_PORT | Dapr sidecar | The port your application is listening on |
|
||||
| APP_API_TOKEN | Your application | The token used by the application to authenticate requests from Dapr API. Read [authenticate requests from Dapr using token authentication]({{< ref app-api-token >}}) for more information. |
|
||||
| APP_API_TOKEN | Your application | The token used by the application to authenticate requests from Dapr API. Read [authenticate requests from Dapr using token authentication]({{< ref app-api-token >}}) for more information. |
|
||||
| DAPR_HTTP_PORT | Your application | The HTTP port that the Dapr sidecar is listening on. Your application should use this variable to connect to Dapr sidecar instead of hardcoding the port value. Set by the Dapr CLI run command for self-hosted or injected by the `dapr-sidecar-injector` into all the containers in the pod. |
|
||||
| DAPR_GRPC_PORT | Your application | The gRPC port that the Dapr sidecar is listening on. Your application should use this variable to connect to Dapr sidecar instead of hardcoding the port value. Set by the Dapr CLI run command for self-hosted or injected by the `dapr-sidecar-injector` into all the containers in the pod. |
|
||||
| DAPR_API_TOKEN | Dapr sidecar | The token used for Dapr API authentication for requests from the application. [Enable API token authentication in Dapr]({{< ref api-token >}}). |
|
||||
|
@ -24,4 +24,6 @@ The following table lists the environment variables used by the Dapr runtime, CL
|
|||
| DAPR_HELM_REPO_PASSWORD | A password for a private Helm chart |The password required to access the private Dapr helm chart. If it can be accessed publicly, this env variable does not need to be set|
|
||||
| OTEL_EXPORTER_OTLP_ENDPOINT | OpenTelemetry Tracing | Sets the Open Telemetry (OTEL) server address, turns on tracing. (Example: `http://localhost:4318`) |
|
||||
| OTEL_EXPORTER_OTLP_INSECURE | OpenTelemetry Tracing | Sets the connection to the endpoint as unencrypted. (`true`, `false`) |
|
||||
| OTEL_EXPORTER_OTLP_PROTOCOL | OpenTelemetry Tracing | The OTLP transport protocol to use. (`grpc`, `http/protobuf`, `http/json`) |
|
||||
| OTEL_EXPORTER_OTLP_PROTOCOL | OpenTelemetry Tracing | The OTLP transport protocol to use. (`grpc`, `http/protobuf`, `http/json`) |
|
||||
| DAPR_COMPONENTS_SOCKETS_FOLDER | Dapr runtime and the .NET, Go, and Java pluggable component SDKs | The location or path where Dapr looks for Pluggable Components Unix Domain Socket files. If unset this location defaults to `/tmp/dapr-components-sockets` |
|
||||
| DAPR_COMPONENTS_SOCKETS_EXTENSION | .NET and Java pluggable component SDKs | A per-SDK configuration that indicates the default file extension applied to socket files created by the SDKs. Not a Dapr-enforced behavior. |
|
|
@ -1,9 +1,9 @@
|
|||
---
|
||||
type: docs
|
||||
title: "Component schema"
|
||||
linkTitle: "Component schema"
|
||||
weight: 100
|
||||
description: "The basic schema for a Dapr component"
|
||||
title: "Component spec"
|
||||
linkTitle: "Component"
|
||||
weight: 1000
|
||||
description: "The basic spec for a Dapr component"
|
||||
---
|
||||
|
||||
Dapr defines and registers components using a [CustomResourceDefinition](https://kubernetes.io/docs/tasks/extend-kubernetes/custom-resources/custom-resource-definitions/). All components are defined as a CRD and can be applied to any hosting environment where Dapr is running, not just Kubernetes.
|
|
@ -1,9 +1,9 @@
|
|||
---
|
||||
type: docs
|
||||
title: "HTTPEndpoint spec"
|
||||
linkTitle: "HTTPEndpoint spec"
|
||||
description: "The HTTPEndpoint resource spec"
|
||||
weight: 300
|
||||
linkTitle: "HTTPEndpoint"
|
||||
description: "The basic spec for a Dapr HTTPEndpoint resource"
|
||||
weight: 4000
|
||||
aliases:
|
||||
- "/operations/httpEndpoints/"
|
||||
---
|
|
@ -0,0 +1,63 @@
|
|||
---
|
||||
type: docs
|
||||
title: "Resiliency spec"
|
||||
linkTitle: "Resiliency"
|
||||
weight: 3000
|
||||
description: "The basic spec for a Dapr resiliency resource"
|
||||
---
|
||||
|
||||
The `Resiliency` Dapr resource allows you to define and apply fault tolerance resiliency policies. Resiliency specs are applied when the Dapr sidecar starts.
|
||||
|
||||
```yml
|
||||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Resiliency
|
||||
metadata:
|
||||
name: <REPLACE-WITH-RESOURCE-NAME>
|
||||
version: v1alpha1
|
||||
scopes:
|
||||
- <REPLACE-WITH-SCOPED-APPIDS>
|
||||
spec:
|
||||
policies: # Required
|
||||
timeouts: # Replace with any unique name
|
||||
timeoutName: <REPLACE-WITH-TIME-VALUE>
|
||||
retries:
|
||||
retryName: # Replace with any unique name
|
||||
policy: <REPLACE-WITH-VALUE>
|
||||
duration: <REPLACE-WITH-VALUE>
|
||||
maxInterval: <REPLACE-WITH-VALUE>
|
||||
maxRetries: <REPLACE-WITH-VALUE>
|
||||
circuitBreakers:
|
||||
circuitBreakerName: # Replace with any unique name
|
||||
maxRequests: <REPLACE-WITH-VALUE>
|
||||
timeout: <REPLACE-WITH-VALUE>
|
||||
trip: <REPLACE-WITH-CONSECUTIVE-FAILURE-VALUE>
|
||||
targets: # Required
|
||||
apps:
|
||||
appID: # Replace with scoped app ID
|
||||
timeout: <REPLACE-WITH-TIMEOUT-NAME>
|
||||
retry: <REPLACE-WITH-RETRY-NAME>
|
||||
circuitBreaker: <REPLACE-WITH-CIRCUIT-BREAKER-NAME>
|
||||
actors:
|
||||
myActorType:
|
||||
timeout: <REPLACE-WITH-TIMEOUT-NAME>
|
||||
retry: <REPLACE-WITH-RETRY-NAME>
|
||||
circuitBreaker: <REPLACE-WITH-CIRCUIT-BREAKER-NAME>
|
||||
circuitBreakerCacheSize: <REPLACE-WITH-VALUE>
|
||||
components:
|
||||
componentName: # Replace with your component name
|
||||
outbound:
|
||||
timeout: <REPLACE-WITH-TIMEOUT-NAME>
|
||||
retry: <REPLACE-WITH-RETRY-NAME>
|
||||
circuitBreaker: <REPLACE-WITH-CIRCUIT-BREAKER-NAME>
|
||||
```
|
||||
|
||||
## Spec fields
|
||||
|
||||
| Field | Required | Details | Example |
|
||||
|--------------------|:--------:|---------|---------|
|
||||
| policies | Y | The configuration of resiliency policies, including: <br><ul><li>`timeouts`</li><li>`retries`</li><li>`circuitBreakers`</li></ul> <br> [See more examples with all of the built-in policies]({{< ref policies.md >}}) | timeout: `general`<br>retry: `retryForever`<br>circuit breaker: `simpleCB` |
|
||||
| targets | Y | The configuration for the applications, actors, or components that use the resiliency policies. <br>[See more examples in the resiliency targets guide]({{< ref targets.md >}}) | `apps` <br>`components`<br>`actors` |
|
||||
|
||||
|
||||
## Related links
|
||||
[Learn more about resiliency policies and targets]({{< ref resiliency-overview.md >}})
|
|
@ -0,0 +1,88 @@
|
|||
---
|
||||
type: docs
|
||||
title: "Subscription spec"
|
||||
linkTitle: "Subscription"
|
||||
weight: 2000
|
||||
description: "The basic spec for a Dapr subscription"
|
||||
---
|
||||
|
||||
The `Subscription` Dapr resource allows you to subscribe declaratively to a topic using an external component YAML file. This guide demonstrates two subscription API versions:
|
||||
|
||||
- `v2alpha1` (default spec)
|
||||
- `v1alpha1` (deprecated)
|
||||
|
||||
## `v2alpha1`
|
||||
|
||||
The following is the basic `v2alpha1` spec for a `Subscription` resource. `v2alpha1` is the default spec for the subscription API.
|
||||
|
||||
```yml
|
||||
apiVersion: dapr.io/v2alpha1
|
||||
kind: Subscription
|
||||
metadata:
|
||||
name: <REPLACE-WITH-NAME>
|
||||
spec:
|
||||
version: v2alpha1
|
||||
topic: <REPLACE-WITH-TOPIC-NAME> # Required
|
||||
routes: # Required
|
||||
- rules:
|
||||
- match: <REPLACE-WITH-EVENT-TYPE>
|
||||
path: <REPLACE-WITH-PATH>
|
||||
pubsubname: <REPLACE-WITH-PUBSUB-NAME> # Required
|
||||
deadlettertopic: <REPLACE-WITH-TOPIC-NAME> # Optional
|
||||
bulksubscribe: # Optional
|
||||
- enabled: <REPLACE-WITH-BOOLEAN-VALUE>
|
||||
- maxmessages: <REPLACE-WITH-VALUE>
|
||||
- maxawaitduration: <REPLACE-WITH-VALUE>
|
||||
scopes:
|
||||
- <REPLACE-WITH-SCOPED-APPIDS>
|
||||
```
|
||||
|
||||
### Spec fields
|
||||
|
||||
| Field | Required | Details | Example |
|
||||
|--------------------|:--------:|---------|---------|
|
||||
| topic | Y | The name of the topic to which your component subscribes. | `orders` |
|
||||
| routes | Y | The routes configuration for this topic, including specifying the condition for sending a message to a specific path. Includes the following fields: <br><ul><li>match: _Optional._ The CEL expression used to match the event. If not specified, the route is considered the default. </li><li>path: The path for events that match this rule. </li></ul>The endpoint to which all topic messages are sent. | `match: event.type == "widget"` <br>`path: /widgets` |
|
||||
| pubsubname | Y | The name of your pub/sub component. | `pubsub` |
|
||||
| deadlettertopic | N | The name of the dead letter topic that forwards undeliverable messages. | `poisonMessages` |
|
||||
| bulksubscribe | N | Enable bulk subscribe properties. | `true`, `false` |
|
||||
|
||||
|
||||
## `v1alpha1`
|
||||
|
||||
The following is the basic version `v1alpha1` spec for a `Subscription` resource. `v1alpha1` is now deprecated.
|
||||
|
||||
```yml
|
||||
apiVersion: dapr.io/v1alpha1
|
||||
kind: Subscription
|
||||
metadata:
|
||||
name: <REPLACE-WITH-RESOURCE-NAME>
|
||||
spec:
|
||||
version: v1alpha1
|
||||
topic: <REPLACE-WITH-TOPIC-NAME> # Required
|
||||
route: <REPLACE-WITH-ROUTE-NAME> # Required
|
||||
pubsubname: <REPLACE-WITH-PUBSUB-NAME> # Required
|
||||
deadLetterTopic: <REPLACE-WITH-DEAD-LETTER-TOPIC-NAME> # Optional
|
||||
bulkSubscribe: # Optional
|
||||
- enabled: <REPLACE-WITH-BOOLEAN-VALUE>
|
||||
- maxmessages: <REPLACE-WITH-VALUE>
|
||||
- maxawaitduration: <REPLACE-WITH-VALUE>
|
||||
scopes:
|
||||
- <REPLACE-WITH-SCOPED-APPIDS>
|
||||
```
|
||||
|
||||
### Spec fields
|
||||
|
||||
| Field | Required | Details | Example |
|
||||
|--------------------|:--------:|---------|---------|
|
||||
| topic | Y | The name of the topic to which your component subscribes. | `orders` |
|
||||
| route | Y | The endpoint to which all topic messages are sent. | `/checkout` |
|
||||
| pubsubname | Y | The name of your pub/sub component. | `pubsub` |
|
||||
| deadlettertopic | N | The name of the dead letter topic that forwards undeliverable messages. | `poisonMessages` |
|
||||
| bulksubscribe | N | Enable bulk subscribe properties. | `true`, `false` |
|
||||
|
||||
## Related links
|
||||
- [Learn more about the declarative subscription method]({{< ref "subscription-methods.md#declarative-subscriptions" >}})
|
||||
- [Learn more about dead letter topics]({{< ref pubsub-deadletter.md >}})
|
||||
- [Learn more about routing messages]({{< ref "howto-route-messages.md#declarative-subscription" >}})
|
||||
- [Learn more about bulk subscribing]({{< ref pubsub-bulk.md >}})
|
|
@ -1,19 +1,13 @@
|
|||
<script src="/js/copy-code-button.js"></script>
|
||||
|
||||
{{ with .Site.Params.algolia_docsearch }}
|
||||
<script src="https://cdn.jsdelivr.net/npm/docsearch.js@2.6.3/dist/cdn/docsearch.min.js"></script>
|
||||
<script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@docsearch/js@3"></script>
|
||||
<script type="text/javascript">
|
||||
docsearch({
|
||||
// Your apiKey and indexName will be given to you once
|
||||
// we create your config
|
||||
apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9',
|
||||
indexName: 'crawler_dapr',
|
||||
container: '#docsearch',
|
||||
appId: 'O0QLQGNF38',
|
||||
// Replace inputSelector with a CSS selector
|
||||
// matching your search input
|
||||
inputSelector: '.td-search-input',
|
||||
// Set debug to true to inspect the dropdown
|
||||
debug: false,
|
||||
apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9',
|
||||
indexName: 'daprdocs',
|
||||
});
|
||||
</script>
|
||||
{{ end }}
|
||||
|
||||
<script src="/js/copy-code-button.js"></script>
|
|
@ -1,3 +1,3 @@
|
|||
{{ with .Site.Params.algolia_docsearch }}
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@docsearch/css@3" />
|
||||
{{ end }}
|
|
@ -0,0 +1,30 @@
|
|||
{{ if .Site.Params.gcs_engine_id -}}
|
||||
<input type="search" class="form-control td-search-input" placeholder=" {{ T "ui_search" }}" aria-label="{{ T "ui_search" }}" autocomplete="off">
|
||||
{{ else if .Site.Params.algolia_docsearch -}}
|
||||
<div id="docsearch"></div>
|
||||
{{ else if .Site.Params.offlineSearch -}}
|
||||
{{ $offlineSearchIndex := resources.Get "json/offline-search-index.json" | resources.ExecuteAsTemplate "offline-search-index.json" . -}}
|
||||
{{ if hugo.IsProduction -}}
|
||||
{{/* Use `md5` as finger print hash function to shorten file name to avoid `file name too long` error. */ -}}
|
||||
{{ $offlineSearchIndex = $offlineSearchIndex | fingerprint "md5" -}}
|
||||
{{ end -}}
|
||||
{{ $offlineSearchLink := $offlineSearchIndex.RelPermalink -}}
|
||||
|
||||
<input
|
||||
type="search"
|
||||
class="form-control td-search-input"
|
||||
placeholder=" {{ T "ui_search" }}"
|
||||
aria-label="{{ T "ui_search" }}"
|
||||
autocomplete="off"
|
||||
{{/*
|
||||
The data attribute name of the json file URL must end with `src` since
|
||||
Hugo's absurlreplacer requires `src`, `href`, `action` or `srcset` suffix for the attribute name.
|
||||
If the absurlreplacer is not applied, the URL will start with `/`.
|
||||
It causes the json file loading error when relativeURLs is enabled.
|
||||
https://github.com/google/docsy/issues/181
|
||||
*/}}
|
||||
data-offline-search-index-json-src="{{ $offlineSearchLink }}"
|
||||
data-offline-search-base-href="/"
|
||||
data-offline-search-max-results="{{ .Site.Params.offlineSearchMaxResults | default 10 }}"
|
||||
>
|
||||
{{ end -}}
|
|
@ -71,7 +71,7 @@ spec:
|
|||
spec:
|
||||
containers:
|
||||
- name: otel-collector
|
||||
image: otel/opentelemetry-collector-contrib:0.50.0
|
||||
image: otel/opentelemetry-collector-contrib:0.77.0
|
||||
command:
|
||||
- "/otelcol-contrib"
|
||||
- "--config=/conf/otel-collector-config.yaml"
|
||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 56 KiB After Width: | Height: | Size: 31 KiB |
|
@ -1,49 +1,35 @@
|
|||
function addCopyButtons(clipboard) {
|
||||
document.querySelectorAll('pre > code').forEach(function(codeBlock) {
|
||||
var button = document.createElement('button');
|
||||
button.className = 'copy-code-button';
|
||||
button.type = 'button';
|
||||
button.innerText = 'Copy';
|
||||
const highlightClass = document.querySelectorAll('.highlight');
|
||||
|
||||
button.addEventListener('click', function() {
|
||||
clipboard.writeText(codeBlock.textContent).then(
|
||||
function() {
|
||||
button.blur();
|
||||
highlightClass.forEach(element => {
|
||||
const copyIcon = document.createElement('i');
|
||||
copyIcon.classList.add('fas', 'fa-copy', 'copy-icon');
|
||||
copyIcon.style.color = 'white';
|
||||
copyIcon.style.display = 'none';
|
||||
element.appendChild(copyIcon);
|
||||
|
||||
button.innerText = 'Copied!';
|
||||
setTimeout(function() {
|
||||
button.innerText = 'Copy';
|
||||
}, 2000);
|
||||
},
|
||||
function(error) {
|
||||
button.innerText = 'Error';
|
||||
console.error(error);
|
||||
}
|
||||
);
|
||||
});
|
||||
element.addEventListener('mouseenter', () => {
|
||||
copyIcon.style.display = 'inline';
|
||||
});
|
||||
|
||||
var pre = codeBlock.parentNode;
|
||||
if (pre.parentNode.classList.contains('highlight')) {
|
||||
var highlight = pre.parentNode;
|
||||
highlight.parentNode.insertBefore(button, highlight);
|
||||
} else {
|
||||
pre.parentNode.insertBefore(button, pre);
|
||||
}
|
||||
});
|
||||
}
|
||||
element.addEventListener('mouseleave', () => {
|
||||
copyIcon.style.display = 'none';
|
||||
copyIcon.classList.replace('fa-check', 'fa-copy');
|
||||
});
|
||||
|
||||
if (navigator && navigator.clipboard) {
|
||||
addCopyButtons(navigator.clipboard);
|
||||
} else {
|
||||
var script = document.createElement('script');
|
||||
script.src =
|
||||
'https://cdnjs.cloudflare.com/ajax/libs/clipboard-polyfill/2.7.0/clipboard-polyfill.promise.js';
|
||||
script.integrity = 'sha256-waClS2re9NUbXRsryKoof+F9qc1gjjIhc2eT7ZbIv94=';
|
||||
script.crossOrigin = 'anonymous';
|
||||
copyIcon.addEventListener('click', async () => {
|
||||
const selection = window.getSelection();
|
||||
const range = document.createRange();
|
||||
range.selectNodeContents(element);
|
||||
selection.removeAllRanges();
|
||||
selection.addRange(range);
|
||||
|
||||
script.onload = function() {
|
||||
addCopyButtons(clipboard);
|
||||
};
|
||||
|
||||
document.body.appendChild(script);
|
||||
}
|
||||
try {
|
||||
await navigator.clipboard.writeText(selection.toString());
|
||||
console.log('Text copied to clipboard');
|
||||
copyIcon.classList.replace('fa-copy', 'fa-check');
|
||||
selection.removeAllRanges();
|
||||
} catch (error) {
|
||||
console.error('Failed to copy: ', error);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
Loading…
Reference in New Issue