Merge branch 'v1.11' into issue_3239

Hannah Hunter 2023-06-19 11:04:17 -04:00 committed by GitHub
commit 36c5496999
11 changed files with 438 additions and 234 deletions

.github/scripts/algolia.py (new file)

@@ -0,0 +1,118 @@
import os
import sys

from bs4 import BeautifulSoup
from algoliasearch.search_client import SearchClient

url = "docs.dapr.io"

if len(sys.argv) > 1:
    starting_directory = os.path.join(os.getcwd(), str(sys.argv[1]))
else:
    starting_directory = os.getcwd()

ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID')
ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY')
ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME')

client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
index = client.init_index(ALGOLIA_INDEX_NAME)

excluded_files = [
    "404.html",
]

excluded_directories = [
    "zh-hans",
]

# Base rank per top-level docs section; breadcrumb depth is added as a subrank.
rankings = {
    "Getting started": 0,
    "Concepts": 100,
    "Developing applications": 200,
    "Operations": 300,
    "Reference": 400,
    "Contributing": 500,
    "Home": 600
}


def scan_directory(directory: str, pages: list):
    if os.path.basename(directory) in excluded_directories:
        print(f'Skipping directory: {directory}')
        return
    for file in os.listdir(directory):
        path = os.path.join(directory, file)
        if os.path.isfile(path):
            if file.endswith(".html") and file not in excluded_files:
                if '<!-- DISABLE_ALGOLIA -->' not in open(path, encoding="utf8").read():
                    print(f'Indexing: {path}')
                    pages.append(path)
                else:
                    print(f'Skipping hidden page: {path}')
        else:
            scan_directory(path, pages)


def parse_file(path: str):
    data = {}
    data["hierarchy"] = {}
    data["rank"] = 999
    data["subrank"] = 99
    data["type"] = "lvl2"
    data["lvl0"] = ""
    data["lvl1"] = ""
    data["lvl2"] = ""
    data["lvl3"] = ""
    text = ""
    subrank = 0
    with open(path, "r", errors='ignore') as file:
        content = file.read()
        soup = BeautifulSoup(content, "html.parser")
        for meta in soup.find_all("meta"):
            if meta.get("name") == "description":
                data["lvl2"] = meta.get("content")
                data["hierarchy"]["lvl1"] = meta.get("content")
            elif meta.get("property") == "og:title":
                data["lvl0"] = meta.get("content")
                data["hierarchy"]["lvl0"] = meta.get("content")
                data["hierarchy"]["lvl2"] = meta.get("content")
            elif meta.get("property") == "og:url":
                data["url"] = meta.get("content")
                data["path"] = meta.get("content").split(url)[1]
                data["objectID"] = meta.get("content").split(url)[1]
        breadcrumbs = soup.find_all("li", class_="breadcrumb-item")
        try:
            subrank = len(breadcrumbs)
            data["subrank"] = subrank
        except:
            subrank = 99
            data["subrank"] = 99
        for bc in breadcrumbs:
            section = bc.text.strip()
            data["lvl1"] = section
            data["hierarchy"]["lvl0"] = section
            try:
                data["rank"] = rankings[section] + subrank
            except:
                print(f"Rank not found for section {section}")
                data["rank"] = 998
            break
        for p in soup.find_all("p"):
            if p.text != "":
                text = text + p.text
        data["text"] = text
    return data


def index_payload(payload):
    res = index.replace_all_objects(payload)
    res.wait()


if __name__ == "__main__":
    pages = []
    payload = []
    scan_directory(starting_directory, pages)
    for page in pages:
        data = parse_file(page)
        if "objectID" in data:
            payload.append(data)
    index_payload(payload)
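For a local dry run before CI is involved, the script can be pointed at an existing Hugo build. A minimal sketch, assuming the site was built into `./public` and using placeholder Algolia credentials:

```bash
# Hypothetical local invocation of the indexer; all credential values are placeholders.
pip install --upgrade bs4 'algoliasearch>=2.0,<3.0'

export ALGOLIA_APP_ID="MY_APP_ID"
export ALGOLIA_API_WRITE_KEY="MY_WRITE_KEY"
export ALGOLIA_INDEX_NAME="daprdocs"

# The path argument is resolved relative to the current working directory.
python ./.github/scripts/algolia.py ./public
```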

[next changed file]

@@ -1,6 +1,7 @@
name: Azure Static Web App Root

on:
  workflow_dispatch:
  push:
    branches:
      - v1.11
@@ -9,35 +10,65 @@ on:
    branches:
      - v1.11

concurrency:
  # Cancel the previously triggered build, but only for PR builds.
  group: website-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

jobs:
  build_and_deploy_job:
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed')
    name: Build Hugo Website
    if: github.event.action != 'closed'
    runs-on: ubuntu-latest
    name: Build and Deploy Job
    env:
      SWA_BASE: 'proud-bay-0e9e0e81e'
      HUGO_ENV: production
    steps:
      - uses: actions/checkout@v3
      - name: Checkout docs repo
        uses: actions/checkout@v3
        with:
          submodules: recursive
          fetch-depth: 0
          submodules: true
      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Setup Hugo
        uses: peaceiris/actions-hugo@v2.5.0
        with:
          hugo-version: 0.102.3
          extended: true
      - name: Setup Docsy
        run: cd daprdocs && git submodule update --init --recursive && sudo npm install -D --save autoprefixer && sudo npm install -D --save postcss-cli
      - name: Build And Deploy
        id: builddeploy
        run: |
          cd daprdocs
          git submodule update --init --recursive
          sudo npm install -D --save autoprefixer
          sudo npm install -D --save postcss-cli
      - name: Build Hugo Website
        run: |
          cd daprdocs
          git config --global --add safe.directory /github/workspace
          if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then
            STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/"
          fi
          # Pass -b only when STAGING_URL was set above (see the sketch after this file)
          hugo ${STAGING_URL+-b "$STAGING_URL"}
      - name: Deploy docs site
        uses: Azure/static-web-apps-deploy@v1
        env:
          HUGO_ENV: production
          HUGO_VERSION: "0.100.2"
        with:
          azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
          skip_deploy_on_missing_secrets: true
          repo_token: ${{ secrets.GITHUB_TOKEN }} # Used for GitHub integrations (i.e. PR comments)
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          action: "upload"
          app_location: "/daprdocs"
          app_build_command: "git config --global --add safe.directory /github/workspace && hugo"
          output_location: "public"
          skip_api_build: true
          app_location: "daprdocs/public/"
          api_location: "daprdocs/public/"
          output_location: ""
          skip_app_build: true
      - name: Upload Hugo artifacts
        uses: actions/upload-artifact@v3
        with:
          name: hugo_build
          path: ./daprdocs/public/
          if-no-files-found: error

  close_pull_request_job:
  close_staging_site:
    if: github.event_name == 'pull_request' && github.event.action == 'closed'
    runs-on: ubuntu-latest
    name: Close Pull Request Job
@@ -48,3 +79,29 @@ jobs:
        with:
          azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
          action: "close"

  algolia_index:
    name: Index site for Algolia
    if: github.event_name == 'push'
    needs: ['build_and_deploy_job']
    runs-on: ubuntu-latest
    env:
      ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
      ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }}
      ALGOLIA_INDEX_NAME: daprdocs
    steps:
      - name: Checkout docs repo
        uses: actions/checkout@v2
        with:
          submodules: false
      - name: Download Hugo artifacts
        uses: actions/download-artifact@v3
        with:
          name: hugo_build
          path: site/
      - name: Install Python packages
        run: |
          pip install --upgrade bs4
          pip install --upgrade 'algoliasearch>=2.0,<3.0'
      - name: Index site
        run: python ./.github/scripts/algolia.py ./site
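The `hugo ${STAGING_URL+-b "$STAGING_URL"}` line in the build step relies on the POSIX `${VAR+word}` parameter expansion: the `-b <baseURL>` flag is emitted only when `STAGING_URL` was set by the preceding `if`, i.e. only for pull-request builds. A minimal standalone sketch of that behavior (the staging URL below is a placeholder):

```bash
# ${VAR+word} expands to "word" when VAR is set, and to nothing when it is unset.
unset STAGING_URL
echo hugo ${STAGING_URL+-b "$STAGING_URL"}
# -> hugo            (push build: Hugo uses the baseURL from its config)

# Hypothetical staging URL for a PR build:
STAGING_URL="https://proud-bay-0e9e0e81e-123.westus2.azurestaticapps.net/"
echo hugo ${STAGING_URL+-b "$STAGING_URL"}
# -> hugo -b https://proud-bay-0e9e0e81e-123.westus2.azurestaticapps.net/
```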

[next changed file]

@@ -1,38 +1,12 @@
// Code formatting.

.copy-code-button {
  color: #272822;
  background-color: #FFF;
  border-color: #0D2192;
  border: 2px solid;
  border-radius: 3px 3px 0px 0px;
  /* right-align */
  display: block;
  margin-left: auto;
  margin-right: 0;
  margin-bottom: -2px;
  padding: 3px 8px;
  font-size: 0.8em;
}

.highlight .copy-icon {
  position: absolute;
  right: 20px;
  top: 18px;
  opacity: 0.7;
}

.copy-code-button:hover {
  cursor: pointer;
  background-color: #F2F2F2;
}

.copy-code-button:focus {
  /* Avoid an ugly focus outline on click in Chrome,
     but darken the button for accessibility.
     See https://stackoverflow.com/a/25298082/1481479 */
  background-color: #E6E6E6;
  outline: 0;
}

.copy-code-button:active {
  background-color: #D9D9D9;
}

.highlight pre {
  /* Avoid pushing up the copy buttons. */
@@ -40,25 +14,31 @@
}

.td-content {
  // Highlighted code.
  .highlight {
    @extend .card;
    margin: 0rem 0;
    padding: 0rem;
    margin-bottom: 2rem;
    max-width: 100%;
    border: none;

    pre {
      margin: 0;
      padding: 1rem;
      border-radius: 10px;
    }
  }

  // Inline code
  p code, li > code, table code {
  p code,
  li>code,
  table code {
    color: inherit;
    padding: 0.2em 0.4em;
    margin: 0;
@@ -78,11 +58,11 @@
    word-wrap: normal;
    background-color: $gray-100;
    padding: $spacer;
    max-width: 100%;

    > code {
      background-color: inherit !important;
    >code {
      background-color: inherit !important;
      padding: 0;
      margin: 0;
      font-size: 100%;

[next changed file]

@@ -92,7 +92,7 @@ You can add additional fields to a custom CloudEvent that are not part of the of
Publish a CloudEvent to the `orders` topic:
```bash
dapr publish --publish-app-id orderprocessing --pubsub order-pub-sub --topic orders --data '{"specversion" : "1.0", "type" : "com.dapr.cloudevent.sent", "source" : "testcloudeventspubsub", "subject" : "Cloud Events Test", "id" : "someCloudEventId", "time" : "2021-08-02T09:00:00Z", "datacontenttype" : "application/cloudevents+json", "data" : {"orderId": "100"}}'
dapr publish --publish-app-id orderprocessing --pubsub order-pub-sub --topic orders --data '{\"orderId\": \"100\"}'
```
{{% /codetab %}}
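For reference, the same custom CloudEvent can be posted straight to the sidecar's pub/sub HTTP endpoint. A hypothetical equivalent of the CLI command above, assuming the orderprocessing app's Dapr HTTP port is the default 3500:

```bash
# Hypothetical curl equivalent of `dapr publish`; the CloudEvent body is passed as-is.
curl -X POST http://localhost:3500/v1.0/publish/order-pub-sub/orders \
  -H "Content-Type: application/cloudevents+json" \
  -d '{"specversion": "1.0", "type": "com.dapr.cloudevent.sent", "source": "testcloudeventspubsub", "subject": "Cloud Events Test", "id": "someCloudEventId", "time": "2021-08-02T09:00:00Z", "datacontenttype": "application/cloudevents+json", "data": {"orderId": "100"}}'
```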

[next changed file]

@@ -15,118 +15,120 @@ description: "Enable Dapr metrics and logs with Azure Monitor for Azure Kubernet

## Enable Prometheus metric scrape using config map

1. Make sure that omsagents are running
1. Make sure that Azure Monitor Agents (AMA) are running.

   ```bash
   $ kubectl get pods -n kube-system
   NAME                           READY   STATUS    RESTARTS   AGE
   ...
   omsagent-75qjs                 1/1     Running   1          44h
   omsagent-c7c4t                 1/1     Running   0          44h
   omsagent-rs-74f488997c-dshpx   1/1     Running   1          44h
   omsagent-smtk7                 1/1     Running   1          44h
   ...
   ```

   ```bash
   $ kubectl get pods -n kube-system
   NAME                          READY   STATUS    RESTARTS   AGE
   ...
   ama-logs-48kpv                2/2     Running   0          2d13h
   ama-logs-mx24c                2/2     Running   0          2d13h
   ama-logs-rs-f9bbb9898-vbt6k   1/1     Running   0          30h
   ama-logs-sm2mz                2/2     Running   0          2d13h
   ama-logs-z7p4c                2/2     Running   0          2d13h
   ...
   ```

1. Apply config map to enable Prometheus metrics endpoint scrape.

   You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable Prometheus metrics endpoint scrape.

   If you installed Dapr to a different namespace, you need to change the `monitor_kubernetes_pod_namespaces` array values. For example:

   ```yaml
   ...
   prometheus-data-collection-settings: |-
     [prometheus_data_collection_settings.cluster]
       interval = "1m"
       monitor_kubernetes_pods = true
       monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
     [prometheus_data_collection_settings.node]
       interval = "1m"
   ...
   ```

   Apply config map:

   ```bash
   kubectl apply -f ./azm-config.map.yaml
   ```

## Install Dapr with JSON formatted logs

1. Install Dapr with JSON-formatted logs enabled.

   ```bash
   helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
   ```

1. Enable JSON-formatted logs in the Dapr sidecar and add Prometheus annotations.

   > Note: OMS Agent scrapes the metrics only if replicaset has Prometheus annotations.
   > Note: The Azure Monitor Agents (AMA) only send the metrics if the Prometheus annotations are set.

   Add the `dapr.io/log-as-json: "true"` annotation to your deployment yaml.

   Example:

   ```yaml
   apiVersion: apps/v1
   kind: Deployment
   metadata:
     name: pythonapp
     namespace: default
     labels:
       app: python
   spec:
     replicas: 1
     selector:
       matchLabels:
         app: python
     template:
       metadata:
         labels:
           app: python
         annotations:
           dapr.io/enabled: "true"
           dapr.io/app-id: "pythonapp"
           dapr.io/log-as-json: "true"
           prometheus.io/scrape: "true"
           prometheus.io/port: "9090"
           prometheus.io/path: "/"
   ...
   ```
## Search metrics and logs with Azure Monitor

1. Go to Azure Monitor in the Azure portal.

1. Search Dapr **Logs**.

   Here is an example query to parse JSON-formatted logs and query logs from Dapr system processes.

   ```
   ContainerLog
   | extend parsed=parse_json(LogEntry)
   | project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'], level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
   | where level != ""
   | sort by Time
   ```

1. Search **Metrics**.

   This query queries the `process_resident_memory_bytes` Prometheus metric for Dapr system processes and renders timecharts.

   ```
   InsightsMetrics
   | where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
   | extend tags=parse_json(Tags)
   | project TimeGenerated, Name, Val, app=tostring(tags['app'])
   | summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
   | where app startswith "dapr-"
   | render timechart
   ```
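Both queries can also be run outside the portal. A minimal sketch using the Azure CLI's Log Analytics extension, where the workspace GUID is a placeholder:

```bash
# Hypothetical CLI run of the log query above (requires the log-analytics extension).
az extension add --name log-analytics
az monitor log-analytics query \
  --workspace "00000000-0000-0000-0000-000000000000" \
  --analytics-query "ContainerLog | extend parsed=parse_json(LogEntry) | where tostring(parsed['level']) != '' | sort by TimeGenerated desc" \
  --timespan "PT1H"
```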
# References
## References

- [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
- [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
- [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)

[next changed file]

@@ -27,6 +27,8 @@ spec:
    value: "***********"
  - name: queueName
    value: "myqueue"
  # - name: pollingInterval
  #   value: "30s"
  # - name: ttlInSeconds
  #   value: "60"
  # - name: decodeBase64
@@ -50,6 +52,7 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| `accountName` | Y | Input/Output | The name of the Azure Storage account | `"account1"` |
| `accountKey` | Y* | Input/Output | The access key of the Azure Storage account. Only required when not using Azure AD authentication. | `"access-key"` |
| `queueName` | Y | Input/Output | The name of the Azure Storage queue | `"myqueue"` |
| `pollingInterval` | N | Output | Set the interval to poll Azure Storage Queues for new messages, as a Go duration value. Default: `"10s"` | `"30s"` |
| `ttlInSeconds` | N | Output | Parameter to set the default message time to live. If this parameter is omitted, messages will expire after 10 minutes. See [also](#specifying-a-ttl-per-message) | `"60"` |
| `decodeBase64` | N | Output | Configuration to decode base64 file content before saving to Storage Queues. (In case of saving a file with binary content). Defaults to `false` | `true`, `false` |
| `encodeBase64` | N | Output | If enabled base64 encodes the data payload before uploading to Azure storage queues. Default `false`. | `true`, `false` |
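To illustrate the new `pollingInterval` option end to end, a hypothetical binding manifest applied with kubectl; the component, account, and queue names are placeholders:

```bash
# Hypothetical Azure Storage Queues binding that polls every 30 seconds.
cat <<'EOF' | kubectl apply -f -
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: myqueue-binding
spec:
  type: bindings.azure.storagequeues
  version: v1
  metadata:
  - name: accountName
    value: "account1"
  - name: accountKey
    value: "***********"
  - name: queueName
    value: "myqueue"
  - name: pollingInterval
    value: "30s"
EOF
```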

[next changed file]

@@ -2,14 +2,16 @@
type: docs
title: "Microsoft SQL Server & Azure SQL"
linkTitle: "Microsoft SQL Server & Azure SQL"
description: Detailed information on the Microsoft SQL Server and Azure SQL state store component
description: Detailed information on the Microsoft SQL Server state store component
aliases:
  - "/operations/components/setup-state-store/supported-state-stores/setup-sqlserver/"
---

## Component format

To set up Microsoft SQL Server and Azure SQL state stores, create a component of type `state.sqlserver`. See [this guide]({{< ref "howto-get-save-state.md#step-1-setup-a-state-store" >}}) on how to create and apply a state store configuration.

This state store component can be used with both [Microsoft SQL Server](https://learn.microsoft.com/sql/) and [Azure SQL](https://learn.microsoft.com/azure/azure-sql/).

To set up this state store, create a component of type `state.sqlserver`. See [this guide]({{< ref "howto-get-save-state.md#step-1-setup-a-state-store" >}}) on how to create and apply a state store configuration.
```yaml
@@ -21,30 +23,42 @@ spec:
  type: state.sqlserver
  version: v1
  metadata:
  - name: connectionString
    value: <REPLACE-WITH-CONNECTION-STRING> # Required.
  - name: tableName
    value: <REPLACE-WITH-TABLE-NAME> # Optional. Defaults to "state"
  - name: keyType
    value: <REPLACE-WITH-KEY-TYPE> # Optional. Defaults to "string"
  - name: keyLength
    value: <KEY-LENGTH> # Optional. Defaults to 200. Used with the "string" keyType
  - name: schema
    value: <SCHEMA> # Optional. Defaults to "dbo"
  - name: indexedProperties
    value: <INDEXED-PROPERTIES> # Optional. List of IndexedProperties.
  - name: metadataTableName # Optional. Name of the table where to store metadata used by Dapr
    value: "dapr_metadata"
  - name: cleanupIntervalInSeconds # Optional. Cleanup interval in seconds, to remove expired rows
    value: 300

  # Authenticate using SQL Server credentials
  - name: connectionString
    value: |
      Server=myServerName\myInstanceName;Database=myDataBase;User Id=myUsername;Password=myPassword;

  # Authenticate with Azure AD (Azure SQL only)
  # "useAzureAD" must be set to "true"
  - name: useAzureAD
    value: true
  # Connection string or URL of the Azure SQL database, optionally containing the database
  - name: connectionString
    value: |
      sqlserver://myServerName.database.windows.net:1433?database=myDataBase

  # Other optional fields (listing default values)
  - name: tableName
    value: "state"
  - name: metadataTableName
    value: "dapr_metadata"
  - name: schema
    value: "dbo"
  - name: keyType
    value: "string"
  - name: keyLength
    value: "200"
  - name: indexedProperties
    value: ""
  - name: cleanupIntervalInSeconds
    value: "3600"
```
{{% alert title="Warning" color="warning" %}}
The above example uses secrets as plain strings. It is recommended to use a secret store for the secrets as described [here]({{< ref component-secrets.md >}}).
{{% /alert %}}
If you wish to use SQL server as an [actor state store]({{< ref "state_api.md#configuring-state-store-for-actors" >}}), append the following to the yaml.
If you wish to use SQL server as an [actor state store]({{< ref "state_api.md#configuring-state-store-for-actors" >}}), append the following to the metadata:
```yaml
  - name: actorStateStore
@@ -53,24 +67,43 @@ If you wish to use SQL server as an [actor state store]({{< ref "state_api.md#co
## Spec metadata fields

### Authenticate using SQL Server credentials

The following metadata options are **required** to authenticate using SQL Server credentials. This is supported on both SQL Server and Azure SQL.

| Field | Required | Details | Example |
|--------|:--------:|---------|---------|
| `connectionString` | Y | The connection string used to connect.<br>If the connection string contains the database, it must already exist. Otherwise, if the database is omitted, a default database named "Dapr" is created. | `"Server=myServerName\myInstanceName;Database=myDataBase;User Id=myUsername;Password=myPassword;"` |

### Authenticate using Azure AD

Authenticating with Azure AD is supported with Azure SQL only. All authentication methods supported by Dapr can be used, including client credentials ("service principal") and Managed Identity.

| Field | Required | Details | Example |
|--------|:--------:|---------|---------|
| `useAzureAD` | Y | Must be set to `true` to enable the component to retrieve access tokens from Azure AD. | `"true"` |
| `connectionString` | Y | The connection string or URL of the Azure SQL database, **without credentials**.<br>If the connection string contains the database, it must already exist. Otherwise, if the database is omitted, a default database named "Dapr" is created. | `"sqlserver://myServerName.database.windows.net:1433?database=myDataBase"` |
| `azureTenantId` | N | ID of the Azure AD tenant | `"cd4b2887-304c-47e1-b4d5-65447fdd542b"` |
| `azureClientId` | N | Client ID (application ID) | `"c7dd251f-811f-4ba2-a905-acd4d3f8f08b"` |
| `azureClientSecret` | N | Client secret (application password) | `"Ecy3XG7zVZK3/vl/a2NSB+a1zXLa8RnMum/IgD0E"` |
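To make the Azure AD client-credentials path concrete, a hypothetical provisioning sketch; the principal and database names are placeholders, and `sqlcmd -G` authenticates with Azure AD:

```bash
# Hypothetical: create a service principal, then grant it access inside the database.
az ad sp create-for-rbac --name dapr-sqlserver-state

sqlcmd -S myServerName.database.windows.net -d myDataBase -G -Q "
CREATE USER [dapr-sqlserver-state] FROM EXTERNAL PROVIDER;
ALTER ROLE db_datareader ADD MEMBER [dapr-sqlserver-state];
ALTER ROLE db_datawriter ADD MEMBER [dapr-sqlserver-state];
ALTER ROLE db_ddladmin ADD MEMBER [dapr-sqlserver-state];
"
```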
### Other metadata options

| Field | Required | Details | Example |
|--------------------|:--------:|---------|---------|
| `tableName` | N | The name of the table to use. Alpha-numeric with underscores. Defaults to `"state"` | `"table_name"` |
| `metadataTableName` | N | Name of the table Dapr uses to store a few metadata properties. Defaults to `dapr_metadata`. | `"dapr_metadata"` |
| `keyType` | N | The type of key used. Supported values: `"string"` (default), `"uuid"`, `"integer"`. | `"string"` |
| `keyLength` | N | The max length of key. Ignored if "keyType" is not `string`. Defaults to `"200"` | `"200"` |
| `schema` | N | The schema to use. Defaults to `"dbo"` | `"dapr"`,`"dbo"` |
| `indexedProperties` | N | List of indexed properties, as a string containing a JSON document. | `'[{"column": "transactionid", "property": "id", "type": "int"}, {"column": "customerid", "property": "customer", "type": "nvarchar(100)"}]'` |
| `actorStateStore` | N | Indicates that Dapr should configure this component for the actor state store ([more information]({{< ref "state_api.md#configuring-state-store-for-actors" >}})). | `"true"` |
| `cleanupIntervalInSeconds` | N | Interval, in seconds, to clean up rows with an expired TTL. Default: `"3600"` (i.e. 1 hour). Setting this to values <=0 disables the periodic cleanup. | `"1800"`, `"-1"` |

## Create a Microsoft SQL Server/Azure SQL instance

[Follow the instructions](https://docs.microsoft.com/azure/azure-sql/database/single-database-create-quickstart?view=azuresql&tabs=azure-portal) from the Azure documentation on how to create a SQL database. The database must be created before Dapr consumes it.

> Note: The Microsoft SQL Server/Azure SQL state store also supports SQL Server running on VMs and in Docker.

In order to set up SQL Server as a state store, you need the following properties:
@@ -104,6 +137,7 @@ CREATE CLUSTERED INDEX expiredate_idx ON state(ExpireDate ASC)
```

## Related links

- [Basic schema for a Dapr component]({{< ref component-schema >}})
- Read [this guide]({{< ref "howto-get-save-state.md#step-2-save-and-retrieve-a-single-state" >}}) for instructions on configuring state store components
- [State management building block]({{< ref state-management >}})

[next changed file]

@@ -1,19 +1,13 @@
<script src="/js/copy-code-button.js"></script>

{{ with .Site.Params.algolia_docsearch }}
<script src="https://cdn.jsdelivr.net/npm/docsearch.js@2.6.3/dist/cdn/docsearch.min.js"></script>
<script>
<script src="https://cdn.jsdelivr.net/npm/@docsearch/js@3"></script>
<script type="text/javascript">
  docsearch({
    // Your apiKey and indexName will be given to you once
    // we create your config
    apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9',
    indexName: 'crawler_dapr',
    container: '#docsearch',
    appId: 'O0QLQGNF38',
    // Replace inputSelector with a CSS selector
    // matching your search input
    inputSelector: '.td-search-input',
    // Set debug to true to inspect the dropdown
    debug: false,
    apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9',
    indexName: 'daprdocs',
  });
</script>
{{ end }}
<script src="/js/copy-code-button.js"></script>

[next changed file]

@@ -1,3 +1,3 @@
{{ with .Site.Params.algolia_docsearch }}
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@docsearch/css@3" />
{{ end }}

[next changed file]

@@ -0,0 +1,30 @@
{{ if .Site.Params.gcs_engine_id -}}
  <input type="search" class="form-control td-search-input" placeholder="&#xf002; {{ T "ui_search" }}" aria-label="{{ T "ui_search" }}" autocomplete="off">
{{ else if .Site.Params.algolia_docsearch -}}
  <div id="docsearch"></div>
{{ else if .Site.Params.offlineSearch -}}
  {{ $offlineSearchIndex := resources.Get "json/offline-search-index.json" | resources.ExecuteAsTemplate "offline-search-index.json" . -}}
  {{ if hugo.IsProduction -}}
    {{/* Use `md5` as finger print hash function to shorten file name to avoid `file name too long` error. */ -}}
    {{ $offlineSearchIndex = $offlineSearchIndex | fingerprint "md5" -}}
  {{ end -}}
  {{ $offlineSearchLink := $offlineSearchIndex.RelPermalink -}}

  <input
    type="search"
    class="form-control td-search-input"
    placeholder="&#xf002; {{ T "ui_search" }}"
    aria-label="{{ T "ui_search" }}"
    autocomplete="off"
    {{/*
      The data attribute name of the json file URL must end with `src` since
      Hugo's absurlreplacer requires `src`, `href`, `action` or `srcset` suffix for the attribute name.
      If the absurlreplacer is not applied, the URL will start with `/`.
      It causes the json file loading error when relativeURLs is enabled.
      https://github.com/google/docsy/issues/181
    */}}
    data-offline-search-index-json-src="{{ $offlineSearchLink }}"
    data-offline-search-base-href="/"
    data-offline-search-max-results="{{ .Site.Params.offlineSearchMaxResults | default 10 }}"
  >
{{ end -}}

[next changed file]

@@ -1,49 +1,35 @@
// Before: button-based copy implementation (removed)
function addCopyButtons(clipboard) {
    document.querySelectorAll('pre > code').forEach(function(codeBlock) {
        var button = document.createElement('button');
        button.className = 'copy-code-button';
        button.type = 'button';
        button.innerText = 'Copy';

        button.addEventListener('click', function() {
            clipboard.writeText(codeBlock.textContent).then(
                function() {
                    button.blur();
                    button.innerText = 'Copied!';
                    setTimeout(function() {
                        button.innerText = 'Copy';
                    }, 2000);
                },
                function(error) {
                    button.innerText = 'Error';
                    console.error(error);
                }
            );
        });

        var pre = codeBlock.parentNode;
        if (pre.parentNode.classList.contains('highlight')) {
            var highlight = pre.parentNode;
            highlight.parentNode.insertBefore(button, highlight);
        } else {
            pre.parentNode.insertBefore(button, pre);
        }
    });
}

if (navigator && navigator.clipboard) {
    addCopyButtons(navigator.clipboard);
} else {
    var script = document.createElement('script');
    script.src = 'https://cdnjs.cloudflare.com/ajax/libs/clipboard-polyfill/2.7.0/clipboard-polyfill.promise.js';
    script.integrity = 'sha256-waClS2re9NUbXRsryKoof+F9qc1gjjIhc2eT7ZbIv94=';
    script.crossOrigin = 'anonymous';
    script.onload = function() {
        addCopyButtons(clipboard);
    };
    document.body.appendChild(script);
}

// After: icon-based copy implementation (added)
const highlightClass = document.querySelectorAll('.highlight');

highlightClass.forEach(element => {
    const copyIcon = document.createElement('i');
    copyIcon.classList.add('fas', 'fa-copy', 'copy-icon');
    copyIcon.style.color = 'white';
    copyIcon.style.display = 'none';
    element.appendChild(copyIcon);

    element.addEventListener('mouseenter', () => {
        copyIcon.style.display = 'inline';
    });

    element.addEventListener('mouseleave', () => {
        copyIcon.style.display = 'none';
        copyIcon.classList.replace('fa-check', 'fa-copy');
    });

    copyIcon.addEventListener('click', async () => {
        const selection = window.getSelection();
        const range = document.createRange();
        range.selectNodeContents(element);
        selection.removeAllRanges();
        selection.addRange(range);

        try {
            await navigator.clipboard.writeText(selection.toString());
            console.log('Text copied to clipboard');
            copyIcon.classList.replace('fa-copy', 'fa-check');
            selection.removeAllRanges();
        } catch (error) {
            console.error('Failed to copy: ', error);
        }
    });
});