mirror of https://github.com/dapr/docs.git
Merge branch 'v1.11' into issue_3250
commit 9f35db9837
@@ -0,0 +1,118 @@
import os
import sys
import json
from bs4 import BeautifulSoup
from algoliasearch.search_client import SearchClient

url = "docs.dapr.io"
if len(sys.argv) > 1:
    starting_directory = os.path.join(os.getcwd(), str(sys.argv[1]))
else:
    starting_directory = os.getcwd()

ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID')
ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY')
ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME')

client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
index = client.init_index(ALGOLIA_INDEX_NAME)

excluded_files = [
    "404.html",
]

excluded_directories = [
    "zh-hans",
]

# Top-level docs sections, used to weight search results.
rankings = {
    "Getting started": 0,
    "Concepts": 100,
    "Developing applications": 200,
    "Operations": 300,
    "Reference": 400,
    "Contributing": 500,
    "Home": 600
}


def scan_directory(directory: str, pages: list):
    """Recursively collect the HTML pages to index, honoring the exclusion
    lists and the <!-- DISABLE_ALGOLIA --> opt-out marker."""
    if os.path.basename(directory) in excluded_directories:
        print(f'Skipping directory: {directory}')
        return
    for file in os.listdir(directory):
        path = os.path.join(directory, file)
        if os.path.isfile(path):
            if file.endswith(".html") and file not in excluded_files:
                if '<!-- DISABLE_ALGOLIA -->' not in open(path, encoding="utf8").read():
                    print(f'Indexing: {path}')
                    pages.append(path)
                else:
                    print(f'Skipping hidden page: {path}')
        else:
            scan_directory(path, pages)


def parse_file(path: str):
    """Extract page metadata, breadcrumbs, and body text into an Algolia record."""
    data = {}
    data["hierarchy"] = {}
    data["rank"] = 999
    data["subrank"] = 99
    data["type"] = "lvl2"
    data["lvl0"] = ""
    data["lvl1"] = ""
    data["lvl2"] = ""
    data["lvl3"] = ""
    text = ""
    subrank = 0
    with open(path, "r", errors='ignore') as file:
        content = file.read()
        soup = BeautifulSoup(content, "html.parser")
        for meta in soup.find_all("meta"):
            if meta.get("name") == "description":
                data["lvl2"] = meta.get("content")
                data["hierarchy"]["lvl1"] = meta.get("content")
            elif meta.get("property") == "og:title":
                data["lvl0"] = meta.get("content")
                data["hierarchy"]["lvl0"] = meta.get("content")
                data["hierarchy"]["lvl2"] = meta.get("content")
            elif meta.get("property") == "og:url":
                data["url"] = meta.get("content")
                data["path"] = meta.get("content").split(url)[1]
                data["objectID"] = meta.get("content").split(url)[1]
        breadcrumbs = soup.find_all("li", class_="breadcrumb-item")
        try:
            subrank = len(breadcrumbs)
            data["subrank"] = subrank
        except:
            subrank = 99
            data["subrank"] = 99
        # Use the first breadcrumb to determine the page's section ranking.
        for bc in breadcrumbs:
            section = bc.text.strip()
            data["lvl1"] = section
            data["hierarchy"]["lvl0"] = section
            try:
                data["rank"] = rankings[section] + subrank
            except:
                print(f"Rank not found for section {section}")
                data["rank"] = 998
            break
        for p in soup.find_all("p"):
            if p.text != "":
                text = text + p.text
        data["text"] = text
    return data


def index_payload(payload):
    """Replace the entire Algolia index with the freshly generated records."""
    res = index.replace_all_objects(payload)
    res.wait()


if __name__ == "__main__":
    pages = []
    payload = []
    scan_directory(starting_directory, pages)
    for page in pages:
        data = parse_file(page)
        if "objectID" in data:
            payload.append(data)
    index_payload(payload)

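For reference, the workflow below runs this script against the generated site after downloading the Hugo build artifact. A local run would look something like the following sketch, assuming the three Algolia environment variables are exported and the built HTML lives in `./site`:

```bash
export ALGOLIA_APP_ID=<app id>
export ALGOLIA_API_WRITE_KEY=<write key>
export ALGOLIA_INDEX_NAME=daprdocs
python ./.github/scripts/algolia.py ./site
```
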
@@ -1,6 +1,7 @@
name: Azure Static Web App Root

on:
  workflow_dispatch:
  push:
    branches:
      - v1.11

@@ -9,35 +10,66 @@ on:
    branches:
      - v1.11

concurrency:
  # Cancel the previously triggered build, but only for PR builds.
  group: website-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

jobs:
  build_and_deploy_job:
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed')
    name: Build Hugo Website
    if: github.event.action != 'closed'
    runs-on: ubuntu-latest
    name: Build and Deploy Job
    env:
      SWA_BASE: 'proud-bay-0e9e0e81e'
      HUGO_ENV: production
    steps:
      - uses: actions/checkout@v3
      - name: Checkout docs repo
        uses: actions/checkout@v3
        with:
          submodules: recursive
          fetch-depth: 0
          submodules: true
      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Setup Hugo
        uses: peaceiris/actions-hugo@v2.5.0
        with:
          hugo-version: 0.102.3
          extended: true
      - name: Setup Docsy
        run: cd daprdocs && git submodule update --init --recursive && sudo npm install -D --save autoprefixer && sudo npm install -D --save postcss-cli
      - name: Build And Deploy
        id: builddeploy
        run: |
          cd daprdocs
          git submodule update --init --recursive
          sudo npm install -D --save autoprefixer
          sudo npm install -D --save postcss-cli
      - name: Build Hugo Website
        run: |
          cd daprdocs
          git config --global --add safe.directory /github/workspace
          if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then
            STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/"
          fi
          hugo ${STAGING_URL+-b "$STAGING_URL"}
      - name: Deploy docs site
        uses: Azure/static-web-apps-deploy@v1
        env:
          HUGO_ENV: production
          HUGO_VERSION: "0.100.2"
        with:
          azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
          skip_deploy_on_missing_secrets: true
          repo_token: ${{ secrets.GITHUB_TOKEN }} # Used for Github integrations (i.e. PR comments)
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          action: "upload"
          app_location: "/daprdocs"
          app_build_command: "git config --global --add safe.directory /github/workspace && hugo"
          output_location: "public"
          skip_api_build: true
          app_location: "daprdocs/public/"
          api_location: "daprdocs/public/"
          output_location: ""
          skip_app_build: true
          skip_deploy_on_missing_secrets: true
      - name: Upload Hugo artifacts
        uses: actions/upload-artifact@v3
        with:
          name: hugo_build
          path: ./daprdocs/public/
          if-no-files-found: error

  close_pull_request_job:
  close_staging_site:
    if: github.event_name == 'pull_request' && github.event.action == 'closed'
    runs-on: ubuntu-latest
    name: Close Pull Request Job

@@ -48,3 +80,29 @@ jobs:
        with:
          azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
          action: "close"

  algolia_index:
    name: Index site for Algolia
    if: github.event_name == 'push'
    needs: ['build_and_deploy_job']
    runs-on: ubuntu-latest
    env:
      ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
      ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }}
      ALGOLIA_INDEX_NAME: daprdocs
    steps:
      - name: Checkout docs repo
        uses: actions/checkout@v2
        with:
          submodules: false
      - name: Download Hugo artifacts
        uses: actions/download-artifact@v3
        with:
          name: hugo_build
          path: site/
      - name: Install Python packages
        run: |
          pip install --upgrade bs4
          pip install --upgrade 'algoliasearch>=2.0,<3.0'
      - name: Index site
        run: python ./.github/scripts/algolia.py ./site

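For readers unfamiliar with the `${STAGING_URL+-b "$STAGING_URL"}` form used in the build step above: `${VAR+word}` expands to `word` only when `VAR` is set, so the `-b` base-URL flag is passed to Hugo only for pull request builds. A minimal sketch of that behavior (the hostname is a made-up example):

```bash
unset STAGING_URL
echo hugo ${STAGING_URL+-b "$STAGING_URL"}
# -> hugo

STAGING_URL="https://example.azurestaticapps.net/"
echo hugo ${STAGING_URL+-b "$STAGING_URL"}
# -> hugo -b https://example.azurestaticapps.net/
```
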
@@ -15,118 +15,120 @@ description: "Enable Dapr metrics and logs with Azure Monitor for Azure Kubernetes Service"

## Enable Prometheus metric scrape using config map

1. Make sure that omsagents are running
1. Make sure that Azure Monitor Agents (AMA) are running.

```bash
$ kubectl get pods -n kube-system
NAME                                       READY   STATUS    RESTARTS   AGE
...
omsagent-75qjs                             1/1     Running   1          44h
omsagent-c7c4t                             1/1     Running   0          44h
omsagent-rs-74f488997c-dshpx               1/1     Running   1          44h
omsagent-smtk7                             1/1     Running   1          44h
...
```
```bash
$ kubectl get pods -n kube-system
NAME                          READY   STATUS    RESTARTS   AGE
...
ama-logs-48kpv                2/2     Running   0          2d13h
ama-logs-mx24c                2/2     Running   0          2d13h
ama-logs-rs-f9bbb9898-vbt6k   1/1     Running   0          30h
ama-logs-sm2mz                2/2     Running   0          2d13h
ama-logs-z7p4c                2/2     Running   0          2d13h
...
```

2. Apply config map to enable Prometheus metrics endpoint scrape.
1. Apply config map to enable Prometheus metrics endpoint scrape.

You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable prometheus metrics endpoint scrape.
You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable Prometheus metrics endpoint scrape.

If you installed Dapr to the different namespace, you need to change the `monitor_kubernetes_pod_namespaces` array values. For example:
If you installed Dapr to a different namespace, you need to change the `monitor_kubernetes_pods_namespaces` array values. For example:

```yaml
...
prometheus-data-collection-settings: |-
  [prometheus_data_collection_settings.cluster]
      interval = "1m"
      monitor_kubernetes_pods = true
      monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
  [prometheus_data_collection_settings.node]
      interval = "1m"
...
```
```yaml
...
prometheus-data-collection-settings: |-
  [prometheus_data_collection_settings.cluster]
      interval = "1m"
      monitor_kubernetes_pods = true
      monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
  [prometheus_data_collection_settings.node]
      interval = "1m"
...
```

Apply config map:
Apply config map:

```bash
kubectl apply -f ./azm-config.map.yaml
```
```bash
kubectl apply -f ./azm-config.map.yaml
```

## Install Dapr with JSON formatted logs

1. Install Dapr with enabling JSON-formatted logs
1. Install Dapr with JSON-formatted logs enabled.

```bash
helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
```
```bash
helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
```

2. Enable JSON formatted log in Dapr sidecar and add Prometheus annotations.
1. Enable JSON-formatted logs in the Dapr sidecar and add Prometheus annotations.

> Note: OMS Agent scrapes the metrics only if replicaset has Prometheus annotations.
> Note: The Azure Monitor Agents (AMA) only send the metrics if the Prometheus annotations are set.

Add `dapr.io/log-as-json: "true"` annotation to your deployment yaml.
Add the `dapr.io/log-as-json: "true"` annotation to your deployment yaml.

Example:
```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: pythonapp
  namespace: default
  labels:
    app: python
spec:
  replicas: 1
  selector:
    matchLabels:
      app: python
  template:
    metadata:
      labels:
        app: python
      annotations:
        dapr.io/enabled: "true"
        dapr.io/app-id: "pythonapp"
        dapr.io/log-as-json: "true"
        prometheus.io/scrape: "true"
        prometheus.io/port: "9090"
        prometheus.io/path: "/"

...
```

Example:

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: pythonapp
  namespace: default
  labels:
    app: python
spec:
  replicas: 1
  selector:
    matchLabels:
      app: python
  template:
    metadata:
      labels:
        app: python
      annotations:
        dapr.io/enabled: "true"
        dapr.io/app-id: "pythonapp"
        dapr.io/log-as-json: "true"
        prometheus.io/scrape: "true"
        prometheus.io/port: "9090"
        prometheus.io/path: "/"

...
```

## Search metrics and logs with Azure Monitor

1. Go to Azure Monitor
1. Go to Azure Monitor in the Azure portal.

2. Search Dapr logs
1. Search Dapr **Logs**.

Here is an example query, to parse JSON formatted logs and query logs from dapr system processes.
Here is an example query to parse JSON-formatted logs and query logs from Dapr system processes.

```
ContainerLog
| extend parsed=parse_json(LogEntry)
| project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'],level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
| where level != ""
| sort by Time
```
```
ContainerLog
| extend parsed=parse_json(LogEntry)
| project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'],level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
| where level != ""
| sort by Time
```

3. Search metrics
1. Search **Metrics**.

This query, queries process_resident_memory_bytes Prometheus metrics for Dapr system processes and renders timecharts
This query retrieves the `process_resident_memory_bytes` Prometheus metric for Dapr system processes and renders a timechart.

```
InsightsMetrics
| where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
| extend tags=parse_json(Tags)
| project TimeGenerated, Name, Val, app=tostring(tags['app'])
| summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
| where app startswith "dapr-"
| render timechart
```
```
InsightsMetrics
| where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
| extend tags=parse_json(Tags)
| project TimeGenerated, Name, Val, app=tostring(tags['app'])
| summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
| where app startswith "dapr-"
| render timechart
```

# References
## References

* [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
* [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
* [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)
- [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
- [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
- [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)

@@ -179,7 +179,7 @@ Example:
    "topic": "newOrder",
    "route": "/orders",
    "metadata": {
      "rawPayload": "true",
      "rawPayload": "true"
    }
  }
]

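For context, this fragment is part of a programmatic subscription list; the complete array would look roughly like the following sketch (the `pubsubname` value is an assumption):

```json
[
  {
    "pubsubname": "pubsub",
    "topic": "newOrder",
    "route": "/orders",
    "metadata": {
      "rawPayload": "true"
    }
  }
]
```
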
@@ -39,6 +39,8 @@ spec:
    secretKeyRef:
      name: kafka-secrets
      key: saslPasswordSecret
  - name: saslMechanism
    value: "SHA-512"
  - name: initialOffset # Optional. Used for input bindings.
    value: "newest"
  - name: maxMessageBytes # Optional.

@@ -61,6 +63,7 @@ spec:
| authType | Y | Input/Output | Configure or disable authentication. Supported values: `none`, `password`, `mtls`, or `oidc` | `"password"`, `"none"` |
| saslUsername | N | Input/Output | The SASL username used for authentication. Only required if `authRequired` is set to `"true"`. | `"adminuser"` |
| saslPassword | N | Input/Output | The SASL password used for authentication. Can be `secretKeyRef` to use a [secret reference]({{< ref component-secrets.md >}}). Only required if `authRequired` is set to `"true"`. | `""`, `"KeFg23!"` |
| saslMechanism | N | Input/Output | The SASL authentication mechanism you'd like to use. Only required if `authType` is set to `"password"`. If not provided, defaults to `PLAINTEXT`, which could cause a break for some services, like Amazon Managed Service for Kafka. | `"SHA-512", "SHA-256", "PLAINTEXT"` |
| initialOffset | N | Input | The initial offset to use if no offset was previously committed. Should be "newest" or "oldest". Defaults to "newest". | `"oldest"` |
| maxMessageBytes | N | Input/Output | The maximum size in bytes allowed for a single Kafka message. Defaults to 1024. | `2048` |
| oidcTokenEndpoint | N | Input/Output | Full URL to an OAuth2 identity provider access token endpoint. Required when `authType` is set to `oidc` | "https://identity.example.com/v1/token" |

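To illustrate how these fields fit together, here is a minimal sketch of a `bindings.kafka` component using only metadata documented in the rows above (the component name and values are hypothetical, and connection fields such as brokers and topics are omitted):

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: kafka-binding   # hypothetical component name
spec:
  type: bindings.kafka
  version: v1
  metadata:
  - name: authType             # none, password, mtls, or oidc
    value: "oidc"
  - name: oidcTokenEndpoint    # required when authType is oidc
    value: "https://identity.example.com/v1/token"
  - name: initialOffset        # input bindings only
    value: "oldest"
  - name: maxMessageBytes
    value: "2048"
```
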
@@ -96,6 +96,8 @@ An HTTP 204 (No Content) and empty body is returned if successful.

You can get a record in Redis using the `get` operation. This gets a key that was previously set.

This takes an optional parameter `delete`, which is `false` by default. When it is set to `true`, this operation uses the `GETDEL` operation of Redis: it returns the `value` that was previously set and then deletes it.

#### Request

```json

@@ -120,6 +122,20 @@ You can get a record in Redis using the `get` operation. This gets a key that wa
}
```

#### Request with delete flag

```json
{
  "operation": "get",
  "metadata": {
    "key": "key1",
    "delete": "true"
  },
  "data": {
  }
}
```

### delete

You can delete a record in Redis using the `delete` operation. Returns success whether the key exists or not.

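A `delete` request would look like the following (a sketch following the same request shape as the `get` examples above; the key name is hypothetical):

```json
{
  "operation": "delete",
  "metadata": {
    "key": "key1"
  }
}
```
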
@@ -1,19 +1,13 @@
<script src="/js/copy-code-button.js"></script>

{{ with .Site.Params.algolia_docsearch }}
<script src="https://cdn.jsdelivr.net/npm/docsearch.js@2.6.3/dist/cdn/docsearch.min.js"></script>
<script>
<script src="https://cdn.jsdelivr.net/npm/@docsearch/js@3"></script>
<script type="text/javascript">
  docsearch({
    // Your apiKey and indexName will be given to you once
    // we create your config
    apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9',
    indexName: 'crawler_dapr',
    container: '#docsearch',
    appId: 'O0QLQGNF38',
    // Replace inputSelector with a CSS selector
    // matching your search input
    inputSelector: '.td-search-input',
    // Set debug to true to inspect the dropdown
    debug: false,
    apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9',
    indexName: 'daprdocs',
  });
</script>
{{ end }}

<script src="/js/copy-code-button.js"></script>

@@ -1,3 +1,3 @@
{{ with .Site.Params.algolia_docsearch }}
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@docsearch/css@3" />
{{ end }}

@@ -0,0 +1,30 @@
{{ if .Site.Params.gcs_engine_id -}}
  <input type="search" class="form-control td-search-input" placeholder=" {{ T "ui_search" }}" aria-label="{{ T "ui_search" }}" autocomplete="off">
{{ else if .Site.Params.algolia_docsearch -}}
  <div id="docsearch"></div>
{{ else if .Site.Params.offlineSearch -}}
  {{ $offlineSearchIndex := resources.Get "json/offline-search-index.json" | resources.ExecuteAsTemplate "offline-search-index.json" . -}}
  {{ if hugo.IsProduction -}}
    {{/* Use `md5` as finger print hash function to shorten file name to avoid `file name too long` error. */ -}}
    {{ $offlineSearchIndex = $offlineSearchIndex | fingerprint "md5" -}}
  {{ end -}}
  {{ $offlineSearchLink := $offlineSearchIndex.RelPermalink -}}

  <input
    type="search"
    class="form-control td-search-input"
    placeholder=" {{ T "ui_search" }}"
    aria-label="{{ T "ui_search" }}"
    autocomplete="off"
    {{/*
      The data attribute name of the json file URL must end with `src` since
      Hugo's absurlreplacer requires `src`, `href`, `action` or `srcset` suffix for the attribute name.
      If the absurlreplacer is not applied, the URL will start with `/`.
      It causes the json file loading error when relativeURLs is enabled.
      https://github.com/google/docsy/issues/181
    */}}
    data-offline-search-index-json-src="{{ $offlineSearchLink }}"
    data-offline-search-base-href="/"
    data-offline-search-max-results="{{ .Site.Params.offlineSearchMaxResults | default 10 }}"
  >
{{ end -}}