From e23ae809da09cf2ba9e3d9932fef5aff070d7087 Mon Sep 17 00:00:00 2001
From: Hannah Hunter
Date: Thu, 15 Jun 2023 14:34:03 -0400
Subject: [PATCH 01/10] add saslmechanism

Signed-off-by: Hannah Hunter
---
 .../reference/components-reference/supported-bindings/kafka.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md b/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md
index 3ec497552..cc40d1b95 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md
@@ -39,6 +39,8 @@ spec:
       secretKeyRef:
         name: kafka-secrets
         key: saslPasswordSecret
+  - name: saslMechanism
+    value: "SHA-512"
   - name: initialOffset # Optional. Used for input bindings.
     value: "newest"
   - name: maxMessageBytes # Optional.
@@ -61,6 +63,7 @@ spec:
 | authType | Y | Input/Output | Configure or disable authentication. Supported values: `none`, `password`, `mtls`, or `oidc` | `"password"`, `"none"` |
 | saslUsername | N | Input/Output | The SASL username used for authentication. Only required if `authRequired` is set to `"true"`. | `"adminuser"` |
 | saslPassword | N | Input/Output | The SASL password used for authentication. Can be `secretKeyRef` to use a [secret reference]({{< ref component-secrets.md >}}). Only required if `authRequired` is set to `"true"`. | `""`, `"KeFg23!"` |
+| saslMechanism | N | Input/Output | The SASL authentication mechanism you'd like to use. Only required if `authType` is set to `"password"`. If not provided, will default to `PLAINTEXT`, which could break some services, like Amazon Managed Service for Kafka. | `""`, `"SHA-512"` |
 | initialOffset | N | Input | The initial offset to use if no offset was previously committed. Should be "newest" or "oldest". Defaults to "newest". | `"oldest"` |
 | maxMessageBytes | N | Input/Output | The maximum size in bytes allowed for a single Kafka message. Defaults to 1024. | `2048` |
 | oidcTokenEndpoint | N | Input/Output | Full URL to an OAuth2 identity provider access token endpoint. Required when `authType` is set to `oidc` | "https://identity.example.com/v1/token" |
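For context on what the component metadata in PATCH 01 drives: once a Kafka binding component like the one above is applied, an application publishes through it over Dapr's bindings HTTP API. A minimal sketch, assuming a component named `kafka-binding` and the default Dapr HTTP port 3500 (the component name and payload are illustrative, not part of the patch):

```bash
# Invoke the Kafka output binding through the local Dapr sidecar.
# "create" is the operation Kafka output bindings accept.
curl -X POST http://localhost:3500/v1.0/bindings/kafka-binding \
  -H "Content-Type: application/json" \
  -d '{
        "operation": "create",
        "data": { "orderId": 42 }
      }'
```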
From e388bd56e7538ad9734f73c00e27ab4274e1000b Mon Sep 17 00:00:00 2001
From: Hannah Hunter
Date: Thu, 15 Jun 2023 15:44:20 -0400
Subject: [PATCH 02/10] duplicate changes from deleted repo

Signed-off-by: Hannah Hunter
---
 .../supported-bindings/redis.md | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
index 8fe7638e0..d399d2650 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
@@ -96,6 +96,8 @@ An HTTP 204 (No Content) and empty body is returned if successful.
 
 You can get a record in Redis using the `get` operation. This gets a key that was previously set.
 
+This takes an optional parameter `delete`, which is by default `false`. When it is set to `true`, this operation will use the `GETDEL` operation of Redis. That is, it will return the `value` which was previously set and delete it.
+
 #### Request
 
 ```json
 {
   "operation": "get",
   "metadata": {
     "key": "key1"
   },
   "data": {
   }
 }
 ```
@@ -120,6 +122,20 @@ You can get a record in Redis using the `get` operation.
 }
 ```
 
+#### Request with delete flag
+
+```json
+{
+  "operation": "get",
+  "metadata": {
+    "key": "key1",
+    "delete": "true"
+  },
+  "data": {
+  }
+}
+```
+
 ### delete
 
 You can delete a record in Redis using the `delete` operation. Returns success whether the key exists or not.
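The `GETDEL`-backed flow PATCH 02 documents can be exercised directly against a running sidecar. A minimal sketch, assuming a Redis binding component named `redis-binding` and the default Dapr HTTP port 3500 (both assumptions for illustration, not taken from the patch):

```bash
# Read key1 through the Redis output binding and delete it in the same call.
# The "delete" metadata flag is the behavior added in PATCH 02 above.
curl -X POST http://localhost:3500/v1.0/bindings/redis-binding \
  -H "Content-Type: application/json" \
  -d '{
        "operation": "get",
        "metadata": { "key": "key1", "delete": "true" }
      }'
```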
From 92e83e57defb92b5f8fd0d029afa134b45e2414f Mon Sep 17 00:00:00 2001
From: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
Date: Fri, 16 Jun 2023 10:13:17 -0400
Subject: [PATCH 03/10] Update
 daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md

Co-authored-by: Mark Fussell
Signed-off-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
---
 .../reference/components-reference/supported-bindings/kafka.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md b/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md
index cc40d1b95..912471cbb 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/kafka.md
@@ -63,7 +63,7 @@ spec:
 | authType | Y | Input/Output | Configure or disable authentication. Supported values: `none`, `password`, `mtls`, or `oidc` | `"password"`, `"none"` |
 | saslUsername | N | Input/Output | The SASL username used for authentication. Only required if `authRequired` is set to `"true"`. | `"adminuser"` |
 | saslPassword | N | Input/Output | The SASL password used for authentication. Can be `secretKeyRef` to use a [secret reference]({{< ref component-secrets.md >}}). Only required if `authRequired` is set to `"true"`. | `""`, `"KeFg23!"` |
-| saslMechanism | N | Input/Output | The SASL authentication mechanism you'd like to use. Only required if `authType` is set to `"password"`. If not provided, will default to `PLAINTEXT`, which could break some services, like Amazon Managed Service for Kafka. | `""`, `"SHA-512"` |
+| saslMechanism | N | Input/Output | The SASL authentication mechanism you'd like to use. Only required if `authType` is set to `"password"`. If not provided, defaults to `PLAINTEXT`, which could break some services, like Amazon Managed Service for Kafka. | `"SHA-512", "SHA-256", "PLAINTEXT"` |
 | initialOffset | N | Input | The initial offset to use if no offset was previously committed. Should be "newest" or "oldest". Defaults to "newest". | `"oldest"` |
 | maxMessageBytes | N | Input/Output | The maximum size in bytes allowed for a single Kafka message. Defaults to 1024. | `2048` |
 | oidcTokenEndpoint | N | Input/Output | Full URL to an OAuth2 identity provider access token endpoint. Required when `authType` is set to `oidc` | "https://identity.example.com/v1/token" |

From cf3afe5e84a7a00ba1ac7e9908ca5053cd837d50 Mon Sep 17 00:00:00 2001
From: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
Date: Fri, 16 Jun 2023 10:22:44 -0400
Subject: [PATCH 04/10] Update
 daprdocs/content/en/reference/components-reference/supported-bindings/redis.md

Co-authored-by: Mark Fussell
Signed-off-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
---
 .../reference/components-reference/supported-bindings/redis.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
index d399d2650..7d5fdc8ae 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
@@ -96,7 +96,7 @@ An HTTP 204 (No Content) and empty body is returned if successful.
 
 You can get a record in Redis using the `get` operation. This gets a key that was previously set.
 
-This takes an optional parameter `delete`, which is by default `false`. When it is set to `true`, this operation will use the `GETDEL` operation of Redis. That is, it will return the `value` which was previously set and delete it.
+This takes an optional parameter `delete`, which is by default `false`. When it is set to `true`, this operation uses the `GETDEL` operation of Redis. That is, it returns the `value` which was previously set and then deletes it.
 
 #### Request

From ee25a6a753b38305ae7ba85c44ada8f15fdcbcee Mon Sep 17 00:00:00 2001
From: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
Date: Fri, 16 Jun 2023 21:06:01 -0400
Subject: [PATCH 05/10] [Azure Monitor] Reformat/refresh doc and update from
 retired terminology (#3549)

* general doc/formatting refresh and update from retired terminology

Signed-off-by: Hannah Hunter

* Update daprdocs/content/en/operations/monitoring/metrics/azure-monitor.md

Co-authored-by: Mark Fussell
Signed-off-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>

---------

Signed-off-by: Hannah Hunter
Signed-off-by: Hannah Hunter <94493363+hhunter-ms@users.noreply.github.com>
Co-authored-by: Mark Fussell
---
 .../monitoring/metrics/azure-monitor.md       | 178 +++++++++---------
 1 file changed, 90 insertions(+), 88 deletions(-)

diff --git a/daprdocs/content/en/operations/monitoring/metrics/azure-monitor.md b/daprdocs/content/en/operations/monitoring/metrics/azure-monitor.md
index d63b820ea..3011ef399 100644
--- a/daprdocs/content/en/operations/monitoring/metrics/azure-monitor.md
+++ b/daprdocs/content/en/operations/monitoring/metrics/azure-monitor.md
@@ -15,118 +15,120 @@ description: "Enable Dapr metrics and logs with Azure Monitor for Azure Kubernetes Service (AKS)"
 
 ## Enable Prometheus metric scrape using config map
 
-1. Make sure that omsagents are running
+1. Make sure that Azure Monitor Agents (AMA) are running.
 
-```bash
-$ kubectl get pods -n kube-system
-NAME READY STATUS RESTARTS AGE
-...
-omsagent-75qjs 1/1 Running 1 44h
-omsagent-c7c4t 1/1 Running 0 44h
-omsagent-rs-74f488997c-dshpx 1/1 Running 1 44h
-omsagent-smtk7 1/1 Running 1 44h
-...
-```
+   ```bash
+   $ kubectl get pods -n kube-system
+   NAME READY STATUS RESTARTS AGE
+   ...
+   ama-logs-48kpv 2/2 Running 0 2d13h
+   ama-logs-mx24c 2/2 Running 0 2d13h
+   ama-logs-rs-f9bbb9898-vbt6k 1/1 Running 0 30h
+   ama-logs-sm2mz 2/2 Running 0 2d13h
+   ama-logs-z7p4c 2/2 Running 0 2d13h
+   ...
+   ```
 
-2. Apply config map to enable Prometheus metrics endpoint scrape.
+1. Apply config map to enable Prometheus metrics endpoint scrape.
 
-You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable prometheus metrics endpoint scrape.
+   You can use [azm-config-map.yaml](/docs/azm-config-map.yaml) to enable Prometheus metrics endpoint scrape.
 
-If you installed Dapr to the different namespace, you need to change the `monitor_kubernetes_pod_namespaces` array values. For example:
+   If you installed Dapr to a different namespace, you need to change the `monitor_kubernetes_pod_namespaces` array values. For example:
 
-```yaml
-...
-  prometheus-data-collection-settings: |-
-    [prometheus_data_collection_settings.cluster]
-        interval = "1m"
-        monitor_kubernetes_pods = true
-        monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
-    [prometheus_data_collection_settings.node]
-        interval = "1m"
-...
-```
+   ```yaml
+   ...
+     prometheus-data-collection-settings: |-
+       [prometheus_data_collection_settings.cluster]
+           interval = "1m"
+           monitor_kubernetes_pods = true
+           monitor_kubernetes_pods_namespaces = ["dapr-system", "default"]
+       [prometheus_data_collection_settings.node]
+           interval = "1m"
+   ...
+   ```
 
-Apply config map:
+   Apply config map:
 
-```bash
-kubectl apply -f ./azm-config.map.yaml
-```
+   ```bash
+   kubectl apply -f ./azm-config.map.yaml
+   ```
 
 ## Install Dapr with JSON formatted logs
 
-1. Install Dapr with enabling JSON-formatted logs
+1. Install Dapr with JSON-formatted logs enabled.
 
-```bash
-helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
-```
+   ```bash
+   helm install dapr dapr/dapr --namespace dapr-system --set global.logAsJson=true
+   ```
 
-2. Enable JSON formatted log in Dapr sidecar and add Prometheus annotations.
+1. Enable JSON formatted log in Dapr sidecar and add Prometheus annotations.
 
-> Note: OMS Agent scrapes the metrics only if replicaset has Prometheus annotations.
+   > Note: The Azure Monitor Agents (AMA) only send the metrics if the Prometheus annotations are set.
 
-Add `dapr.io/log-as-json: "true"` annotation to your deployment yaml.
+   Add `dapr.io/log-as-json: "true"` annotation to your deployment yaml.
 
-Example:
-```yaml
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: pythonapp
-  namespace: default
-  labels:
-    app: python
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: python
-  template:
-    metadata:
-      labels:
-        app: python
-      annotations:
-        dapr.io/enabled: "true"
-        dapr.io/app-id: "pythonapp"
-        dapr.io/log-as-json: "true"
-        prometheus.io/scrape: "true"
-        prometheus.io/port: "9090"
-        prometheus.io/path: "/"
-
-...
-```
+   Example:
+
+   ```yaml
+   apiVersion: apps/v1
+   kind: Deployment
+   metadata:
+     name: pythonapp
+     namespace: default
+     labels:
+       app: python
+   spec:
+     replicas: 1
+     selector:
+       matchLabels:
+         app: python
+     template:
+       metadata:
+         labels:
+           app: python
+         annotations:
+           dapr.io/enabled: "true"
+           dapr.io/app-id: "pythonapp"
+           dapr.io/log-as-json: "true"
+           prometheus.io/scrape: "true"
+           prometheus.io/port: "9090"
+           prometheus.io/path: "/"
+
+   ...
+   ```
 
 ## Search metrics and logs with Azure Monitor
 
-1. Go to Azure Monitor
+1. Go to Azure Monitor in the Azure portal.
 
-2. Search Dapr logs
+1. Search Dapr **Logs**.
 
-Here is an example query, to parse JSON formatted logs and query logs from dapr system processes.
+   Here is an example query that parses JSON formatted logs and queries logs from Dapr system processes.
 
-```
-ContainerLog
-| extend parsed=parse_json(LogEntry)
-| project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'],level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
-| where level != ""
-| sort by Time
-```
+   ```
+   ContainerLog
+   | extend parsed=parse_json(LogEntry)
+   | project Time=todatetime(parsed['time']), app_id=parsed['app_id'], scope=parsed['scope'], level=parsed['level'], msg=parsed['msg'], type=parsed['type'], ver=parsed['ver'], instance=parsed['instance']
+   | where level != ""
+   | sort by Time
+   ```
 
-3. Search metrics
+1. Search **Metrics**.
 
-This query, queries process_resident_memory_bytes Prometheus metrics for Dapr system processes and renders timecharts
+   This query queries `process_resident_memory_bytes` Prometheus metrics for Dapr system processes and renders timecharts.
 
-```
-InsightsMetrics
-| where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
-| extend tags=parse_json(Tags)
-| project TimeGenerated, Name, Val, app=tostring(tags['app'])
-| summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
-| where app startswith "dapr-"
-| render timechart
-```
+   ```
+   InsightsMetrics
+   | where Namespace == "prometheus" and Name == "process_resident_memory_bytes"
+   | extend tags=parse_json(Tags)
+   | project TimeGenerated, Name, Val, app=tostring(tags['app'])
+   | summarize memInBytes=percentile(Val, 99) by bin(TimeGenerated, 1m), app
+   | where app startswith "dapr-"
+   | render timechart
+   ```
 
-# References
+## References
 
-* [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
-* [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
-* [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)
+- [Configure scraping of Prometheus metrics with Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-prometheus-integration)
+- [Configure agent data collection for Azure Monitor for containers](https://docs.microsoft.com/azure/azure-monitor/insights/container-insights-agent-config)
+- [Azure Monitor Query](https://docs.microsoft.com/azure/azure-monitor/log-query/query-language)

From b9759702d581df0a18b7a6d5cba6eaf9589ac705 Mon Sep 17 00:00:00 2001
From: Aaron Crawfis
Date: Fri, 16 Jun 2023 21:10:04 -0700
Subject: [PATCH 06/10] Manually build website and upload artifacts (#3559)

* update dapr publish command

Signed-off-by: Hannah Hunter
Signed-off-by: Aaron Crawfis

* Split workflow into two steps

Signed-off-by: Aaron Crawfis

* Update upload path

Signed-off-by: Aaron Crawfis

* Add concurrency check

Signed-off-by: Aaron Crawfis

* Consolidate jobs

Signed-off-by: Aaron Crawfis

---------

Signed-off-by: Hannah Hunter
Signed-off-by: Aaron Crawfis
Co-authored-by: Hannah Hunter
Co-authored-by: Mark Fussell
---
 .github/workflows/website-root.yml | 67 ++++++++++++++++++++++--------
 1 file changed, 49 insertions(+), 18 deletions(-)

diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml
index 0265713f7..98d2aec11 100644
--- a/.github/workflows/website-root.yml
+++ b/.github/workflows/website-root.yml
@@ -1,6 +1,7 @@
 name: Azure Static Web App Root
 
 on:
+  workflow_dispatch:
   push:
     branches:
       - v1.11
     branches:
       - v1.11
 
+concurrency:
+  # Cancel the previously triggered build for only PR build.
+  group: website-${{ github.event.pull_request.number || github.sha }}
+  cancel-in-progress: true
+
 jobs:
   build_and_deploy_job:
-    if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed')
+    name: Build Hugo Website
+    if: github.event.action != 'closed'
     runs-on: ubuntu-latest
-    name: Build and Deploy Job
+    env:
+      SWA_BASE: 'proud-bay-0e9e0e81e'
+      HUGO_ENV: production
     steps:
-      - uses: actions/checkout@v3
+      - name: Checkout docs repo
+        uses: actions/checkout@v3
         with:
-          submodules: recursive
-          fetch-depth: 0
+          submodules: true
+      - name: Setup Node
+        uses: actions/setup-node@v2
+        with:
+          node-version: '14'
+      - name: Setup Hugo
+        uses: peaceiris/actions-hugo@v2.5.0
+        with:
+          hugo-version: 0.102.3
+          extended: true
       - name: Setup Docsy
-        run: cd daprdocs && git submodule update --init --recursive && sudo npm install -D --save autoprefixer && sudo npm install -D --save postcss-cli
-      - name: Build And Deploy
-        id: builddeploy
+        run: |
+          cd daprdocs
+          git submodule update --init --recursive
+          sudo npm install -D --save autoprefixer
+          sudo npm install -D --save postcss-cli
+      - name: Build Hugo Website
+        run: |
+          cd daprdocs
+          git config --global --add safe.directory /github/workspace
+          if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then
+            STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/"
+          fi
+          hugo ${STAGING_URL+-b "$STAGING_URL"}
+      - name: Deploy docs site
         uses: Azure/static-web-apps-deploy@v1
-        env:
-          HUGO_ENV: production
-          HUGO_VERSION: "0.100.2"
         with:
           azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
-          skip_deploy_on_missing_secrets: true
-          repo_token: ${{ secrets.GITHUB_TOKEN }} # Used for Github integrations (i.e. PR comments)
+          repo_token: ${{ secrets.GITHUB_TOKEN }}
           action: "upload"
-          app_location: "/daprdocs"
-          app_build_command: "git config --global --add safe.directory /github/workspace && hugo"
-          output_location: "public"
-          skip_api_build: true
+          app_location: "daprdocs/public/"
+          api_location: "daprdocs/public/"
+          output_location: ""
+          skip_app_build: true
+      - name: Upload Hugo artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: hugo_build
+          path: ./daprdocs/public/
+          if-no-files-found: error
 
-  close_pull_request_job:
+  close_staging_site:
     if: github.event_name == 'pull_request' && github.event.action == 'closed'
     runs-on: ubuntu-latest
     name: Close Pull Request Job
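The build step PATCH 06 introduces can be reproduced locally before pushing. A minimal sketch, assuming Hugo extended and Node are installed and the submodules are initialized; the staging URL shape comes from the workflow above, while the PR number is illustrative:

```bash
# Mirror the workflow's manual Hugo build for a hypothetical PR #1234.
cd daprdocs
git submodule update --init --recursive
npm install -D --save autoprefixer postcss-cli
STAGING_URL="https://proud-bay-0e9e0e81e-1234.westus2.azurestaticapps.net/"
hugo -b "$STAGING_URL"   # output lands in daprdocs/public/
```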
From 0d0d29ac92cfba8bb1dcc5e526e6f39fc88354a3 Mon Sep 17 00:00:00 2001
From: Aaron Crawfis
Date: Fri, 16 Jun 2023 21:18:24 -0700
Subject: [PATCH 07/10] Upgrade Algolia search to v3 (#3560)

* update dapr publish command

Signed-off-by: Hannah Hunter
Signed-off-by: Aaron Crawfis

* Split workflow into two steps

Signed-off-by: Aaron Crawfis

* Update upload path

Signed-off-by: Aaron Crawfis

* Add concurrency check

Signed-off-by: Aaron Crawfis

* Add Algolia workflow script and step

Signed-off-by: Aaron Crawfis

* Update Algolia box to v3

Signed-off-by: Aaron Crawfis

* Fix secret name

Signed-off-by: Aaron Crawfis

* Override default search bar in Docsy v3

Signed-off-by: Aaron Crawfis

* Remove temporary comment

Signed-off-by: Aaron Crawfis

* Consolidate build and deploy

Signed-off-by: Aaron Crawfis

---------

Signed-off-by: Hannah Hunter
Signed-off-by: Aaron Crawfis
Co-authored-by: Hannah Hunter
Co-authored-by: Mark Fussell
---
 .github/scripts/algolia.py                    | 118 ++++++++++++++++++
 .github/workflows/website-root.yml            |  26 ++++
 daprdocs/layouts/partials/hooks/body-end.html |  20 ++-
 daprdocs/layouts/partials/hooks/head-end.html |   2 +-
 daprdocs/layouts/partials/search-input.html   |  30 +++++
 5 files changed, 182 insertions(+), 14 deletions(-)
 create mode 100644 .github/scripts/algolia.py
 create mode 100644 daprdocs/layouts/partials/search-input.html

diff --git a/.github/scripts/algolia.py b/.github/scripts/algolia.py
new file mode 100644
index 000000000..5071ea580
--- /dev/null
+++ b/.github/scripts/algolia.py
@@ -0,0 +1,118 @@
+import os
+from re import S
+import sys
+import json
+from bs4 import BeautifulSoup
+from algoliasearch.search_client import SearchClient
+
+url = "docs.dapr.io"
+if len(sys.argv) > 1:
+    starting_directory = os.path.join(os.getcwd(), str(sys.argv[1]))
+else:
+    starting_directory = os.getcwd()
+
+ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID')
+ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY')
+ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME')
+
+client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
+index = client.init_index(ALGOLIA_INDEX_NAME)
+
+excluded_files = [
+    "404.html",
+]
+
+excluded_directories = [
+    "zh-hans",
+]
+
+rankings = {
+    "Getting started": 0,
+    "Concepts": 100,
+    "Developing applications": 200,
+    "Operations": 300,
+    "Reference": 400,
+    "Contributing": 500,
+    "Home": 600
+}
+
+def scan_directory(directory: str, pages: list):
+    if os.path.basename(directory) in excluded_directories:
+        print(f'Skipping directory: {directory}')
+        return
+    for file in os.listdir(directory):
+        path = os.path.join(directory, file)
+        if os.path.isfile(path):
+            if file.endswith(".html") and file not in excluded_files:
+                if '<meta name="searchExclude">' not in open(path, encoding="utf8").read():
+                    print(f'Indexing: {path}')
+                    pages.append(path)
+                else:
+                    print(f'Skipping hidden page: {path}')
+        else:
+            scan_directory(path, pages)
+
+def
 parse_file(path: str):
+    data = {}
+    data["hierarchy"] = {}
+    data["rank"] = 999
+    data["subrank"] = 99
+    data["type"] = "lvl2"
+    data["lvl0"] = ""
+    data["lvl1"] = ""
+    data["lvl2"] = ""
+    data["lvl3"] = ""
+    text = ""
+    subrank = 0
+    with open(path, "r", errors='ignore') as file:
+        content = file.read()
+    soup = BeautifulSoup(content, "html.parser")
+    for meta in soup.find_all("meta"):
+        if meta.get("name") == "description":
+            data["lvl2"] = meta.get("content")
+            data["hierarchy"]["lvl1"] = meta.get("content")
+        elif meta.get("property") == "og:title":
+            data["lvl0"] = meta.get("content")
+            data["hierarchy"]["lvl0"] = meta.get("content")
+            data["hierarchy"]["lvl2"] = meta.get("content")
+        elif meta.get("property") == "og:url":
+            data["url"] = meta.get("content")
+            data["path"] = meta.get("content").split(url)[1]
+            data["objectID"] = meta.get("content").split(url)[1]
+    breadcrumbs = soup.find_all("li", class_="breadcrumb-item")
+    try:
+        subrank = len(breadcrumbs)
+        data["subrank"] = subrank
+    except:
+        subrank = 99
+        data["subrank"] = 99
+    for bc in breadcrumbs:
+        section = bc.text.strip()
+        data["lvl1"] = section
+        data["hierarchy"]["lvl0"] = section
+        try:
+            data["rank"] = rankings[section] + subrank
+        except:
+            print(f"Rank not found for section {section}")
+            data["rank"] = 998
+        break
+    for p in soup.find_all("p"):
+        if p.text != "":
+            text = text + p.text
+    data["text"] = text
+    return data
+
+def index_payload(payload):
+    res = index.replace_all_objects(payload)
+    res.wait()
+
+
+if __name__ == "__main__":
+    pages = []
+    payload = []
+    scan_directory(starting_directory, pages)
+    for page in pages:
+        data = parse_file(page)
+        if "objectID" in data:
+            payload.append(data)
+    index_payload(payload)

diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml
index 98d2aec11..ed8c3b491 100644
--- a/.github/workflows/website-root.yml
+++ b/.github/workflows/website-root.yml
@@ -79,3 +79,29 @@ jobs:
     with:
       azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
       action: "close"
+
+  algolia_index:
+    name: Index site for Algolia
+    if: github.event_name == 'push'
+    needs: ['build_and_deploy_job']
+    runs-on: ubuntu-latest
+    env:
+      ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
+      ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }}
+      ALGOLIA_INDEX_NAME: daprdocs
+    steps:
+      - name: Checkout docs repo
+        uses: actions/checkout@v2
+        with:
+          submodules: false
+      - name: Download Hugo artifacts
+        uses: actions/download-artifact@v3
+        with:
+          name: hugo_build
+          path: site/
+      - name: Install Python packages
+        run: |
+          pip install --upgrade bs4
+          pip install --upgrade 'algoliasearch>=2.0,<3.0'
+      - name: Index site
+        run: python ./.github/scripts/algolia.py ./site

diff --git a/daprdocs/layouts/partials/hooks/body-end.html b/daprdocs/layouts/partials/hooks/body-end.html
index 695cf8638..79cbc117c 100644
--- a/daprdocs/layouts/partials/hooks/body-end.html
+++ b/daprdocs/layouts/partials/hooks/body-end.html
@@ -1,19 +1,13 @@
+
+
 {{ with .Site.Params.algolia_docsearch }}
-
-
+
 {{ end }}
-
-
\ No newline at end of file

diff --git a/daprdocs/layouts/partials/hooks/head-end.html b/daprdocs/layouts/partials/hooks/head-end.html
index 804fe38e9..03e91efa2 100644
--- a/daprdocs/layouts/partials/hooks/head-end.html
+++ b/daprdocs/layouts/partials/hooks/head-end.html
@@ -1,3 +1,3 @@
 {{ with .Site.Params.algolia_docsearch }}
-
+
 {{ end }}
\ No newline at end of file

diff --git a/daprdocs/layouts/partials/search-input.html b/daprdocs/layouts/partials/search-input.html
new file mode 100644
index 000000000..22e900247
--- /dev/null
+++ b/daprdocs/layouts/partials/search-input.html
@@ -0,0 +1,30 @@
+{{ if .Site.Params.gcs_engine_id -}}
+
+{{ else if .Site.Params.algolia_docsearch -}}
+
+{{ else if .Site.Params.offlineSearch -}}
+{{ $offlineSearchIndex := resources.Get "json/offline-search-index.json" | resources.ExecuteAsTemplate "offline-search-index.json" . -}}
+{{ if hugo.IsProduction -}}
+{{/* Use `md5` as finger print hash function to shorten file name to avoid `file name too long` error. */ -}}
+{{ $offlineSearchIndex = $offlineSearchIndex | fingerprint "md5" -}}
+{{ end -}}
+{{ $offlineSearchLink := $offlineSearchIndex.RelPermalink -}}
+
+
+{{ end -}}
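The indexing job PATCH 07 adds can be dry-run locally against an existing Hugo build. A minimal sketch, assuming a built site in `./site` and that the three environment variables point at a throwaway Algolia index (all values below are placeholders, not taken from the patch):

```bash
# Install the script's dependencies, matching the workflow step above.
pip install --upgrade bs4 'algoliasearch>=2.0,<3.0'

# Point the script at a scratch index so production data is untouched.
export ALGOLIA_APP_ID="<your-app-id>"
export ALGOLIA_API_WRITE_KEY="<write-key>"
export ALGOLIA_INDEX_NAME="daprdocs-test"

python ./.github/scripts/algolia.py ./site
```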
From a229194a359d36411e365d4cf487e69ad635dad3 Mon Sep 17 00:00:00 2001
From: Josh van Leeuwen
Date: Mon, 19 Jun 2023 17:07:19 +0100
Subject: [PATCH 08/10] Fix JSON in pubsub reference API docs (#3558)

Signed-off-by: joshvanl
Co-authored-by: Mark Fussell
---
 daprdocs/content/en/reference/api/pubsub_api.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/daprdocs/content/en/reference/api/pubsub_api.md b/daprdocs/content/en/reference/api/pubsub_api.md
index 68619a753..03068cc3b 100644
--- a/daprdocs/content/en/reference/api/pubsub_api.md
+++ b/daprdocs/content/en/reference/api/pubsub_api.md
@@ -179,7 +179,7 @@ Example:
       "topic": "newOrder",
       "route": "/orders",
       "metadata": {
-        "rawPayload": "true",
+        "rawPayload": "true"
       }
     }
   ]

From 9d2ba33fd43def44371f414ea53c2b54c0936d37 Mon Sep 17 00:00:00 2001
From: Aaron Crawfis
Date: Mon, 19 Jun 2023 10:28:17 -0700
Subject: [PATCH 09/10] Add skip on missing secrets config

Signed-off-by: Aaron Crawfis
---
 .github/workflows/website-root.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml
index ed8c3b491..0e160a426 100644
--- a/.github/workflows/website-root.yml
+++ b/.github/workflows/website-root.yml
@@ -61,6 +61,7 @@ jobs:
           api_location: "daprdocs/public/"
           output_location: ""
           skip_app_build: true
+          skip_deploy_on_missing_secretsL: true
       - name: Upload Hugo artifacts
         uses: actions/upload-artifact@v3
         with:

From a0e76ec1e0c7195cf2f8c2de440bae87751af75a Mon Sep 17 00:00:00 2001
From: Aaron Crawfis
Date: Mon, 19 Jun 2023 10:28:49 -0700
Subject: [PATCH 10/10] Remove erroneous character

Signed-off-by: Aaron Crawfis
---
 .github/workflows/website-root.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml
index 0e160a426..4e22b2c59 100644
--- a/.github/workflows/website-root.yml
+++ b/.github/workflows/website-root.yml
@@ -61,7 +61,7 @@ jobs:
           api_location: "daprdocs/public/"
           output_location: ""
           skip_app_build: true
-          skip_deploy_on_missing_secretsL: true
+          skip_deploy_on_missing_secrets: true
       - name: Upload Hugo artifacts
        uses: actions/upload-artifact@v3
        with:
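The stray character PATCH 09 introduces and PATCH 10 reverts is the kind of slip a workflow linter can surface before merge. A minimal sketch using actionlint, which is an assumed tooling choice rather than something this series uses; whether the unknown input is flagged depends on the linter knowing the action's metadata:

```bash
# Lint the workflow; an unrecognized "with:" key such as
# "skip_deploy_on_missing_secretsL" may be reported as an error.
go install github.com/rhysd/actionlint/cmd/actionlint@latest
actionlint .github/workflows/website-root.yml
```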