Merge pull request #15153 from crazy-max/fix-robots-txt

allow search engines to crawl only on production
This commit is contained in:
Sebastiaan van Stijn 2022-07-19 18:38:32 +02:00 committed by GitHub
commit f3be551c98
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 11 additions and 1 deletions

View File

@ -30,7 +30,7 @@
{%- endif -%}
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
{%- if page.sitemap == false or site.GH_ENV == "gh_pages" %}
{%- if page.sitemap == false or jekyll.environment != 'production' %}
<meta name="robots" content="noindex"/>
{%- endif %}
{%- if jekyll.environment == 'production' and site.google_analytics != '' -%}{%- include analytics/google_analytics.html GOOGLE_ID=site.google_analytics -%}{%- endif -%}

View File

@ -1,3 +1,7 @@
---
layout: null
---
{%- if jekyll.environment == 'production' -%}
User-agent: *
# Docker Engine archives
@ -26,3 +30,9 @@ Disallow: /ee/
Disallow: /apidocs/v*
Sitemap: https://docs.docker.com/sitemap.xml
{%- else -%}
# Disable all indexing on staging websites and Netlify previews to prevent
# them showing up in search results.
User-agent: *
Disallow: /
{%- endif %}