FIX: Use new robots.txt API (#101)

The old robots.txt was invalid because "User-agent: *" appeared twice:
the plugin's template override added a second group on top of the one
core already emits. Using the robots_info API instead merges the
Disallow rule into the existing group, so the generated robots.txt is valid.
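
For illustration, roughly what the two approaches produce (the /admin/ entry
below is only a placeholder for whatever core already disallows on a given
site). The old template override appended a second group for the same agent:

    User-agent: *
    Disallow: /admin/
    User-agent: *
    Disallow: /docs/

With the robots_info API the plugin's rule is merged into the existing group:

    User-agent: *
    Disallow: /admin/
    Disallow: /docs/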
Bianca Nenciu 2022-07-13 19:53:56 +03:00 committed by GitHub
parent 632042981f
commit 45386920ca
3 changed files with 29 additions and 4 deletions


@@ -1,4 +0,0 @@
-<%- if SiteSetting.docs_enabled%>
-User-agent: *
-Disallow: /docs/
-<% end %>


@@ -44,4 +44,17 @@ after_initialize do
   add_to_class(:topic_query, :list_docs_topics) do
     default_results(@options)
   end
+
+  on(:robots_info) do |robots_info|
+    robots_info[:agents] ||= []
+    any_user_agent = robots_info[:agents].find { |info| info[:name] == "*" }
+
+    if !any_user_agent
+      any_user_agent = { name: "*" }
+      robots_info[:agents] << any_user_agent
+    end
+
+    any_user_agent[:disallow] ||= []
+    any_user_agent[:disallow] << "/docs/"
+  end
 end
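
As a rough sketch of what the handler above does (the hash contents here are
illustrative, not the exact payload core passes to :robots_info): if the
agents list already contains a "*" group, the /docs/ rule is appended to it;
otherwise a "*" group is created first.

    # illustrative state before the handler runs
    robots_info = { agents: [{ name: "*", disallow: ["/admin/"] }] }

    # illustrative state after the handler runs
    robots_info = { agents: [{ name: "*", disallow: ["/admin/", "/docs/"] }] }

Either way the plugin no longer emits its own "User-agent: *" line, which is
what removes the duplicate group from the generated file.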


@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe RobotsTxtController do
+  before do
+    SiteSetting.docs_enabled = true
+  end
+
+  it 'adds /docs/ to robots.txt' do
+    get '/robots.txt'
+
+    expect(response.body).to include('User-agent: *')
+    expect(response.body).to include('Disallow: /docs/')
+  end
+end
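
To run the new spec locally, something like the following should work from a
Discourse checkout with the plugin installed (the plugin directory name and
spec path are assumptions; adjust them to the actual layout):

    LOAD_PLUGINS=1 bundle exec rspec plugins/discourse-docs/spec/requests/robots_txt_controller_spec.rb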