FIX: Use new robots.txt API (#101)
The old robots.txt was invalid because "User-agent: *" appeared twice. Generating the rules through Discourse's robots.txt API merges them into a single block per agent, producing a valid file.
parent 632042981f
commit 45386920ca
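To illustrate the problem (a reconstructed example, not output captured from this commit; the /admin/ rule stands in for whatever core already disallows):

    # Invalid: two blocks for the same agent; crawlers may honor only one
    User-agent: *
    Disallow: /admin/

    User-agent: *
    Disallow: /docs/

    # Valid: rules merged under a single block, as the API now produces
    User-agent: *
    Disallow: /admin/
    Disallow: /docs/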
(deleted robots.txt ERB template)
@@ -1,4 +0,0 @@
-<%- if SiteSetting.docs_enabled%>
-User-agent: *
-Disallow: /docs/
-<% end %>
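Appended after Discourse core's own robots.txt, which already opens with a User-agent: * block, this template emitted a second, duplicate block; that is the invalid output the commit message describes.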
plugin.rb (13 additions)
@@ -44,4 +44,17 @@ after_initialize do
   add_to_class(:topic_query, :list_docs_topics) do
     default_results(@options)
   end
+
+  on(:robots_info) do |robots_info|
+    robots_info[:agents] ||= []
+
+    any_user_agent = robots_info[:agents].find { |info| info[:name] == "*" }
+    if !any_user_agent
+      any_user_agent = { name: "*" }
+      robots_info[:agents] << any_user_agent
+    end
+
+    any_user_agent[:disallow] ||= []
+    any_user_agent[:disallow] << "/docs/"
+  end
 end
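For context, here is a minimal sketch of how an agents array shaped like the one the hook builds could be serialized into robots.txt directives. This is illustrative only; the real rendering happens in Discourse core, which fires the :robots_info event when /robots.txt is requested, and the "/admin/" entry is an assumed example of a rule contributed elsewhere:

    # Hypothetical rendering sketch; not Discourse's actual implementation.
    # The hash shape matches what the on(:robots_info) hook above builds.
    robots_info = { agents: [{ name: "*", disallow: ["/admin/", "/docs/"] }] }

    body = robots_info[:agents].map do |agent|
      lines = ["User-agent: #{agent[:name]}"]
      lines += Array(agent[:disallow]).map { |path| "Disallow: #{path}" }
      lines.join("\n")
    end.join("\n\n")

    puts body
    # User-agent: *
    # Disallow: /admin/
    # Disallow: /docs/

Because the hook finds an existing "*" agent before creating one, the plugin's rule lands inside the block core already declares instead of opening a duplicate.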
(new file: spec for RobotsTxtController)
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe RobotsTxtController do
+  before do
+    SiteSetting.docs_enabled = true
+  end
+
+  it 'adds /docs/ to robots.txt' do
+    get '/robots.txt'
+
+    expect(response.body).to include('User-agent: *')
+    expect(response.body).to include('Disallow: /docs/')
+  end
+end
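The spec exercises the full request path: GET /robots.txt is served by RobotsTxtController, which fires :robots_info, so the plugin's hook runs without any stubbing. In a Discourse development checkout, a plugin spec like this is typically run through the plugin:spec rake task (the plugin directory name "discourse-docs" is assumed here):

    bundle exec rake plugin:spec["discourse-docs"]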