DEV: Disable the plugin by default (#1511)

…and preserve the current setting on existing sites
This commit is contained in:
Jarek Radosz 2025-07-22 12:05:52 +02:00 committed by GitHub
parent cc77e73cfd
commit f231aad8b5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
238 changed files with 570 additions and 128 deletions

View File

@ -1,6 +1,6 @@
discourse_ai: discourse_ai:
discourse_ai_enabled: discourse_ai_enabled:
default: true default: false
client: true client: true
ai_artifact_security: ai_artifact_security:
client: true client: true

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true

# Post-deploy data fix accompanying the flip of the `discourse_ai_enabled`
# site-setting default from true to false. Sites that installed the plugin
# while it was enabled-by-default may have no explicit row in `site_settings`;
# without this migration the default change would silently disable AI there.
class EnableAiIfAlreadyInstalled < ActiveRecord::Migration[7.2]
def up
# When did this plugin's very first migration run? Presence (and age) of
# that row is the proxy for "the plugin was already installed on this site".
# NOTE(review): assumes `schema_migration_details` records a created_at per
# migration version — Discourse-specific table, not vanilla Rails; confirm.
installed_at = DB.query_single(<<~SQL)&.first
SELECT created_at FROM schema_migration_details WHERE version='20230224165056'
SQL
# The 1-hour window presumably avoids misclassifying brand-new installs that
# run this migration in the same deploy as the plugin's first one — verify.
if installed_at && installed_at < 1.hour.ago
# The plugin was installed before we changed it to be disabled-by-default
# Therefore, if there is no existing database value, enable the plugin
# (ON CONFLICT DO NOTHING preserves any explicit admin choice already stored).
execute <<~SQL
INSERT INTO site_settings(name, data_type, value, created_at, updated_at)
VALUES('discourse_ai_enabled', 5, 't', NOW(), NOW())
ON CONFLICT (name) DO NOTHING
SQL
end
end
# Data-only migration: the prior presence/absence of the setting row cannot
# be reconstructed, so rolling back is deliberately impossible.
def down
raise ActiveRecord::IrreversibleMigration
end
end

View File

@ -10,6 +10,8 @@ RSpec.describe DiscourseAi::Configuration::Feature do
DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) { block.call } DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) { block.call }
end end
before { enable_current_plugin }
describe "#llm_model" do describe "#llm_model" do
context "when persona is not found" do context "when persona is not found" do
it "returns nil when persona_id is invalid" do it "returns nil when persona_id is invalid" do

View File

@ -8,6 +8,8 @@ RSpec.describe DiscourseAi::Configuration::LlmEnumerator do
Fabricate(:automation, script: "llm_report", name: "some automation", enabled: true) Fabricate(:automation, script: "llm_report", name: "some automation", enabled: true)
end end
before { enable_current_plugin }
describe "#values_for_serialization" do describe "#values_for_serialization" do
it "returns an array for that can be used for serialization" do it "returns an array for that can be used for serialization" do
fake_model.destroy! fake_model.destroy!

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe DiscourseAi::Configuration::LlmValidator do RSpec.describe DiscourseAi::Configuration::LlmValidator do
before { enable_current_plugin }
describe "#valid_value?" do describe "#valid_value?" do
context "when the parent module is enabled and we try to reset the selected model" do context "when the parent module is enabled and we try to reset the selected model" do
before do before do

View File

@ -3,6 +3,8 @@
RSpec.describe DiscourseAi::Configuration::SpamDetectionValidator do RSpec.describe DiscourseAi::Configuration::SpamDetectionValidator do
let(:validator) { described_class.new } let(:validator) { described_class.new }
before { enable_current_plugin }
it "always returns true if setting the value to false" do it "always returns true if setting the value to false" do
expect(validator.valid_value?("f")).to eq(true) expect(validator.valid_value?("f")).to eq(true)
end end

View File

@ -8,11 +8,14 @@ require Rails.root.join(
RSpec.describe MigrateSentimentClassificationResultFormat do RSpec.describe MigrateSentimentClassificationResultFormat do
let(:connection) { ActiveRecord::Base.connection } let(:connection) { ActiveRecord::Base.connection }
before { connection.execute(<<~SQL) } before do
enable_current_plugin
connection.execute(<<~SQL)
INSERT INTO classification_results (model_used, classification, created_at, updated_at) VALUES INSERT INTO classification_results (model_used, classification, created_at, updated_at) VALUES
('sentiment', '{"neutral": 65, "negative": 20, "positive": 14}', NOW(), NOW()), ('sentiment', '{"neutral": 65, "negative": 20, "positive": 14}', NOW(), NOW()),
('emotion', '{"sadness": 10, "surprise": 15, "fear": 5, "anger": 20, "joy": 30, "disgust": 8, "neutral": 10}', NOW(), NOW()); ('emotion', '{"sadness": 10, "surprise": 15, "fear": 5, "anger": 20, "joy": 30, "disgust": 8, "neutral": 10}', NOW(), NOW());
SQL SQL
end
after { connection.execute("DELETE FROM classification_results") } after { connection.execute("DELETE FROM classification_results") }

View File

@ -21,6 +21,8 @@ RSpec.describe FixBrokenOpenAiEmbeddingsConfig do
).first ).first
end end
before { enable_current_plugin }
describe "#up" do describe "#up" do
context "when embeddings are already configured" do context "when embeddings are already configured" do
fab!(:embedding_definition) fab!(:embedding_definition)

View File

@ -8,6 +8,8 @@ require Rails.root.join(
RSpec.describe CleanUnusedEmbeddingSearchIndexes do RSpec.describe CleanUnusedEmbeddingSearchIndexes do
let(:connection) { ActiveRecord::Base.connection } let(:connection) { ActiveRecord::Base.connection }
before { enable_current_plugin }
describe "#up" do describe "#up" do
before do before do
# Copied from 20241008054440_create_binary_indexes_for_embeddings # Copied from 20241008054440_create_binary_indexes_for_embeddings

View File

@ -7,7 +7,7 @@ describe Jobs::DetectTranslatePost do
let(:locales) { %w[en ja] } let(:locales) { %w[en ja] }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -7,7 +7,7 @@ describe Jobs::DetectTranslateTopic do
let(:locales) { %w[en ja] } let(:locales) { %w[en ja] }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end
@ -32,7 +32,6 @@ describe Jobs::DetectTranslateTopic do
end end
it "detects locale" do it "detects locale" do
SiteSetting.discourse_ai_enabled = true
allow(DiscourseAi::Translation::TopicLocaleDetector).to receive(:detect_locale).with( allow(DiscourseAi::Translation::TopicLocaleDetector).to receive(:detect_locale).with(
topic, topic,
).and_return("zh_CN") ).and_return("zh_CN")

View File

@ -20,6 +20,8 @@ RSpec.describe Jobs::DigestRagUpload do
end end
before do before do
enable_current_plugin
SiteSetting.ai_embeddings_selected_model = cloudflare_embedding_def.id SiteSetting.ai_embeddings_selected_model = cloudflare_embedding_def.id
SiteSetting.ai_embeddings_enabled = true SiteSetting.ai_embeddings_enabled = true
SiteSetting.authorized_extensions = "txt" SiteSetting.authorized_extensions = "txt"

View File

@ -7,6 +7,7 @@ RSpec.describe Jobs::FastTrackTopicGist do
fab!(:post_2) { Fabricate(:post, topic: topic_1, post_number: 2) } fab!(:post_2) { Fabricate(:post, topic: topic_1, post_number: 2) }
before do before do
enable_current_plugin
assign_fake_provider_to(:ai_summarization_model) assign_fake_provider_to(:ai_summarization_model)
SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summarization_enabled = true
SiteSetting.ai_summary_gists_enabled = true SiteSetting.ai_summary_gists_enabled = true

View File

@ -5,7 +5,10 @@ RSpec.describe Jobs::GenerateInferredConcepts do
fab!(:post) fab!(:post)
fab!(:concept) { Fabricate(:inferred_concept, name: "programming") } fab!(:concept) { Fabricate(:inferred_concept, name: "programming") }
before { SiteSetting.inferred_concepts_enabled = true } before do
enable_current_plugin
SiteSetting.inferred_concepts_enabled = true
end
describe "#execute" do describe "#execute" do
it "does nothing with blank item_ids" do it "does nothing with blank item_ids" do

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe Jobs::GenerateRagEmbeddings do RSpec.describe Jobs::GenerateRagEmbeddings do
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
fab!(:vector_def) { Fabricate(:embedding_definition) } fab!(:vector_def) { Fabricate(:embedding_definition) }

View File

@ -10,7 +10,7 @@ describe Jobs::LocalizeCategories do
end end
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -7,7 +7,7 @@ describe Jobs::LocalizePosts do
let(:locales) { %w[en ja de] } let(:locales) { %w[en ja de] }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -7,7 +7,7 @@ describe Jobs::LocalizeTopics do
let(:locales) { %w[en ja de] } let(:locales) { %w[en ja de] }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -3,6 +3,8 @@
RSpec.describe Jobs::ManageEmbeddingDefSearchIndex do RSpec.describe Jobs::ManageEmbeddingDefSearchIndex do
fab!(:embedding_definition) fab!(:embedding_definition)
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
context "when there is no embedding def" do context "when there is no embedding def" do
it "does nothing" do it "does nothing" do

View File

@ -3,7 +3,10 @@
RSpec.describe Jobs::StreamComposerHelper do RSpec.describe Jobs::StreamComposerHelper do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before { assign_fake_provider_to(:ai_helper_model) } before do
enable_current_plugin
assign_fake_provider_to(:ai_helper_model)
end
describe "#execute" do describe "#execute" do
let!(:input) { "I liek to eet pie fur brakefast becuz it is delishus." } let!(:input) { "I liek to eet pie fur brakefast becuz it is delishus." }

View File

@ -17,6 +17,7 @@ RSpec.describe Jobs::StreamDiscordReply, type: :job do
fab!(:persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } fab!(:persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) }
before do before do
enable_current_plugin
SiteSetting.ai_discord_search_enabled = true SiteSetting.ai_discord_search_enabled = true
SiteSetting.ai_discord_search_mode = "persona" SiteSetting.ai_discord_search_mode = "persona"
SiteSetting.ai_discord_search_persona = persona.id SiteSetting.ai_discord_search_persona = persona.id

View File

@ -3,6 +3,8 @@
RSpec.describe Jobs::StreamDiscoverReply do RSpec.describe Jobs::StreamDiscoverReply do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
fab!(:user) fab!(:user)
fab!(:llm_model) fab!(:llm_model)

View File

@ -3,7 +3,10 @@
RSpec.describe Jobs::StreamPostHelper do RSpec.describe Jobs::StreamPostHelper do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before { assign_fake_provider_to(:ai_helper_model) } before do
enable_current_plugin
assign_fake_provider_to(:ai_helper_model)
end
describe "#execute" do describe "#execute" do
fab!(:topic) fab!(:topic)

View File

@ -3,6 +3,8 @@
RSpec.describe Jobs::StreamTopicAiSummary do RSpec.describe Jobs::StreamTopicAiSummary do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
fab!(:topic) { Fabricate(:topic, highest_post_number: 2) } fab!(:topic) { Fabricate(:topic, highest_post_number: 2) }
fab!(:post_1) { Fabricate(:post, topic: topic, post_number: 1) } fab!(:post_1) { Fabricate(:post, topic: topic, post_number: 1) }

View File

@ -5,7 +5,7 @@ describe Jobs::CategoriesLocaleDetectionBackfill do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -24,6 +24,8 @@ RSpec.describe Jobs::EmbeddingsBackfill do
fab!(:embedding_array) { Array.new(1024) { 1 } } fab!(:embedding_array) { Array.new(1024) { 1 } }
before do before do
enable_current_plugin
SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_selected_model = vector_def.id
SiteSetting.ai_embeddings_enabled = true SiteSetting.ai_embeddings_enabled = true
SiteSetting.ai_embeddings_backfill_batch_size = 1 SiteSetting.ai_embeddings_backfill_batch_size = 1

View File

@ -5,6 +5,7 @@ RSpec.describe Jobs::GenerateConceptsFromPopularItems do
fab!(:post) { Fabricate(:post, like_count: 8, post_number: 2) } fab!(:post) { Fabricate(:post, like_count: 8, post_number: 2) }
before do before do
enable_current_plugin
SiteSetting.inferred_concepts_enabled = true SiteSetting.inferred_concepts_enabled = true
SiteSetting.inferred_concepts_daily_topics_limit = 20 SiteSetting.inferred_concepts_daily_topics_limit = 20
SiteSetting.inferred_concepts_daily_posts_limit = 30 SiteSetting.inferred_concepts_daily_posts_limit = 30

View File

@ -2,13 +2,13 @@
describe Jobs::PostLocalizationBackfill do describe Jobs::PostLocalizationBackfill do
before do before do
enable_current_plugin
SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.ai_translation_backfill_hourly_rate = 100
SiteSetting.content_localization_supported_locales = "en" SiteSetting.content_localization_supported_locales = "en"
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end
SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_enabled = true
SiteSetting.discourse_ai_enabled = true
end end
it "does not enqueue post translation when translator disabled" do it "does not enqueue post translation when translator disabled" do
@ -36,7 +36,6 @@ describe Jobs::PostLocalizationBackfill do
end end
it "does not enqueue post translation if backfill limit is set to 0" do it "does not enqueue post translation if backfill limit is set to 0" do
SiteSetting.discourse_ai_enabled = true
SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_enabled = true
SiteSetting.ai_translation_backfill_hourly_rate = 0 SiteSetting.ai_translation_backfill_hourly_rate = 0
@ -46,7 +45,6 @@ describe Jobs::PostLocalizationBackfill do
end end
it "enqueues post translation with correct limit" do it "enqueues post translation with correct limit" do
SiteSetting.discourse_ai_enabled = true
SiteSetting.ai_translation_enabled = true SiteSetting.ai_translation_enabled = true
SiteSetting.ai_translation_backfill_hourly_rate = 100 SiteSetting.ai_translation_backfill_hourly_rate = 100

View File

@ -5,7 +5,7 @@ describe Jobs::PostsLocaleDetectionBackfill do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe Jobs::RemoveOrphanedEmbeddings do RSpec.describe Jobs::RemoveOrphanedEmbeddings do
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
fab!(:embedding_definition) fab!(:embedding_definition)
fab!(:embedding_definition_2) { Fabricate(:embedding_definition) } fab!(:embedding_definition_2) { Fabricate(:embedding_definition) }

View File

@ -3,6 +3,8 @@
require_relative "../../support/sentiment_inference_stubs" require_relative "../../support/sentiment_inference_stubs"
RSpec.describe Jobs::SentimentBackfill do RSpec.describe Jobs::SentimentBackfill do
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
fab!(:post) fab!(:post)

View File

@ -8,6 +8,7 @@ RSpec.describe Jobs::SummariesBackfill do
let(:intervals) { 12 } # budget is split into intervals. Job runs every five minutes. let(:intervals) { 12 } # budget is split into intervals. Job runs every five minutes.
before do before do
enable_current_plugin
assign_fake_provider_to(:ai_summarization_model) assign_fake_provider_to(:ai_summarization_model)
SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summarization_enabled = true
SiteSetting.ai_summary_backfill_maximum_topics_per_hour = limit SiteSetting.ai_summary_backfill_maximum_topics_per_hour = limit

View File

@ -5,7 +5,7 @@ describe Jobs::TopicsLocaleDetectionBackfill do
subject(:job) { described_class.new } subject(:job) { described_class.new }
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
Fabricate(:fake_model).tap do |fake_llm| Fabricate(:fake_model).tap do |fake_llm|
SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}") SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
end end

View File

@ -6,13 +6,14 @@ RSpec.describe Jobs::SharedConversationAdjustUploadSecurity do
fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") } fab!(:claude_2) { Fabricate(:llm_model, name: "claude-2") }
fab!(:bot_user) do fab!(:bot_user) do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
toggle_enabled_bots(bots: [claude_2]) toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_allowed_groups = "10" SiteSetting.ai_bot_allowed_groups = "10"
SiteSetting.ai_bot_public_sharing_allowed_groups = "10" SiteSetting.ai_bot_public_sharing_allowed_groups = "10"
claude_2.reload.user claude_2.reload.user
end end
fab!(:user) fab!(:user)
fab!(:topic) { Fabricate(:private_message_topic, user: user, recipient: bot_user) } fab!(:topic) { Fabricate(:private_message_topic, user: user, recipient: bot_user) }
fab!(:post_1) { Fabricate(:post, topic: topic, user: bot_user) } fab!(:post_1) { Fabricate(:post, topic: topic, user: bot_user) }
@ -23,6 +24,8 @@ RSpec.describe Jobs::SharedConversationAdjustUploadSecurity do
described_class.new.execute(params) described_class.new.execute(params)
end end
before { enable_current_plugin }
context "when conversation is created" do context "when conversation is created" do
let(:params) { { conversation_id: conversation.id } } let(:params) { { conversation_id: conversation.id } }

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
describe DiscourseAi::Completions::AnthropicMessageProcessor do describe DiscourseAi::Completions::AnthropicMessageProcessor do
before { enable_current_plugin }
it "correctly handles and combines partial thinking chunks into complete thinking objects" do it "correctly handles and combines partial thinking chunks into complete thinking objects" do
processor = processor =
DiscourseAi::Completions::AnthropicMessageProcessor.new( DiscourseAi::Completions::AnthropicMessageProcessor.new(

View File

@ -3,7 +3,10 @@
describe DiscourseAi::Completions::CancelManager do describe DiscourseAi::Completions::CancelManager do
fab!(:model) { Fabricate(:anthropic_model, name: "test-model") } fab!(:model) { Fabricate(:anthropic_model, name: "test-model") }
before { WebMock.allow_net_connect! } before do
enable_current_plugin
WebMock.allow_net_connect!
end
it "can stop monitoring for cancellation cleanly" do it "can stop monitoring for cancellation cleanly" do
cancel_manager = DiscourseAi::Completions::CancelManager.new cancel_manager = DiscourseAi::Completions::CancelManager.new

View File

@ -6,6 +6,8 @@ RSpec.describe DiscourseAi::Completions::Dialects::ChatGpt do
fab!(:llm_model) { Fabricate(:llm_model, max_prompt_tokens: 8192) } fab!(:llm_model) { Fabricate(:llm_model, max_prompt_tokens: 8192) }
let(:context) { DialectContext.new(described_class, llm_model) } let(:context) { DialectContext.new(described_class, llm_model) }
before { enable_current_plugin }
describe "#translate" do describe "#translate" do
it "translates a prompt written in our generic format to the ChatGPT format" do it "translates a prompt written in our generic format to the ChatGPT format" do
open_ai_version = [ open_ai_version = [

View File

@ -3,9 +3,9 @@
RSpec.describe DiscourseAi::Completions::Dialects::Claude do RSpec.describe DiscourseAi::Completions::Dialects::Claude do
fab!(:llm_model) { Fabricate(:anthropic_model, name: "claude-3-opus") } fab!(:llm_model) { Fabricate(:anthropic_model, name: "claude-3-opus") }
let :opus_dialect_klass do let(:opus_dialect_klass) { DiscourseAi::Completions::Dialects::Dialect.dialect_for(llm_model) }
DiscourseAi::Completions::Dialects::Dialect.dialect_for(llm_model)
end before { enable_current_plugin }
describe "#translate" do describe "#translate" do
it "can insert OKs to make stuff interleve properly" do it "can insert OKs to make stuff interleve properly" do

View File

@ -27,6 +27,8 @@ end
RSpec.describe DiscourseAi::Completions::Dialects::Dialect do RSpec.describe DiscourseAi::Completions::Dialects::Dialect do
fab!(:llm_model) fab!(:llm_model)
before { enable_current_plugin }
describe "#translate" do describe "#translate" do
let(:five_token_msg) { "This represents five tokens." } let(:five_token_msg) { "This represents five tokens." }
let(:tools) do let(:tools) do

View File

@ -6,6 +6,8 @@ RSpec.describe DiscourseAi::Completions::Dialects::Gemini do
fab!(:model) { Fabricate(:gemini_model) } fab!(:model) { Fabricate(:gemini_model) }
let(:context) { DialectContext.new(described_class, model) } let(:context) { DialectContext.new(described_class, model) }
before { enable_current_plugin }
describe "#translate" do describe "#translate" do
it "translates a prompt written in our generic format to the Gemini format" do it "translates a prompt written in our generic format to the Gemini format" do
gemini_version = { gemini_version = {

View File

@ -11,6 +11,8 @@ RSpec.describe DiscourseAi::Completions::Dialects::Mistral do
UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id) UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id)
end end
before { enable_current_plugin }
it "does not include user names" do it "does not include user names" do
prompt = prompt =
DiscourseAi::Completions::Prompt.new( DiscourseAi::Completions::Prompt.new(

View File

@ -5,6 +5,8 @@ RSpec.describe DiscourseAi::Completions::Dialects::Nova do
let(:nova_dialect_klass) { DiscourseAi::Completions::Dialects::Dialect.dialect_for(llm_model) } let(:nova_dialect_klass) { DiscourseAi::Completions::Dialects::Dialect.dialect_for(llm_model) }
before { enable_current_plugin }
it "finds the right dialect" do it "finds the right dialect" do
expect(nova_dialect_klass).to eq(DiscourseAi::Completions::Dialects::Nova) expect(nova_dialect_klass).to eq(DiscourseAi::Completions::Dialects::Nova)
end end

View File

@ -7,6 +7,8 @@ RSpec.describe DiscourseAi::Completions::Dialects::Ollama do
let(:context) { DialectContext.new(described_class, model) } let(:context) { DialectContext.new(described_class, model) }
let(:dialect_class) { DiscourseAi::Completions::Dialects::Dialect.dialect_for(model) } let(:dialect_class) { DiscourseAi::Completions::Dialects::Dialect.dialect_for(model) }
before { enable_current_plugin }
describe "#translate" do describe "#translate" do
context "when native tool support is enabled" do context "when native tool support is enabled" do
it "translates a prompt written in our generic format to the Ollama format" do it "translates a prompt written in our generic format to the Ollama format" do

View File

@ -3,6 +3,8 @@
require_relative "dialect_context" require_relative "dialect_context"
RSpec.describe DiscourseAi::Completions::Dialects::OllamaTools do RSpec.describe DiscourseAi::Completions::Dialects::OllamaTools do
before { enable_current_plugin }
describe "#translated_tools" do describe "#translated_tools" do
it "translates a tool from our generic format to the Ollama format" do it "translates a tool from our generic format to the Ollama format" do
tool = { tool = {

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe DiscourseAi::Completions::Dialects::OpenAiCompatible do RSpec.describe DiscourseAi::Completions::Dialects::OpenAiCompatible do
before { enable_current_plugin }
context "when system prompts are disabled" do context "when system prompts are disabled" do
fab!(:model) do fab!(:model) do
Fabricate(:vllm_model, vision_enabled: true, provider_params: { disable_system_prompt: true }) Fabricate(:vllm_model, vision_enabled: true, provider_params: { disable_system_prompt: true })

View File

@ -47,6 +47,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Anthropic do
prompt_with_tools prompt_with_tools
end end
before { enable_current_plugin }
it "does not eat spaces with tool calls" do it "does not eat spaces with tool calls" do
body = <<~STRING body = <<~STRING
event: message_start event: message_start

View File

@ -26,6 +26,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::AwsBedrock do
Aws::EventStream::Encoder.new.encode(aws_message) Aws::EventStream::Encoder.new.encode(aws_message)
end end
before { enable_current_plugin }
it "should provide accurate max token count" do it "should provide accurate max token count" do
prompt = DiscourseAi::Completions::Prompt.new("hello") prompt = DiscourseAi::Completions::Prompt.new("hello")
dialect = DiscourseAi::Completions::Dialects::Claude.new(prompt, model) dialect = DiscourseAi::Completions::Dialects::Claude.new(prompt, model)

View File

@ -58,6 +58,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Cohere do
prompt prompt
end end
before { enable_current_plugin }
it "is able to trigger a tool" do it "is able to trigger a tool" do
body = (<<~TEXT).strip body = (<<~TEXT).strip
{"is_finished":false,"event_type":"stream-start","generation_id":"1648206e-1fe4-4bb6-90cf-360dd55f575b"} {"is_finished":false,"event_type":"stream-start","generation_id":"1648206e-1fe4-4bb6-90cf-360dd55f575b"}

View File

@ -153,6 +153,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Gemini do
} }
end end
before { enable_current_plugin }
it "correctly configures thinking when enabled" do it "correctly configures thinking when enabled" do
model.update!(provider_params: { enable_thinking: "true", thinking_tokens: "10000" }) model.update!(provider_params: { enable_thinking: "true", thinking_tokens: "10000" })

View File

@ -95,6 +95,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::HuggingFace do
) )
end end
before { enable_current_plugin }
describe "#perform_completion!" do describe "#perform_completion!" do
context "when using regular mode" do context "when using regular mode" do
context "with simple prompts" do context "with simple prompts" do

View File

@ -27,6 +27,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::AwsBedrock do
Aws::EventStream::Encoder.new.encode(aws_message) Aws::EventStream::Encoder.new.encode(aws_message)
end end
before { enable_current_plugin }
it "should be able to make a simple request" do it "should be able to make a simple request" do
proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}") proxy = DiscourseAi::Completions::Llm.proxy("custom:#{nova_model.id}")

View File

@ -135,6 +135,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Ollama do
EndpointsCompliance.new(self, endpoint, DiscourseAi::Completions::Dialects::Ollama, user) EndpointsCompliance.new(self, endpoint, DiscourseAi::Completions::Dialects::Ollama, user)
end end
before { enable_current_plugin }
describe "#perform_completion!" do describe "#perform_completion!" do
context "when using regular mode" do context "when using regular mode" do
it "completes a trivial prompt and logs the response" do it "completes a trivial prompt and logs the response" do

View File

@ -30,6 +30,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::OpenAi do
prompt prompt
end end
before { enable_current_plugin }
it "can perform simple streaming completion" do it "can perform simple streaming completion" do
response_payload = <<~TEXT response_payload = <<~TEXT
event: response.created event: response.created

View File

@ -174,6 +174,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::OpenAi do
UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id) UploadCreator.new(image100x100, "image.jpg").create_for(Discourse.system_user.id)
end end
before { enable_current_plugin }
describe "max tokens for reasoning models" do describe "max tokens for reasoning models" do
it "uses max_completion_tokens for reasoning models" do it "uses max_completion_tokens for reasoning models" do
model.update!(name: "o3-mini", max_output_tokens: 999) model.update!(name: "o3-mini", max_output_tokens: 999)

View File

@ -6,6 +6,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::OpenRouter do
subject(:endpoint) { described_class.new(open_router_model) } subject(:endpoint) { described_class.new(open_router_model) }
before { enable_current_plugin }
it "supports provider quantization and order selection" do it "supports provider quantization and order selection" do
open_router_model.provider_params["provider_quantizations"] = "int8,int16" open_router_model.provider_params["provider_quantizations"] = "int8,int16"
open_router_model.provider_params["provider_order"] = "Google, Amazon Bedrock" open_router_model.provider_params["provider_order"] = "Google, Amazon Bedrock"

View File

@ -4,6 +4,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::SambaNova do
fab!(:llm_model) { Fabricate(:samba_nova_model) } fab!(:llm_model) { Fabricate(:samba_nova_model) }
let(:llm) { llm_model.to_llm } let(:llm) { llm_model.to_llm }
before { enable_current_plugin }
it "can stream completions" do it "can stream completions" do
body = <<~PARTS body = <<~PARTS
data: {"id": "4c5e4a44-e847-467d-b9cd-d2f6530678cd", "object": "chat.completion.chunk", "created": 1721336361, "model": "llama3-8b", "system_fingerprint": "fastcoe", "choices": [{"index": 0, "delta": {"content": "I am a bot"}, "logprobs": null, "finish_reason": null}]} data: {"id": "4c5e4a44-e847-467d-b9cd-d2f6530678cd", "object": "chat.completion.chunk", "created": 1721336361, "model": "llama3-8b", "system_fingerprint": "fastcoe", "choices": [{"index": 0, "delta": {"content": "I am a bot"}, "logprobs": null, "finish_reason": null}]}

View File

@ -88,6 +88,8 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Vllm do
let(:request_body) { model.default_options.merge(messages: prompt).to_json } let(:request_body) { model.default_options.merge(messages: prompt).to_json }
let(:stream_request_body) { model.default_options.merge(messages: prompt, stream: true).to_json } let(:stream_request_body) { model.default_options.merge(messages: prompt, stream: true).to_json }
before { enable_current_plugin }
describe "tool support" do describe "tool support" do
it "is able to invoke XML tools correctly" do it "is able to invoke XML tools correctly" do
xml = <<~XML xml = <<~XML

View File

@ -3,6 +3,8 @@
describe DiscourseAi::Completions::JsonStreamDecoder do describe DiscourseAi::Completions::JsonStreamDecoder do
let(:decoder) { DiscourseAi::Completions::JsonStreamDecoder.new } let(:decoder) { DiscourseAi::Completions::JsonStreamDecoder.new }
before { enable_current_plugin }
it "should be able to parse simple messages" do it "should be able to parse simple messages" do
result = decoder << "data: #{{ hello: "world" }.to_json}" result = decoder << "data: #{{ hello: "world" }.to_json}"
expect(result).to eq([{ hello: "world" }]) expect(result).to eq([{ hello: "world" }])

View File

@ -13,6 +13,8 @@ RSpec.describe DiscourseAi::Completions::Llm do
fab!(:user) fab!(:user)
fab!(:model) { Fabricate(:llm_model) } fab!(:model) { Fabricate(:llm_model) }
before { enable_current_plugin }
describe ".proxy" do describe ".proxy" do
it "raises an exception when we can't proxy the model" do it "raises an exception when we can't proxy the model" do
fake_model = "unknown:unknown_v2" fake_model = "unknown:unknown_v2"

View File

@ -14,6 +14,8 @@ describe DiscourseAi::Completions::PromptMessagesBuilder do
Fabricate(:upload, user: user, original_filename: "image.png", extension: "png") Fabricate(:upload, user: user, original_filename: "image.png", extension: "png")
end end
before { enable_current_plugin }
it "correctly merges user messages with uploads" do it "correctly merges user messages with uploads" do
builder.push(type: :user, content: "Hello", id: "Alice", upload_ids: [1]) builder.push(type: :user, content: "Hello", id: "Alice", upload_ids: [1])
builder.push(type: :user, content: "World", id: "Bob", upload_ids: [2]) builder.push(type: :user, content: "World", id: "Bob", upload_ids: [2])

View File

@ -8,6 +8,8 @@ RSpec.describe DiscourseAi::Completions::Prompt do
let(:username) { "username1" } let(:username) { "username1" }
let(:image100x100) { plugin_file_from_fixtures("100x100.jpg") } let(:image100x100) { plugin_file_from_fixtures("100x100.jpg") }
before { enable_current_plugin }
describe ".new" do describe ".new" do
it "raises for invalid attributes" do it "raises for invalid attributes" do
expect { described_class.new("a bot", messages: {}) }.to raise_error(ArgumentError) expect { described_class.new("a bot", messages: {}) }.to raise_error(ArgumentError)

View File

@ -26,6 +26,8 @@ RSpec.describe DiscourseAi::Completions::StructuredOutput do
) )
end end
before { enable_current_plugin }
describe "Parsing structured output on the fly" do describe "Parsing structured output on the fly" do
it "acts as a buffer for an streamed JSON" do it "acts as a buffer for an streamed JSON" do
chunks = [ chunks = [

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe DiscourseAi::Completions::ToolDefinition do RSpec.describe DiscourseAi::Completions::ToolDefinition do
before { enable_current_plugin }
# Test case 1: Basic tool definition creation # Test case 1: Basic tool definition creation
describe "#initialize" do describe "#initialize" do
it "creates a tool with name, description and parameters" do it "creates a tool with name, description and parameters" do

View File

@ -5,6 +5,8 @@ RSpec.describe DiscourseAi::Completions::UploadEncoder do
let(:jpg) { plugin_file_from_fixtures("1x1.jpg") } let(:jpg) { plugin_file_from_fixtures("1x1.jpg") }
let(:webp) { plugin_file_from_fixtures("1x1.webp") } let(:webp) { plugin_file_from_fixtures("1x1.webp") }
before { enable_current_plugin }
it "automatically converts gifs to pngs" do it "automatically converts gifs to pngs" do
upload = UploadCreator.new(gif, "1x1.gif").create_for(Discourse.system_user.id) upload = UploadCreator.new(gif, "1x1.gif").create_for(Discourse.system_user.id)
encoded = described_class.encode(upload_ids: [upload.id], max_pixels: 1_048_576) encoded = described_class.encode(upload_ids: [upload.id], max_pixels: 1_048_576)

View File

@ -3,6 +3,8 @@
describe DiscourseAi::Completions::PromptMessagesBuilder do describe DiscourseAi::Completions::PromptMessagesBuilder do
let(:tag_stripper) { DiscourseAi::Completions::XmlTagStripper.new(%w[thinking results]) } let(:tag_stripper) { DiscourseAi::Completions::XmlTagStripper.new(%w[thinking results]) }
before { enable_current_plugin }
it "should strip tags correctly in simple cases" do it "should strip tags correctly in simple cases" do
result = tag_stripper << "x<thinking>hello</thinki" result = tag_stripper << "x<thinking>hello</thinki"
expect(result).to eq("x") expect(result).to eq("x")

View File

@ -3,6 +3,8 @@
RSpec.describe DiscourseAi::Completions::XmlToolProcessor do RSpec.describe DiscourseAi::Completions::XmlToolProcessor do
let(:processor) { DiscourseAi::Completions::XmlToolProcessor.new } let(:processor) { DiscourseAi::Completions::XmlToolProcessor.new }
before { enable_current_plugin }
it "can process simple text" do it "can process simple text" do
result = [] result = []
result << (processor << "hello") result << (processor << "hello")

View File

@ -12,6 +12,7 @@ RSpec.describe DiscourseAi::Discord::Bot::PersonaReplier do
fab!(:persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) } fab!(:persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) }
before do before do
enable_current_plugin
SiteSetting.ai_discord_search_persona = persona.id.to_s SiteSetting.ai_discord_search_persona = persona.id.to_s
allow_any_instance_of(DiscourseAi::Personas::Bot).to receive(:reply).and_return( allow_any_instance_of(DiscourseAi::Personas::Bot).to receive(:reply).and_return(
"This is a reply from bot!", "This is a reply from bot!",

View File

@ -9,6 +9,8 @@ RSpec.describe DiscourseAi::Discord::Bot::Search do
let(:search) { described_class.new(interaction_body) } let(:search) { described_class.new(interaction_body) }
before do before do
enable_current_plugin
stub_request(:post, "https://discord.com/api/webhooks//interaction_token").with( stub_request(:post, "https://discord.com/api/webhooks//interaction_token").with(
body: body:
"{\"content\":\"Here are the top search results for your query:\\n\\n1. [Title](\\u003chttp://test.localhost/link\\u003e)\\n\\n\"}", "{\"content\":\"Here are the top search results for your query:\\n\\n1. [Title](\\u003chttp://test.localhost/link\\u003e)\\n\\n\"}",

View File

@ -3,6 +3,8 @@
require "rails_helper" require "rails_helper"
RSpec.describe DiscourseAi::Automation do RSpec.describe DiscourseAi::Automation do
before { enable_current_plugin }
describe "manually configured model" do describe "manually configured model" do
let!(:llm_model) { Fabricate(:llm_model) } let!(:llm_model) { Fabricate(:llm_model) }
it "returns a list of available models for automation" do it "returns a list of available models for automation" do

View File

@ -39,6 +39,8 @@ describe DiscourseAi::Automation::LlmPersonaTriage do
end end
before do before do
enable_current_plugin
SiteSetting.ai_bot_enabled = true SiteSetting.ai_bot_enabled = true
SiteSetting.ai_bot_allowed_groups = "#{Group::AUTO_GROUPS[:trust_level_0]}" SiteSetting.ai_bot_allowed_groups = "#{Group::AUTO_GROUPS[:trust_level_0]}"

View File

@ -21,6 +21,8 @@ describe DiscourseAutomation do
) )
end end
before { enable_current_plugin }
it "can trigger via automation" do it "can trigger via automation" do
add_automation_field("sender", user.username, type: "user") add_automation_field("sender", user.username, type: "user")
add_automation_field("receivers", [user.username], type: "email_group_user") add_automation_field("receivers", [user.username], type: "email_group_user")

View File

@ -52,7 +52,7 @@ RSpec.describe DiscourseAi::Automation::LlmToolTriage do
end end
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
SiteSetting.ai_bot_enabled = true SiteSetting.ai_bot_enabled = true
end end

View File

@ -24,6 +24,8 @@ describe DiscourseAi::Automation::LlmTriage do
end end
before do before do
enable_current_plugin
SiteSetting.tagging_enabled = true SiteSetting.tagging_enabled = true
add_automation_field("system_prompt", "hello %%POST%%") add_automation_field("system_prompt", "hello %%POST%%")
add_automation_field("search_for_text", "bad") add_automation_field("search_for_text", "bad")

View File

@ -6,6 +6,7 @@ describe DiscourseAi::GuardianExtensions do
fab!(:topic) fab!(:topic)
before do before do
enable_current_plugin
group.add(user) group.add(user)
assign_fake_provider_to(:ai_summarization_model) assign_fake_provider_to(:ai_summarization_model)
SiteSetting.ai_summarization_enabled = true SiteSetting.ai_summarization_enabled = true

View File

@ -23,6 +23,8 @@ RSpec.describe DiscourseAi::Inference::CloudflareWorkersAi do
let(:payload) { { text: [content] }.to_json } let(:payload) { { text: [content] }.to_json }
before do before do
enable_current_plugin
stub_request(:post, endpoint).with(body: payload, headers: headers).to_return( stub_request(:post, endpoint).with(body: payload, headers: headers).to_return(
status: response_status, status: response_status,
body: response_body, body: response_body,

View File

@ -11,6 +11,8 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
fab!(:llm_model) { Fabricate(:fake_model) } fab!(:llm_model) { Fabricate(:fake_model) }
before do before do
enable_current_plugin
SiteSetting.inferred_concepts_match_persona = -1 SiteSetting.inferred_concepts_match_persona = -1
SiteSetting.inferred_concepts_enabled = true SiteSetting.inferred_concepts_enabled = true
@ -140,7 +142,9 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
persona_class_double = double("PersonaClass") # rubocop:disable RSpec/VerifiedDoubles persona_class_double = double("PersonaClass") # rubocop:disable RSpec/VerifiedDoubles
allow(AiPersona).to receive(:all_personas).and_return([persona_class_double]) allow(AiPersona).to receive(:all_personas).and_return([persona_class_double])
allow(persona_class_double).to receive(:id).and_return(SiteSetting.inferred_concepts_match_persona.to_i) allow(persona_class_double).to receive(:id).and_return(
SiteSetting.inferred_concepts_match_persona.to_i,
)
allow(persona_class_double).to receive(:new).and_return(persona_instance_double) allow(persona_class_double).to receive(:new).and_return(persona_instance_double)
allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id) allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id)
allow(persona_instance_double).to receive(:class).and_return(persona_class_double) allow(persona_instance_double).to receive(:class).and_return(persona_class_double)
@ -190,7 +194,9 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
persona_class_double = double("PersonaClass") # rubocop:disable RSpec/VerifiedDoubles persona_class_double = double("PersonaClass") # rubocop:disable RSpec/VerifiedDoubles
allow(AiPersona).to receive(:all_personas).and_return([persona_class_double]) allow(AiPersona).to receive(:all_personas).and_return([persona_class_double])
allow(persona_class_double).to receive(:id).and_return(SiteSetting.inferred_concepts_match_persona.to_i) allow(persona_class_double).to receive(:id).and_return(
SiteSetting.inferred_concepts_match_persona.to_i,
)
allow(persona_class_double).to receive(:new).and_return(persona_instance_double) allow(persona_class_double).to receive(:new).and_return(persona_instance_double)
allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id) allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id)
allow(persona_instance_double).to receive(:class).and_return(persona_class_double) allow(persona_instance_double).to receive(:class).and_return(persona_class_double)
@ -228,7 +234,9 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
bot_double = instance_spy(DiscourseAi::Personas::Bot) bot_double = instance_spy(DiscourseAi::Personas::Bot)
allow(AiPersona).to receive(:all_personas).and_return([persona_class_double]) allow(AiPersona).to receive(:all_personas).and_return([persona_class_double])
allow(persona_class_double).to receive(:id).and_return(SiteSetting.inferred_concepts_match_persona.to_i) allow(persona_class_double).to receive(:id).and_return(
SiteSetting.inferred_concepts_match_persona.to_i,
)
allow(persona_class_double).to receive(:new).and_return(persona_instance_double) allow(persona_class_double).to receive(:new).and_return(persona_instance_double)
allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id) allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id)
allow(persona_instance_double).to receive(:class).and_return(persona_class_double) allow(persona_instance_double).to receive(:class).and_return(persona_class_double)
@ -261,7 +269,9 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
bot_double = instance_double("DiscourseAi::Personas::Bot") bot_double = instance_double("DiscourseAi::Personas::Bot")
allow(AiPersona).to receive(:all_personas).and_return([persona_class_double]) allow(AiPersona).to receive(:all_personas).and_return([persona_class_double])
allow(persona_class_double).to receive(:id).and_return(SiteSetting.inferred_concepts_match_persona.to_i) allow(persona_class_double).to receive(:id).and_return(
SiteSetting.inferred_concepts_match_persona.to_i,
)
allow(persona_class_double).to receive(:new).and_return(persona_instance_double) allow(persona_class_double).to receive(:new).and_return(persona_instance_double)
allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id) allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id)
allow(persona_instance_double).to receive(:class).and_return(persona_class_double) allow(persona_instance_double).to receive(:class).and_return(persona_class_double)
@ -283,7 +293,9 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
bot_double = instance_double("DiscourseAi::Personas::Bot") bot_double = instance_double("DiscourseAi::Personas::Bot")
allow(AiPersona).to receive(:all_personas).and_return([persona_class_double]) allow(AiPersona).to receive(:all_personas).and_return([persona_class_double])
allow(persona_class_double).to receive(:id).and_return(SiteSetting.inferred_concepts_match_persona.to_i) allow(persona_class_double).to receive(:id).and_return(
SiteSetting.inferred_concepts_match_persona.to_i,
)
allow(persona_class_double).to receive(:new).and_return(persona_instance_double) allow(persona_class_double).to receive(:new).and_return(persona_instance_double)
allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id) allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id)
allow(persona_instance_double).to receive(:class).and_return(persona_class_double) allow(persona_instance_double).to receive(:class).and_return(persona_class_double)
@ -305,7 +317,9 @@ RSpec.describe DiscourseAi::InferredConcepts::Applier do
bot_double = instance_double("DiscourseAi::Personas::Bot") bot_double = instance_double("DiscourseAi::Personas::Bot")
allow(AiPersona).to receive(:all_personas).and_return([persona_class_double]) allow(AiPersona).to receive(:all_personas).and_return([persona_class_double])
allow(persona_class_double).to receive(:id).and_return(SiteSetting.inferred_concepts_match_persona.to_i) allow(persona_class_double).to receive(:id).and_return(
SiteSetting.inferred_concepts_match_persona.to_i,
)
allow(persona_class_double).to receive(:new).and_return(persona_instance_double) allow(persona_class_double).to receive(:new).and_return(persona_instance_double)
allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id) allow(persona_class_double).to receive(:default_llm_id).and_return(llm_model.id)
allow(persona_instance_double).to receive(:class).and_return(persona_class_double) allow(persona_instance_double).to receive(:class).and_return(persona_class_double)

View File

@ -10,6 +10,7 @@ RSpec.describe DiscourseAi::InferredConcepts::Finder do
fab!(:llm_model) { Fabricate(:fake_model) } fab!(:llm_model) { Fabricate(:fake_model) }
before do before do
enable_current_plugin
SiteSetting.inferred_concepts_generate_persona = -1 SiteSetting.inferred_concepts_generate_persona = -1
SiteSetting.inferred_concepts_deduplicate_persona = -1 SiteSetting.inferred_concepts_deduplicate_persona = -1
SiteSetting.inferred_concepts_enabled = true SiteSetting.inferred_concepts_enabled = true

View File

@ -8,6 +8,8 @@ RSpec.describe DiscourseAi::InferredConcepts::Manager do
fab!(:concept1) { Fabricate(:inferred_concept, name: "programming") } fab!(:concept1) { Fabricate(:inferred_concept, name: "programming") }
fab!(:concept2) { Fabricate(:inferred_concept, name: "testing") } fab!(:concept2) { Fabricate(:inferred_concept, name: "testing") }
before { enable_current_plugin }
describe "#list_concepts" do describe "#list_concepts" do
it "returns all concepts sorted by name" do it "returns all concepts sorted by name" do
concepts = manager.list_concepts concepts = manager.list_concepts

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe DiscourseAi::AiBot::EntryPoint do RSpec.describe DiscourseAi::AiBot::EntryPoint do
before { enable_current_plugin }
describe "#inject_into" do describe "#inject_into" do
describe "subscribes to the post_created event" do describe "subscribes to the post_created event" do
fab!(:admin) fab!(:admin)

View File

@ -2,7 +2,9 @@
RSpec.describe Jobs::CreateAiReply do RSpec.describe Jobs::CreateAiReply do
fab!(:gpt_35_bot) { Fabricate(:llm_model, name: "gpt-3.5-turbo") } fab!(:gpt_35_bot) { Fabricate(:llm_model, name: "gpt-3.5-turbo") }
before do before do
enable_current_plugin
SiteSetting.ai_bot_enabled = true SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_bot]) toggle_enabled_bots(bots: [gpt_35_bot])
end end

View File

@ -14,6 +14,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
fab!(:opus_model) { Fabricate(:anthropic_model) } fab!(:opus_model) { Fabricate(:anthropic_model) }
fab!(:bot_user) do fab!(:bot_user) do
enable_current_plugin
toggle_enabled_bots(bots: [claude_2]) toggle_enabled_bots(bots: [claude_2])
SiteSetting.ai_bot_enabled = true SiteSetting.ai_bot_enabled = true
claude_2.reload.user claude_2.reload.user
@ -58,7 +59,10 @@ RSpec.describe DiscourseAi::AiBot::Playground do
) )
end end
before { SiteSetting.ai_embeddings_enabled = false } before do
enable_current_plugin
SiteSetting.ai_embeddings_enabled = false
end
after do after do
# we must reset cache on persona cause data can be rolled back # we must reset cache on persona cause data can be rolled back

View File

@ -9,7 +9,7 @@ describe DiscourseAi::AiBot::SiteSettingsExtension do
DiscourseAi::AiBot::EntryPoint.find_user_from_model(model).present? DiscourseAi::AiBot::EntryPoint.find_user_from_model(model).present?
end end
before { SiteSetting.discourse_ai_enabled = true } before { enable_current_plugin }
it "correctly creates/deletes bot accounts as needed" do it "correctly creates/deletes bot accounts as needed" do
SiteSetting.ai_bot_enabled = true SiteSetting.ai_bot_enabled = true

View File

@ -5,6 +5,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
fab!(:empty_locale_user) { Fabricate(:user, locale: "") } fab!(:empty_locale_user) { Fabricate(:user, locale: "") }
before do before do
enable_current_plugin
assign_fake_provider_to(:ai_helper_model) assign_fake_provider_to(:ai_helper_model)
Group.refresh_automatic_groups! Group.refresh_automatic_groups!
end end

View File

@ -3,12 +3,15 @@
RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do
subject(:titler) { described_class.new(thread) } subject(:titler) { described_class.new(thread) }
before { assign_fake_provider_to(:ai_helper_model) }
fab!(:thread) { Fabricate(:chat_thread) } fab!(:thread) { Fabricate(:chat_thread) }
fab!(:chat_message) { Fabricate(:chat_message, thread: thread) } fab!(:chat_message) { Fabricate(:chat_message, thread: thread) }
fab!(:user) fab!(:user)
before do
enable_current_plugin
assign_fake_provider_to(:ai_helper_model)
end
describe "#suggested_title" do describe "#suggested_title" do
it "bails early if thread has no content" do it "bails early if thread has no content" do
empty_thread = Chat::Thread.new empty_thread = Chat::Thread.new

View File

@ -6,6 +6,8 @@ RSpec.describe DiscourseAi::AiHelper::DateFormatter do
# Reference time is Tuesday Jan 16th, 2024 at 2:30 PM Sydney time # Reference time is Tuesday Jan 16th, 2024 at 2:30 PM Sydney time
let(:sydney_reference) { DateTime.parse("2024-01-16 14:30:00 +11:00") } let(:sydney_reference) { DateTime.parse("2024-01-16 14:30:00 +11:00") }
before { enable_current_plugin }
describe ".process_date_placeholders" do describe ".process_date_placeholders" do
describe "with Sydney timezone" do describe "with Sydney timezone" do
before do before do

View File

@ -4,6 +4,8 @@ describe DiscourseAi::AiHelper::EntryPoint do
fab!(:english_user) { Fabricate(:user) } fab!(:english_user) { Fabricate(:user) }
fab!(:french_user) { Fabricate(:user, locale: "fr") } fab!(:french_user) { Fabricate(:user, locale: "fr") }
before { enable_current_plugin }
it "will correctly localize available prompts" do it "will correctly localize available prompts" do
assign_fake_provider_to(:ai_helper_model) assign_fake_provider_to(:ai_helper_model)
SiteSetting.default_locale = "en" SiteSetting.default_locale = "en"

View File

@ -6,6 +6,7 @@ RSpec.describe DiscourseAi::AiHelper::Painter do
fab!(:user) fab!(:user)
before do before do
enable_current_plugin
assign_fake_provider_to(:ai_helper_model) assign_fake_provider_to(:ai_helper_model)
SiteSetting.ai_stability_api_url = "https://api.stability.dev" SiteSetting.ai_stability_api_url = "https://api.stability.dev"
SiteSetting.ai_stability_api_key = "abc" SiteSetting.ai_stability_api_key = "abc"

View File

@ -20,6 +20,8 @@ RSpec.describe DiscourseAi::AiHelper::SemanticCategorizer do
let(:expected_embedding) { [0.0038493] * vector.vdef.dimensions } let(:expected_embedding) { [0.0038493] * vector.vdef.dimensions }
before do before do
enable_current_plugin
SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_selected_model = vector_def.id
SiteSetting.ai_embeddings_enabled = true SiteSetting.ai_embeddings_enabled = true

View File

@ -19,7 +19,7 @@ RSpec.describe DiscourseAi::AiModeration::SpamScanner do
end end
before do before do
SiteSetting.discourse_ai_enabled = true enable_current_plugin
SiteSetting.ai_spam_detection_enabled = true SiteSetting.ai_spam_detection_enabled = true
end end

View File

@ -8,6 +8,8 @@ describe DiscourseAi::Automation::LlmTriage do
DiscourseAi::Automation::LlmTriage.handle(**args) DiscourseAi::Automation::LlmTriage.handle(**args)
end end
before { enable_current_plugin }
it "does nothing if it does not pass triage" do it "does nothing if it does not pass triage" do
DiscourseAi::Completions::Llm.with_prepared_responses(["good"]) do DiscourseAi::Completions::Llm.with_prepared_responses(["good"]) do
triage( triage(

View File

@ -43,6 +43,8 @@ module DiscourseAi
fab!(:post_with_likes3) { Fabricate(:post, topic: topic_with_likes, like_count: 3) } fab!(:post_with_likes3) { Fabricate(:post, topic: topic_with_likes, like_count: 3) }
before { enable_current_plugin }
if defined?(::DiscourseSolved) if defined?(::DiscourseSolved)
it "will correctly denote solved topics" do it "will correctly denote solved topics" do
Fabricate(:solved_topic, topic: topic_with_likes, answer_post: post_with_likes2) Fabricate(:solved_topic, topic: topic_with_likes, answer_post: post_with_likes2)

View File

@ -34,6 +34,8 @@ module DiscourseAi
fab!(:llm_model) fab!(:llm_model)
before { enable_current_plugin }
describe "#run!" do describe "#run!" do
it "is able to generate email reports" do it "is able to generate email reports" do
freeze_time freeze_time

View File

@ -5,7 +5,10 @@ describe DiscourseAi::Embeddings::EntryPoint do
fab!(:embedding_definition) fab!(:embedding_definition)
before { SiteSetting.ai_embeddings_selected_model = embedding_definition.id } before do
enable_current_plugin
SiteSetting.ai_embeddings_selected_model = embedding_definition.id
end
describe "registering event callbacks" do describe "registering event callbacks" do
context "when creating a topic" do context "when creating a topic" do

View File

@ -5,6 +5,8 @@ RSpec.describe Jobs::GenerateEmbeddings do
fab!(:vector_def) { Fabricate(:embedding_definition) } fab!(:vector_def) { Fabricate(:embedding_definition) }
before { enable_current_plugin }
describe "#execute" do describe "#execute" do
before do before do
SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_selected_model = vector_def.id

View File

@ -8,9 +8,11 @@ RSpec.describe DiscourseAi::Embeddings::Schema do
fab!(:post) { Fabricate(:post, post_number: 1) } fab!(:post) { Fabricate(:post, post_number: 1) }
let(:digest) { OpenSSL::Digest.hexdigest("SHA1", "test") } let(:digest) { OpenSSL::Digest.hexdigest("SHA1", "test") }
before { SiteSetting.ai_embeddings_selected_model = vector_def.id } before do
enable_current_plugin
before { posts_schema.store(post, embeddings, digest) } SiteSetting.ai_embeddings_selected_model = vector_def.id
posts_schema.store(post, embeddings, digest)
end
describe "#find_by_target" do describe "#find_by_target" do
it "gets you the post_id of the record that matches the post" do it "gets you the post_id of the record that matches the post" do

View File

@ -16,6 +16,7 @@ describe DiscourseAi::Embeddings::SemanticRelated do
fab!(:vector_def) { Fabricate(:embedding_definition) } fab!(:vector_def) { Fabricate(:embedding_definition) }
before do before do
enable_current_plugin
SiteSetting.ai_embeddings_semantic_related_topics_enabled = true SiteSetting.ai_embeddings_semantic_related_topics_enabled = true
SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_selected_model = vector_def.id
SiteSetting.ai_embeddings_enabled = true SiteSetting.ai_embeddings_enabled = true

View File

@ -10,6 +10,7 @@ RSpec.describe DiscourseAi::Embeddings::SemanticSearch do
fab!(:vector_def) { Fabricate(:embedding_definition) } fab!(:vector_def) { Fabricate(:embedding_definition) }
before do before do
enable_current_plugin
SiteSetting.ai_embeddings_selected_model = vector_def.id SiteSetting.ai_embeddings_selected_model = vector_def.id
assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model)
end end

View File

@ -3,6 +3,8 @@
describe DiscourseAi::Embeddings::EntryPoint do describe DiscourseAi::Embeddings::EntryPoint do
fab!(:user) fab!(:user)
before { enable_current_plugin }
describe "SemanticTopicQuery extension" do describe "SemanticTopicQuery extension" do
describe "#list_semantic_related_topics" do describe "#list_semantic_related_topics" do
subject(:topic_query) { DiscourseAi::Embeddings::SemanticTopicQuery.new(user) } subject(:topic_query) { DiscourseAi::Embeddings::SemanticTopicQuery.new(user) }

View File

@ -6,6 +6,8 @@ RSpec.describe DiscourseAi::Embeddings::Strategies::Truncation do
fab!(:open_ai_embedding_def) fab!(:open_ai_embedding_def)
let(:prefix) { "I come first:" } let(:prefix) { "I come first:" }
before { enable_current_plugin }
describe "#prepare_target_text" do describe "#prepare_target_text" do
before { SiteSetting.max_post_length = 100_000 } before { SiteSetting.max_post_length = 100_000 }

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true # frozen_string_literal: true
RSpec.describe DiscourseAi::Embeddings::Vector do RSpec.describe DiscourseAi::Embeddings::Vector do
before { enable_current_plugin }
shared_examples "generates and store embeddings using a vector definition" do shared_examples "generates and store embeddings using a vector definition" do
subject(:vector) { described_class.new(vdef) } subject(:vector) { described_class.new(vdef) }

Some files were not shown because too many files have changed in this diff Show More