FEATURE: Block seeded models from being a persona default (#1100)

This commit is contained in:
Rafael dos Santos Silva 2025-01-29 17:13:19 -03:00 committed by GitHub
parent c49b455fd9
commit 8f0756fbca
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 47 additions and 7 deletions

View File

@ -27,9 +27,9 @@ module DiscourseAi
} }
end end
llms = llms =
DiscourseAi::Configuration::LlmEnumerator.values.map do |hash| DiscourseAi::Configuration::LlmEnumerator
{ id: hash[:value], name: hash[:name] } .values(allowed_seeded_llms: SiteSetting.ai_bot_allowed_seeded_models)
end .map { |hash| { id: hash[:value], name: hash[:name] } }
render json: { ai_personas: ai_personas, meta: { tools: tools, llms: llms } } render json: { ai_personas: ai_personas, meta: { tools: tools, llms: llms } }
end end

View File

@ -12,6 +12,7 @@ class AiPersona < ActiveRecord::Base
validates :system_prompt, presence: true, length: { maximum: 10_000_000 } validates :system_prompt, presence: true, length: { maximum: 10_000_000 }
validate :system_persona_unchangeable, on: :update, if: :system validate :system_persona_unchangeable, on: :update, if: :system
validate :chat_preconditions validate :chat_preconditions
validate :allowed_seeded_model, if: :default_llm
validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true
# leaves some room for growth but sets a maximum to avoid memory issues # leaves some room for growth but sets a maximum to avoid memory issues
# we may want to revisit this in the future # we may want to revisit this in the future
@ -275,6 +276,18 @@ class AiPersona < ActiveRecord::Base
throw :abort throw :abort
end end
end end
# Validation: a seeded (shared, negative-id) LLM may only be set as this
# persona's default_llm when its id appears in the
# ai_bot_allowed_seeded_models site-setting allow-list.
def allowed_seeded_model
  return if default_llm.blank?

  # default_llm is stored as "<prefix>:<id>"; the numeric id is the last segment.
  model = LlmModel.find_by(id: default_llm.split(":").last.to_i)
  return unless model&.seeded?
  return if SiteSetting.ai_bot_allowed_seeded_models.include?(model.id.to_s)

  errors.add(:default_llm, I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"))
end
end end
# == Schema Information # == Schema Information

View File

@ -26,9 +26,8 @@ discourse_ai:
default: 60 default: 60
hidden: true hidden: true
ai_openai_dall_e_3_url: "https://api.openai.com/v1/images/generations" ai_openai_dall_e_3_url: "https://api.openai.com/v1/images/generations"
ai_openai_embeddings_url: ai_openai_embeddings_url:
hidden: true hidden: true
default: "https://api.openai.com/v1/embeddings" default: "https://api.openai.com/v1/embeddings"
ai_openai_organization: ai_openai_organization:
@ -57,7 +56,7 @@ discourse_ai:
ai_hugging_face_tei_endpoint_srv: ai_hugging_face_tei_endpoint_srv:
default: "" default: ""
hidden: true hidden: true
ai_hugging_face_tei_api_key: ai_hugging_face_tei_api_key:
default: "" default: ""
hidden: true hidden: true
ai_hugging_face_tei_reranker_endpoint: ai_hugging_face_tei_reranker_endpoint:
@ -203,7 +202,7 @@ discourse_ai:
client: true client: true
hidden: true hidden: true
ai_embeddings_discourse_service_api_endpoint: ai_embeddings_discourse_service_api_endpoint:
default: "" default: ""
hidden: true hidden: true
ai_embeddings_discourse_service_api_endpoint_srv: ai_embeddings_discourse_service_api_endpoint_srv:
@ -307,6 +306,11 @@ discourse_ai:
ai_bot_github_access_token: ai_bot_github_access_token:
default: "" default: ""
secret: true secret: true
# Allow-list of seeded LLM model ids that may be used as a persona default.
# Empty (the default) means no seeded model is allowed.
# NOTE(review): list_type compact presumably renders as a compact multi-select
# in the admin UI — confirm against Discourse site-setting conventions.
ai_bot_allowed_seeded_models:
default: ""
hidden: true
type: list
list_type: compact
ai_automation_max_triage_per_minute: ai_automation_max_triage_per_minute:
default: 60 default: 60
hidden: true hidden: true

View File

@ -172,6 +172,29 @@ RSpec.describe AiPersona do
) )
end end
# A persona whose default_llm points at a seeded model (negative id) must be
# rejected unless that model id is present in ai_bot_allowed_seeded_models.
it "validates allowed seeded model" do
  persona =
    AiPersona.new(
      name: "test",
      description: "test",
      system_prompt: "test",
      tools: [],
      allowed_group_ids: [],
      default_llm: "seeded_model:-1",
    )

  # Seeded models use negative ids; no local needed — was an unused variable.
  Fabricate(:llm_model, id: -1)

  # Not allow-listed: validation fails with the seeded-model error.
  SiteSetting.ai_bot_allowed_seeded_models = ""
  expect(persona.valid?).to eq(false)
  expect(persona.errors[:default_llm]).to include(
    I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"),
  )

  # Allow-listed: the same persona becomes valid.
  SiteSetting.ai_bot_allowed_seeded_models = "-1"
  expect(persona.valid?).to eq(true)
end
it "does not leak caches between sites" do it "does not leak caches between sites" do
AiPersona.create!( AiPersona.create!(
name: "pun_bot", name: "pun_bot",