FIX: unify automation model translation (#540)
The report runner and LLM triage used different code paths to figure out the underlying model name; unify them so both use the same path. Fixes Claude 3 based models in LLM triage.
commit 5cac47a30a
parent e8b2a200c1
@@ -6,10 +6,25 @@ module DiscourseAi
       { id: "gpt-4-turbo", name: "discourse_automation.ai_models.gpt_4_turbo" },
       { id: "gpt-4", name: "discourse_automation.ai_models.gpt_4" },
       { id: "gpt-3.5-turbo", name: "discourse_automation.ai_models.gpt_3_5_turbo" },
-      { id: "claude-2", name: "discourse_automation.ai_models.claude_2" },
       { id: "gemini-pro", name: "discourse_automation.ai_models.gemini_pro" },
+      { id: "claude-2", name: "discourse_automation.ai_models.claude_2" },
       { id: "claude-3-sonnet", name: "discourse_automation.ai_models.claude_3_sonnet" },
       { id: "claude-3-opus", name: "discourse_automation.ai_models.claude_3_opus" },
     ]
+
+    def self.translate_model(model)
+      return "google:gemini-pro" if model == "gemini-pro"
+      return "open_ai:#{model}" if model.start_with? "gpt"
+
+      if model.start_with? "claude"
+        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(model)
+          return "aws_bedrock:#{model}"
+        else
+          return "anthropic:#{model}"
+        end
+      end
+
+      raise "Unknown model #{model}"
+    end
   end
 end
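For readers skimming the diff: the shared helper added above turns an automation model id (as listed in AVAILABLE_MODELS) into the provider-prefixed string that DiscourseAi::Completions::Llm.proxy expects. A minimal usage sketch, based only on the code in this commit:

# Sketch: resolving an automation model after this change.
# "claude-3-opus" now routes through the same helper the report runner uses.
translated = DiscourseAi::Automation.translate_model("claude-3-opus")
# => "aws_bedrock:claude-3-opus" when Bedrock is correctly configured,
#    "anthropic:claude-3-opus" otherwise

llm = DiscourseAi::Completions::Llm.proxy(translated)
# llm.generate(...) is then called as before; the argument list is not shown
# here, only in the triage hunk below ("result = llm.generate(").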
@@ -32,7 +32,8 @@ module DiscourseAi
 
         result = nil
 
-        llm = DiscourseAi::Completions::Llm.proxy(translate_model(model))
+        translated_model = DiscourseAi::Automation.translate_model(model)
+        llm = DiscourseAi::Completions::Llm.proxy(translated_model)
 
         result =
           llm.generate(
@@ -71,17 +72,6 @@ module DiscourseAi
           ReviewablePost.needs_review!(target: post, created_by: Discourse.system_user) if flag_post
         end
       end
-
-      def self.translate_model(model)
-        return "google:gemini-pro" if model == "gemini-pro"
-        return "open_ai:#{model}" if model != "claude-2"
-
-        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
-          "aws_bedrock:claude-2"
-        else
-          "anthropic:claude-2"
-        end
-      end
     end
   end
 end
@@ -65,7 +65,9 @@ module DiscourseAi
             I18n.t("discourse_automation.scriptables.llm_report.title")
           end
         @model = model
-        @llm = DiscourseAi::Completions::Llm.proxy(translate_model(model))
+
+        translated_model = DiscourseAi::Automation.translate_model(model)
+        @llm = DiscourseAi::Completions::Llm.proxy(translated_model)
         @category_ids = category_ids
         @tags = tags
         @allow_secure_categories = allow_secure_categories
@@ -210,21 +212,6 @@ Follow the provided writing composition instructions carefully and precisely ste
         end
       end
 
-      def translate_model(model)
-        return "google:gemini-pro" if model == "gemini-pro"
-        return "open_ai:#{model}" if model.start_with? "gpt"
-
-        if model.start_with? "claude"
-          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(model)
-            return "aws_bedrock:#{model}"
-          else
-            return "anthropic:#{model}"
-          end
-        end
-
-        raise "Unknown model #{model}"
-      end
-
       private
 
       def suppress_notifications(raw)
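Not part of the commit, only a sketch of how the unified mapping could be exercised in a spec; the file name and setup are hypothetical, and the stub on AwsBedrock.correctly_configured? pins down which branch is expected:

# hypothetical spec sketch for the shared translation helper
require "rails_helper"

RSpec.describe DiscourseAi::Automation do
  it "routes Claude 3 models to anthropic when Bedrock is not configured" do
    allow(DiscourseAi::Completions::Endpoints::AwsBedrock).to receive(
      :correctly_configured?,
    ).and_return(false)

    expect(described_class.translate_model("claude-3-opus")).to eq("anthropic:claude-3-opus")
    expect(described_class.translate_model("gpt-4-turbo")).to eq("open_ai:gpt-4-turbo")
    expect(described_class.translate_model("gemini-pro")).to eq("google:gemini-pro")
  end
end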