FIX: various issues with llm and triage management (#1186)

- Fix the search API to include column_names only when present, making responses less confusing
- Ensure the correct LLM is used in PMs by tracking and preferring the last bot user
- Fix persona_id conversion from string to integer in custom fields
- Add a missing test for PM triage with no replies, ensuring we don't try to auto-title the topic
- Ensure bot users are properly added to PMs
- Make title setting optional when replying to posts
- Add the ability to control stream_reply behavior (see the call sketch below)

These changes improve reliability and fix edge cases in bot interactions,
particularly in private messages with multiple LLMs and when triaging posts with personas.
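
For orientation, a hedged sketch of how a caller might use the widened reply_to_post surface; the keyword names are taken from the diff below, while post and ai_persona are assumed to be in scope:

    # Call sketch only, not code from the commit itself. stream_reply and
    # auto_set_title both default to false, so callers that omit them neither
    # stream the reply nor retitle the PM.
    DiscourseAi::AiBot::Playground.reply_to_post(
      post: post,                # the post being replied to
      persona_id: ai_persona.id, # must resolve to an AiPersona, otherwise InvalidParameters is raised
      whisper: false,
      add_user_to_pm: true,      # invite the persona and bot users into the PM
      stream_reply: false,       # passing nil falls back to "stream only inside PMs"
      auto_set_title: false,     # keep the existing topic title
    )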
Sam 2025-03-12 18:32:02 +11:00 committed by GitHub
parent b17c688162
commit 65503a5038
5 changed files with 123 additions and 21 deletions


@@ -4,6 +4,7 @@ module DiscourseAi
module AiBot
class Playground
BYPASS_AI_REPLY_CUSTOM_FIELD = "discourse_ai_bypass_ai_reply"
BOT_USER_PREF_ID_CUSTOM_FIELD = "discourse_ai_bot_user_pref_id"
attr_reader :bot
@@ -64,6 +65,33 @@ module DiscourseAi
user_id.to_i <= 0
end
def self.get_bot_user(post:, all_llm_users:, mentionables:)
bot_user = nil
if post.topic.private_message?
# this ensures that we reply using the correct llm
# 1. if we have a preferred llm user, we use that
# 2. if we don't, we just take the first topic allowed user
# 3. failing that, we take the first mentionable
bot_user = nil
if preferred_user =
all_llm_users.find { |id, username|
id == post.topic.custom_fields[BOT_USER_PREF_ID_CUSTOM_FIELD].to_i
}
bot_user = User.find_by(id: preferred_user[0])
end
bot_user ||=
post.topic.topic_allowed_users.where(user_id: all_llm_users.map(&:first)).first&.user
bot_user ||=
post
.topic
.topic_allowed_users
.where(user_id: mentionables.map { |m| m[:user_id] })
.first
&.user
end
bot_user
end
def self.schedule_reply(post)
return if is_bot_user_id?(post.user_id)
mentionables = nil
@@ -75,7 +103,6 @@ module DiscourseAi
mentionables = AiPersona.allowed_modalities(user: post.user, allow_topic_mentions: true)
end
bot_user = nil
mentioned = nil
all_llm_users =
@@ -84,18 +111,8 @@ module DiscourseAi
.joins(:user)
.pluck("users.id", "users.username_lower")
if post.topic.private_message?
# this is an edge case, you started a PM with a different bot
bot_user =
post.topic.topic_allowed_users.where(user_id: all_llm_users.map(&:first)).first&.user
bot_user ||=
post
.topic
.topic_allowed_users
.where(user_id: mentionables.map { |m| m[:user_id] })
.first
&.user
end
bot_user =
get_bot_user(post: post, all_llm_users: all_llm_users, mentionables: mentionables)
mentions = nil
if mentionables.present? || (bot_user && post.topic.private_message?)
@@ -126,6 +143,8 @@ module DiscourseAi
if bot_user
topic_persona_id = post.topic.custom_fields["ai_persona_id"]
topic_persona_id = topic_persona_id.to_i if topic_persona_id.present?
persona_id = mentioned&.dig(:id) || topic_persona_id
persona = nil
@@ -141,7 +160,7 @@ module DiscourseAi
end
# edge case, llm was mentioned in an ai persona conversation
if persona_id == topic_persona_id.to_i && post.topic.private_message? && persona &&
if persona_id == topic_persona_id && post.topic.private_message? && persona &&
all_llm_users.present?
if !persona.force_default_llm && mentions.present?
mentioned_llm_user_id, _ =
@@ -162,7 +181,15 @@ module DiscourseAi
end
end
def self.reply_to_post(post:, user: nil, persona_id: nil, whisper: nil, add_user_to_pm: false)
def self.reply_to_post(
post:,
user: nil,
persona_id: nil,
whisper: nil,
add_user_to_pm: false,
stream_reply: false,
auto_set_title: false
)
ai_persona = AiPersona.find_by(id: persona_id)
raise Discourse::InvalidParameters.new(:persona_id) if !ai_persona
persona_class = ai_persona.class_instance
@@ -178,6 +205,8 @@ module DiscourseAi
whisper: whisper,
context_style: :topic,
add_user_to_pm: add_user_to_pm,
stream_reply: stream_reply,
auto_set_title: auto_set_title,
)
end
@@ -444,6 +473,8 @@ module DiscourseAi
whisper: nil,
context_style: nil,
add_user_to_pm: true,
stream_reply: nil,
auto_set_title: true,
&blk
)
# this is a multithreading issue
@@ -479,13 +510,23 @@ module DiscourseAi
reply_user = User.find_by(id: bot.persona.class.user_id) || reply_user
end
stream_reply = post.topic.private_message?
stream_reply = post.topic.private_message? if stream_reply.nil?
# we need to ensure the persona user is allowed to reply to the pm
if post.topic.private_message? && add_user_to_pm
if !post.topic.topic_allowed_users.exists?(user_id: reply_user.id)
post.topic.topic_allowed_users.create!(user_id: reply_user.id)
end
# edge case, maybe the llm user is missing?
if !post.topic.topic_allowed_users.exists?(user_id: bot.bot_user.id)
post.topic.topic_allowed_users.create!(user_id: bot.bot_user.id)
end
# we store the id of the last bot_user; it is then given preference on the next reply
if post.topic.custom_fields[BOT_USER_PREF_ID_CUSTOM_FIELD].to_i != bot.bot_user.id
post.topic.custom_fields[BOT_USER_PREF_ID_CUSTOM_FIELD] = bot.bot_user.id
post.topic.save_custom_fields
end
end
if stream_reply
@@ -609,7 +650,7 @@ module DiscourseAi
end
post_streamer&.finish(skip_callback: true)
publish_final_update(reply_post) if stream_reply
if reply_post && post.post_number == 1 && post.topic.private_message?
if reply_post && post.post_number == 1 && post.topic.private_message? && auto_set_title
title_playground(reply_post, post.user)
end
end
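
In plain terms, the PM flow above now records which bot user answered last and prefers it on the next turn. A condensed sketch of both sides of that handshake; bot_a, bot_b, topic, and post are illustrative fixtures assumed to be in scope, with bot_a and bot_b being LLM bot users invited to the same PM:

    # Write side (runs after a reply): remember the bot user that just answered.
    field = DiscourseAi::AiBot::Playground::BOT_USER_PREF_ID_CUSTOM_FIELD
    topic.custom_fields[field] = bot_a.id
    topic.save_custom_fields

    # Read side (next human post in the PM): get_bot_user checks the stored id
    # first, so bot_a keeps the conversation even though bot_b is also invited.
    chosen =
      DiscourseAi::AiBot::Playground.get_bot_user(
        post: post, # the newest human post in that PM
        all_llm_users: [[bot_a.id, bot_a.username_lower], [bot_b.id, bot_b.username_lower]],
        mentionables: [],
      )
    # If the stored id no longer matches an LLM user, resolution falls back to the
    # first allowed LLM user in the topic, then to the first mentionable persona user.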


@@ -125,6 +125,7 @@ module DiscourseAi
def self.format_results(rows, args: nil, result_style:)
rows = rows&.map { |row| yield row } if block_given?
column_names = nil
if result_style == :compact
index = -1
@@ -142,7 +143,8 @@ module DiscourseAi
column_names = column_indexes.keys
end
result = { column_names: column_names, rows: rows }
result = { rows: rows }
result[:column_names] = column_names if column_names
result[:args] = args if args
result
end
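
The observable effect on the search utility, mirrored by the specs further down: column_names now appears only for positional :compact results instead of being returned as nil for every style. A rough usage sketch, with the query text and user assumed:

    result =
      DiscourseAi::Utils::Search.perform_search(
        search_query: "needs triage",
        current_user: user,
        result_style: :detailed,
      )
    result.key?(:column_names) # => false, the key is simply absent now
    result[:rows]              # => matching rows, unchanged

    compact =
      DiscourseAi::Utils::Search.perform_search(
        search_query: "needs triage",
        current_user: user,
        result_style: :compact,
      )
    compact[:column_names]     # => present, because compact rows are positional arrays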


@@ -165,6 +165,30 @@ describe DiscourseAi::Automation::LlmPersonaTriage do
expect(context).to include("support")
end
it "interacts correctly with a PM with no replies" do
pm_topic = Fabricate(:private_message_topic, user: user, title: "Important PM")
# Create initial PM post
pm_post =
Fabricate(
:post,
topic: pm_topic,
user: user,
raw: "This is a private message that needs triage",
)
DiscourseAi::Completions::Llm.with_prepared_responses(
["I've received your private message"],
) do |_, _, _prompts|
automation.running_in_background!
automation.trigger!({ "post" => pm_post })
end
reply = pm_topic.posts.order(:post_number).last
expect(reply.raw).to eq("I've received your private message")
expect(reply.topic.reload.title).to eq("Important PM")
end
it "interacts correctly with PMs" do
# Create a private message topic
pm_topic = Fabricate(:private_message_topic, user: user, title: "Important PM")


@@ -637,13 +637,14 @@ RSpec.describe DiscourseAi::AiBot::Playground do
create_post(
title: "I just made a PM",
raw: "Hey there #{persona.user.username}, can you help me?",
target_usernames: "#{user.username},#{persona.user.username}",
target_usernames: "#{user.username},#{persona.user.username},#{claude_2.user.username}",
archetype: Archetype.private_message,
user: admin,
)
end
post.topic.custom_fields["ai_persona_id"] = persona.id
# note that this is a string due to custom field shenanigans
post.topic.custom_fields["ai_persona_id"] = persona.id.to_s
post.topic.save_custom_fields
llm2 = Fabricate(:llm_model, enabled_chat_bot: true)
@@ -662,6 +663,22 @@ RSpec.describe DiscourseAi::AiBot::Playground do
expect(last_post.raw).to eq("Hi from bot two")
expect(last_post.user_id).to eq(persona.user_id)
current_users = last_post.topic.reload.topic_allowed_users.joins(:user).pluck(:username)
expect(current_users).to include(llm2.user.username)
# subsequent replies should come from the new llm
DiscourseAi::Completions::Llm.with_prepared_responses(["Hi from bot two"], llm: llm2) do
create_post(
user: admin,
raw: "just confirming everything switched",
topic_id: post.topic_id,
)
end
last_post = post.topic.reload.posts.order("id desc").first
expect(last_post.raw).to eq("Hi from bot two")
expect(last_post.user_id).to eq(persona.user_id)
# tether the llm so it can no longer be switched
persona.update!(force_default_llm: true, default_llm_id: claude_2.id)


@@ -72,8 +72,26 @@ RSpec.describe DiscourseAi::Utils::Search do
GroupUser.create!(group: group, user: user)
# Now should find the private post
results = described_class.perform_search(search_query: private_post.raw, current_user: user)
results =
described_class.perform_search(
search_query: private_post.raw,
current_user: user,
result_style: :detailed,
)
expect(results[:rows].length).to eq(1)
# so API is less confusing
expect(results.key?(:column_names)).to eq(false)
results =
described_class.perform_search(
search_query: private_post.raw,
current_user: user,
result_style: :compact,
)
expect(results[:rows].length).to eq(1)
# so API is less confusing
expect(results[:column_names]).to be_present
end
it "properly handles subfolder URLs" do