From 65718f6dbee84503df26d4e3f0e17bee8c638dce Mon Sep 17 00:00:00 2001
From: Sam
Date: Thu, 24 Apr 2025 23:07:26 +1100
Subject: [PATCH] FIX: eat all leading spaces llms provide when they stream
 them (#1280)

* FIX: eat all leading spaces llms provide when they stream them

* improve so we don't stop replying...
---
 lib/ai_bot/chat_streamer.rb                | 3 +++
 spec/lib/modules/ai_bot/playground_spec.rb | 5 ++++-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/lib/ai_bot/chat_streamer.rb b/lib/ai_bot/chat_streamer.rb
index bd441e7c..139a6c7f 100644
--- a/lib/ai_bot/chat_streamer.rb
+++ b/lib/ai_bot/chat_streamer.rb
@@ -39,6 +39,9 @@ module DiscourseAi
     def <<(partial)
       return if partial.to_s.empty?
 
+      # we throw away leading spaces prior to message creation for now
+      # by design
+      return if partial.to_s.blank? && !@reply
       if @client_id
         ChatSDK::Channel.stop_reply(
diff --git a/spec/lib/modules/ai_bot/playground_spec.rb b/spec/lib/modules/ai_bot/playground_spec.rb
index 750d1ea9..be670399 100644
--- a/spec/lib/modules/ai_bot/playground_spec.rb
+++ b/spec/lib/modules/ai_bot/playground_spec.rb
@@ -354,7 +354,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
       )
 
       prompts = nil
-      DiscourseAi::Completions::Llm.with_prepared_responses(["world"]) do |_, _, _prompts|
+      DiscourseAi::Completions::Llm.with_prepared_responses([[" ", "world"]]) do |_, _, _prompts|
         message =
           ChatSDK::Message.create(
             channel_id: channel.id,
@@ -386,6 +386,9 @@ RSpec.describe DiscourseAi::AiBot::Playground do
       TEXT
 
       expect(content.strip).to eq(expected)
+
+      reply = Chat::Message.order(:id).last
+      expect(reply.message).to eq("world")
     end
 
     it "should reply to a mention if properly enabled" do
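
Editor's note: for readers outside the Discourse codebase, the sketch below illustrates the guard this patch adds to ChatStreamer#<<. It is a minimal standalone approximation, not the plugin's real class: LeadingSpaceEater and its :created marker are hypothetical names, and Rails' blank? is replaced with plain-Ruby strip.empty?. The behaviour shown is the one the patch and its spec describe: whitespace-only partials streamed before the first reply message exists are dropped, later whitespace is kept.

    #!/usr/bin/env ruby
    # Standalone sketch (assumed names) of the "eat leading spaces" guard.
    class LeadingSpaceEater
      attr_reader :buffer

      def initialize
        @buffer = +""
        @reply = nil # stands in for the Chat::Message created on the first real partial
      end

      def <<(partial)
        text = partial.to_s
        return if text.empty?
        # mirrors `return if partial.to_s.blank? && !@reply` without Rails' blank?
        return if text.strip.empty? && !@reply

        @reply ||= :created # hypothetical stand-in for message creation via ChatSDK
        @buffer << text
      end
    end

    streamer = LeadingSpaceEater.new
    [" ", "  ", "world", " ", "again"].each { |chunk| streamer << chunk }
    puts streamer.buffer.inspect # => "world again"

Under these assumptions, streaming [" ", "world"] produces a reply of exactly "world", which matches the expectation the updated spec asserts.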