FIX: eat all leading spaces llms provide when they stream them (#1280)

* FIX: eat all leading spaces llms provide when they stream them

* improve so we don't stop replying...
This commit is contained in:
Sam 2025-04-24 23:07:26 +11:00 committed by GitHub
parent 2060426709
commit 65718f6dbe
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 7 additions and 1 deletion

View File

@ -39,6 +39,9 @@ module DiscourseAi
def <<(partial)
return if partial.to_s.empty?
# we throw away leading spaces prior to message creation for now
# by design
return if partial.to_s.blank? && !@reply
if @client_id
ChatSDK::Channel.stop_reply(

View File

@ -354,7 +354,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
)
prompts = nil
DiscourseAi::Completions::Llm.with_prepared_responses(["world"]) do |_, _, _prompts|
DiscourseAi::Completions::Llm.with_prepared_responses([[" ", "world"]]) do |_, _, _prompts|
message =
ChatSDK::Message.create(
channel_id: channel.id,
@ -386,6 +386,9 @@ RSpec.describe DiscourseAi::AiBot::Playground do
TEXT
expect(content.strip).to eq(expected)
reply = Chat::Message.order(:id).last
expect(reply.message).to eq("world")
end
it "should reply to a mention if properly enabled" do