diff --git a/lib/shared/tokenizer/tokenizer.rb b/lib/shared/tokenizer/tokenizer.rb
index fc66c4e7..0fdcf2c9 100644
--- a/lib/shared/tokenizer/tokenizer.rb
+++ b/lib/shared/tokenizer/tokenizer.rb
@@ -49,6 +49,9 @@ module DiscourseAi
           return text if text.size < max_length
 
           tokenizer.decode(tokenize(text).take(max_length))
+        rescue Tiktoken::UnicodeError
+          max_length = max_length - 1
+          retry
         end
       end
     end
diff --git a/spec/shared/tokenizer.rb b/spec/shared/tokenizer.rb
index 9402445e..bfdf6510 100644
--- a/spec/shared/tokenizer.rb
+++ b/spec/shared/tokenizer.rb
@@ -76,5 +76,10 @@ describe DiscourseAi::Tokenizer::OpenAiTokenizer do
       sentence = "foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud"
       expect(described_class.truncate(sentence, 3)).to eq("foo bar baz")
     end
+
+    it "truncates a sentence successfully at a multibyte unicode character" do
+      sentence = "foo bar 👨🏿‍👩🏿‍👧🏿‍👧🏿 baz qux quux corge grault garply waldo fred plugh xyzzy thud"
+      expect(described_class.truncate(sentence, 7)).to eq("foo bar 👨🏿")
+    end
   end
 end
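
For context, a minimal standalone sketch of the retry technique the patch introduces, assuming the tiktoken_ruby gem (which provides Tiktoken.get_encoding and the Tiktoken::UnicodeError rescued in the diff). The bare truncate method below is a hypothetical, simplified stand-in for DiscourseAi::Tokenizer::OpenAiTokenizer.truncate, not the plugin code itself:

require "tiktoken_ruby"

# Truncate text to at most max_length tokens, backing off when the cut
# would split a character that spans several tokens.
def truncate(text, max_length)
  # Cheap fast path carried over from the patched method: skip
  # tokenizing when the character count is already under the budget.
  return text if text.size < max_length

  tokenizer = Tiktoken.get_encoding("cl100k_base")
  tokenizer.decode(tokenizer.encode(text).take(max_length))
rescue Tiktoken::UnicodeError
  # The token slice ended inside a multi-token character (for example an
  # emoji ZWJ sequence), so decoding produced invalid UTF-8. Shrink the
  # budget by one token and retry until the cut lands on a valid boundary.
  max_length = max_length - 1
  retry
end

sentence = "foo bar 👨🏿‍👩🏿‍👧🏿‍👧🏿 baz qux quux corge grault garply waldo fred plugh xyzzy thud"
puts truncate(sentence, 7) # => "foo bar 👨🏿" (matches the new spec)

Since rescue sits at the method level, retry re-runs the whole body with the decremented budget, so the worst case is one extra encode/decode round trip per backed-off token; the loop is bounded by the token length of the grapheme at the cut point.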