FEATURE: Examples support for personas. (#1334)
Examples simulate previous interactions with an LLM and are inserted right after the system prompt. This helps ground the model and produce better responses.
parent acd1986a5c · commit aef84bc5bb
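For illustration, here is a minimal Ruby sketch of the data shape this feature introduces and how it ends up in the prompt. The persona and message strings are invented; only the `[[user, model], ...]` pair structure and the alternating `:user`/`:model` flattening mirror the `craft_prompt` change further down in this diff.

    # A persona's examples: an array of [user_message, model_response] pairs.
    examples = [
      ["What is Discourse?", "Discourse is an open-source discussion platform."],
    ]

    # Flatten the pairs into alternating :user/:model messages that sit
    # between the system prompt and the real conversation.
    post_system_examples =
      examples.flatten.each_with_index.map do |content, idx|
        { content: content, type: (idx + 1).odd? ? :user : :model }
      end

    conversation = [{ content: "Tell me more about plugins", type: :user }]
    post_system_examples + conversation
    # => [
    #   { content: "What is Discourse?", type: :user },
    #   { content: "Discourse is an open-source discussion platform.", type: :model },
    #   { content: "Tell me more about plugins", type: :user },
    # ]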
@@ -225,6 +225,10 @@ module DiscourseAi
           permitted[:response_format] = permit_response_format(response_format)
         end

+        if examples = params.dig(:ai_persona, :examples)
+          permitted[:examples] = permit_examples(examples)
+        end
+
         permitted
       end

@@ -251,6 +255,12 @@ module DiscourseAi
           end
         end
       end
+
+      def permit_examples(examples)
+        return [] if !examples.is_a?(Array)
+
+        examples.map { |example_arr| example_arr.take(2).map(&:to_s) }
+      end
     end
   end
 end
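A quick illustration of what `permit_examples` above does with incoming params (the input values are invented): it keeps only the first two entries of each pair, coerces them to strings, and returns an empty array for anything that is not an array.

    permit_examples([["hello", "hi there", "extra entry"], [:question, 42]])
    # => [["hello", "hi there"], ["question", "42"]]

    permit_examples("not an array")
    # => []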
@@ -13,6 +13,7 @@ class AiPersona < ActiveRecord::Base
   validate :system_persona_unchangeable, on: :update, if: :system
   validate :chat_preconditions
   validate :allowed_seeded_model, if: :default_llm_id
+  validate :well_formated_examples
   validates :max_context_posts, numericality: { greater_than: 0 }, allow_nil: true
   # leaves some room for growth but sets a maximum to avoid memory issues
   # we may want to revisit this in the future
@@ -265,6 +266,7 @@ class AiPersona < ActiveRecord::Base
       define_method(:top_p) { @ai_persona&.top_p }
       define_method(:system_prompt) { @ai_persona&.system_prompt || "You are a helpful bot." }
       define_method(:uploads) { @ai_persona&.uploads }
+      define_method(:examples) { @ai_persona&.examples }
     end
   end

@@ -343,6 +345,11 @@ class AiPersona < ActiveRecord::Base
       new_format = response_format_change[1].map { |f| f["key"] }.to_set

       errors.add(:base, error_msg) if old_format != new_format
+    elsif examples_changed?
+      old_examples = examples_change[0].flatten.to_set
+      new_examples = examples_change[1].flatten.to_set
+
+      errors.add(:base, error_msg) if old_examples != new_examples
     end
   end

@@ -363,6 +370,17 @@ class AiPersona < ActiveRecord::Base

     errors.add(:default_llm, I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"))
   end
+
+  def well_formated_examples
+    return if examples.blank?
+
+    if examples.is_a?(Array) &&
+         examples.all? { |e| e.is_a?(Array) && e.length == 2 && e.all?(&:present?) }
+      return
+    end
+
+    errors.add(:examples, I18n.t("discourse_ai.personas.malformed_examples"))
+  end
 end

 # == Schema Information
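For reference, a sketch of which shapes the `well_formated_examples` validation above accepts. The sample values are invented and `persona` stands for any AiPersona instance; the rules (an array of two-element arrays with non-blank entries, blank values skipped) come straight from the validation.

    persona.examples = [["user question", "model answer"]] # valid: a pair of non-blank entries
    persona.examples = []                                   # valid: blank examples are skipped
    persona.examples = [1]                                  # invalid: each example must be an array
    persona.examples = [["only a user message"]]            # invalid: examples must come in pairs
    persona.examples = [["question", ""]]                   # invalid: blank entries are rejected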
@@ -401,6 +419,7 @@ end
 # default_llm_id               :bigint
 # question_consolidator_llm_id :bigint
 # response_format              :jsonb
+# examples                     :jsonb
 #
 # Indexes
 #
@@ -31,7 +31,8 @@ class LocalizedAiPersonaSerializer < ApplicationSerializer
              :allow_topic_mentions,
              :allow_personal_messages,
              :force_default_llm,
-             :response_format
+             :response_format,
+             :examples

   has_one :user, serializer: BasicUserSerializer, embed: :object
   has_many :rag_uploads, serializer: UploadSerializer, embed: :object
@@ -34,6 +34,7 @@ const CREATE_ATTRIBUTES = [
   "allow_chat_channel_mentions",
   "allow_chat_direct_messages",
   "response_format",
+  "examples",
 ];

 const SYSTEM_ATTRIBUTES = [
@@ -61,7 +62,6 @@ const SYSTEM_ATTRIBUTES = [
   "allow_topic_mentions",
   "allow_chat_channel_mentions",
   "allow_chat_direct_messages",
-  "response_format",
 ];

 export default class AiPersona extends RestModel {
@@ -154,6 +154,7 @@ export default class AiPersona extends RestModel {
     this.populateTools(attrs);
     attrs.forced_tool_count = this.forced_tool_count || -1;
     attrs.response_format = attrs.response_format || [];
+    attrs.examples = attrs.examples || [];

     return attrs;
   }
@@ -17,6 +17,7 @@ import AdminUser from "admin/models/admin-user";
 import GroupChooser from "select-kit/components/group-chooser";
 import AiPersonaResponseFormatEditor from "../components/modal/ai-persona-response-format-editor";
 import AiLlmSelector from "./ai-llm-selector";
+import AiPersonaCollapsableExample from "./ai-persona-example";
 import AiPersonaToolOptions from "./ai-persona-tool-options";
 import AiToolSelector from "./ai-tool-selector";
 import RagOptionsFk from "./rag-options-fk";
@@ -230,6 +231,12 @@ export default class PersonaEditor extends Component {
     return this.allTools.filter((tool) => tools.includes(tool.id));
   }

+  @action
+  addExamplesPair(form, data) {
+    const newExamples = [...data.examples, ["", ""]];
+    form.set("examples", newExamples);
+  }
+
   mapToolOptions(currentOptions, toolNames) {
     const updatedOptions = Object.assign({}, currentOptions);

@@ -422,6 +429,32 @@ export default class PersonaEditor extends Component {
           </form.Field>
         {{/unless}}

+        <form.Section
+          @title={{i18n "discourse_ai.ai_persona.examples.title"}}
+          @subtitle={{i18n "discourse_ai.ai_persona.examples.examples_help"}}
+        >
+          {{#unless data.system}}
+            <form.Container>
+              <form.Button
+                @action={{fn this.addExamplesPair form data}}
+                @label="discourse_ai.ai_persona.examples.new"
+                class="ai-persona-editor__new_example"
+              />
+            </form.Container>
+          {{/unless}}
+
+          {{#if (gt data.examples.length 0)}}
+            <form.Collection @name="examples" as |exCollection exCollectionIdx|>
+              <AiPersonaCollapsableExample
+                @examplesCollection={{exCollection}}
+                @exampleNumber={{exCollectionIdx}}
+                @system={{data.system}}
+                @form={{form}}
+              />
+            </form.Collection>
+          {{/if}}
+        </form.Section>
+
         <form.Section @title={{i18n "discourse_ai.ai_persona.ai_tools"}}>
           <form.Field
             @name="tools"
@@ -0,0 +1,67 @@
+import Component from "@glimmer/component";
+import { tracked } from "@glimmer/tracking";
+import { concat } from "@ember/helper";
+import { on } from "@ember/modifier";
+import { action } from "@ember/object";
+import { eq } from "truth-helpers";
+import icon from "discourse/helpers/d-icon";
+import { i18n } from "discourse-i18n";
+
+export default class AiPersonaCollapsableExample extends Component {
+  @tracked collapsed = true;
+
+  get caretIcon() {
+    return this.collapsed ? "angle-right" : "angle-down";
+  }
+
+  @action
+  toggleExample() {
+    this.collapsed = !this.collapsed;
+  }
+
+  @action
+  deletePair() {
+    this.collapsed = true;
+    this.args.examplesCollection.remove(this.args.exampleNumber);
+  }
+
+  get exampleTitle() {
+    return i18n("discourse_ai.ai_persona.examples.collapsable_title", {
+      number: this.args.exampleNumber + 1,
+    });
+  }
+
+  <template>
+    <div role="button" {{on "click" this.toggleExample}}>
+      <span>{{icon this.caretIcon}}</span>
+      {{this.exampleTitle}}
+    </div>
+    {{#unless this.collapsed}}
+      <@examplesCollection.Collection as |exPair pairIdx|>
+        <exPair.Field
+          @title={{i18n
+            (concat
+              "discourse_ai.ai_persona.examples."
+              (if (eq pairIdx 0) "user" "model")
+            )
+          }}
+          @validation="required|length:1,100"
+          @disabled={{@system}}
+          as |field|
+        >
+          <field.Textarea />
+        </exPair.Field>
+      </@examplesCollection.Collection>
+
+      {{#unless @system}}
+        <@form.Container>
+          <@form.Button
+            @action={{this.deletePair}}
+            @label="discourse_ai.ai_persona.examples.remove"
+            class="ai-persona-editor__delete_example btn-danger"
+          />
+        </@form.Container>
+      {{/unless}}
+    {{/unless}}
+  </template>
+}
@@ -330,6 +330,14 @@ en:
         modal:
           root_title: "Response structure"
           key_title: "Key"
+      examples:
+        title: Examples
+        examples_help: Simulate previous interactions with the LLM and ground it to produce better result.
+        new: New example
+        remove: Delete example
+        collapsable_title: "Example #%{number}"
+        user: "User message"
+        model: "Model response"

       list:
         enabled: "AI Bot?"
@@ -495,6 +495,9 @@ en:
         other: "We couldn't delete this model because %{settings} are using it. Update the settings and try again."
       cannot_edit_builtin: "You can't edit a built-in model."

+    personas:
+      malformed_examples: "The given examples have the wrong format."
+
     embeddings:
       delete_failed: "This model is currently in use. Update the `ai embeddings selected model` first."
       cannot_edit_builtin: "You can't edit a built-in model."
@@ -74,6 +74,8 @@ DiscourseAi::Personas::Persona.system_personas.each do |persona_class, id|

       persona.response_format = instance.response_format

+      persona.examples = instance.examples
+
       persona.system_prompt = instance.system_prompt
       persona.top_p = instance.top_p
       persona.temperature = instance.temperature
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class AddExamplesToPersonas < ActiveRecord::Migration[7.2]
+  def change
+    add_column :ai_personas, :examples, :jsonb
+  end
+end
@@ -164,6 +164,10 @@ module DiscourseAi
         nil
       end

+      def examples
+        []
+      end
+
       def available_tools
         self
           .class
@@ -173,11 +177,7 @@ module DiscourseAi
       end

       def craft_prompt(context, llm: nil)
-        system_insts =
-          system_prompt.gsub(/\{(\w+)\}/) do |match|
-            found = context.lookup_template_param(match[1..-2])
-            found.nil? ? match : found.to_s
-          end
+        system_insts = replace_placeholders(system_prompt, context)

         prompt_insts = <<~TEXT.strip
         #{system_insts}
@@ -206,10 +206,21 @@ module DiscourseAi

         prompt_insts << fragments_guidance if fragments_guidance.present?

+        post_system_examples = []
+
+        if examples.present?
+          examples.flatten.each_with_index do |e, idx|
+            post_system_examples << {
+              content: replace_placeholders(e, context),
+              type: (idx + 1).odd? ? :user : :model,
+            }
+          end
+        end
+
         prompt =
           DiscourseAi::Completions::Prompt.new(
             prompt_insts,
-            messages: context.messages,
+            messages: post_system_examples.concat(context.messages),
             topic_id: context.topic_id,
             post_id: context.post_id,
           )
@@ -239,6 +250,13 @@ module DiscourseAi

       protected

+      def replace_placeholders(content, context)
+        content.gsub(/\{(\w+)\}/) do |match|
+          found = context.lookup_template_param(match[1..-2])
+          found.nil? ? match : found.to_s
+        end
+      end
+
       def tool_instance(tool_call, bot_user:, llm:, context:, existing_tools:)
         function_id = tool_call.id
         function_name = tool_call.name
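A hedged sketch of how `replace_placeholders` above behaves, assuming a context object whose `lookup_template_param` knows `site_url` and returns nil for unknown keys (the values are invented): known `{placeholders}` are substituted, unknown ones are left untouched.

    # context.lookup_template_param("site_url")    # => "https://example.com"
    # context.lookup_template_param("unknown_key") # => nil
    replace_placeholders("Browse {site_url}/latest or ask about {unknown_key}", context)
    # => "Browse https://example.com/latest or ask about {unknown_key}"

This is also why persona examples (and the short summarizer example below) can carry placeholders such as {resource_url}: they are resolved the same way when the prompt is crafted.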
@@ -32,6 +32,15 @@ module DiscourseAi
         def response_format
           [{ key: "summary", type: "string" }]
         end
+
+        def examples
+          [
+            [
+              "Here are the posts inside <input></input> XML tags:\n\n<input>1) user1 said: I love Mondays 2) user2 said: I hate Mondays</input>\n\nGenerate a concise, coherent summary of the text above maintaining the original language.",
+              "Two users are sharing their feelings toward Mondays. [user1]({resource_url}/1) hates them, while [user2]({resource_url}/2) loves them.",
+            ],
+          ]
+        end
       end
     end
   end
@@ -44,20 +44,7 @@ module DiscourseAi
           input =
             contents.map { |item| "(#{item[:id]} #{item[:poster]} said: #{item[:text]} " }.join

-          messages = []
-          messages << {
-            type: :user,
-            content:
-              "Here are the posts inside <input></input> XML tags:\n\n<input>1) user1 said: I love Mondays 2) user2 said: I hate Mondays</input>\n\nGenerate a concise, coherent summary of the text above maintaining the original language.",
-          }
-
-          messages << {
-            type: :model,
-            content:
-              "Two users are sharing their feelings toward Mondays. [user1](#{resource_path}/1) hates them, while [user2](#{resource_path}/2) loves them.",
-          }
-
-          messages << { type: :user, content: <<~TEXT.strip }
+          [{ type: :user, content: <<~TEXT.strip }]
             #{content_title.present? ? "The discussion title is: " + content_title + ".\n" : ""}
             Here are the posts, inside <input></input> XML tags:
@@ -67,8 +54,6 @@ module DiscourseAi

             Generate a concise, coherent summary of the text above maintaining the original language.
           TEXT
-
-          messages
         end

         private
@@ -8,6 +8,7 @@ class TestPersona < DiscourseAi::Personas::Persona
       DiscourseAi::Personas::Tools::Image,
     ]
   end

   def system_prompt
     <<~PROMPT
       {site_url}
@@ -445,6 +446,29 @@ RSpec.describe DiscourseAi::Personas::Persona do
         expect(crafted_system_prompt).not_to include("fragment-n10") # Fragment #10 not included
       end
     end
+
+    context "when the persona has examples" do
+      fab!(:examples_persona) do
+        Fabricate(
+          :ai_persona,
+          examples: [["User message", "assistant response"]],
+          allowed_group_ids: [Group::AUTO_GROUPS[:trust_level_0]],
+        )
+      end
+
+      it "includes them before the context messages" do
+        custom_persona =
+          DiscourseAi::Personas::Persona.find_by(id: examples_persona.id, user: user).new
+
+        post_system_prompt_msgs = custom_persona.craft_prompt(with_cc).messages.last(3)
+
+        expect(post_system_prompt_msgs).to contain_exactly(
+          { content: "User message", type: :user },
+          { content: "assistant response", type: :model },
+          { content: "Tell me the time", type: :user },
+        )
+      end
+    end
     end
   end
 end
@@ -1,90 +1,79 @@
 # frozen_string_literal: true

 RSpec.describe AiPersona do
+  subject(:basic_persona) do
+    AiPersona.new(
+      name: "test",
+      description: "test",
+      system_prompt: "test",
+      tools: [],
+      allowed_group_ids: [],
+    )
+  end
+
   fab!(:llm_model)
   fab!(:seeded_llm_model) { Fabricate(:llm_model, id: -1) }

   it "validates context settings" do
-    persona =
-      AiPersona.new(
-        name: "test",
-        description: "test",
-        system_prompt: "test",
-        tools: [],
-        allowed_group_ids: [],
-      )
-
-    expect(persona.valid?).to eq(true)
-
-    persona.max_context_posts = 0
-    expect(persona.valid?).to eq(false)
-    expect(persona.errors[:max_context_posts]).to eq(["must be greater than 0"])
-
-    persona.max_context_posts = 1
-    expect(persona.valid?).to eq(true)
-
-    persona.max_context_posts = nil
-    expect(persona.valid?).to eq(true)
+    expect(basic_persona.valid?).to eq(true)
+
+    basic_persona.max_context_posts = 0
+    expect(basic_persona.valid?).to eq(false)
+    expect(basic_persona.errors[:max_context_posts]).to eq(["must be greater than 0"])
+
+    basic_persona.max_context_posts = 1
+    expect(basic_persona.valid?).to eq(true)
+
+    basic_persona.max_context_posts = nil
+    expect(basic_persona.valid?).to eq(true)
   end

   it "validates tools" do
-    persona =
-      AiPersona.new(
-        name: "test",
-        description: "test",
-        system_prompt: "test",
-        tools: [],
-        allowed_group_ids: [],
-      )
-
     Fabricate(:ai_tool, id: 1)
     Fabricate(:ai_tool, id: 2, name: "Archie search", tool_name: "search")

-    expect(persona.valid?).to eq(true)
+    expect(basic_persona.valid?).to eq(true)

-    persona.tools = %w[search image_generation]
-    expect(persona.valid?).to eq(true)
+    basic_persona.tools = %w[search image_generation]
+    expect(basic_persona.valid?).to eq(true)

-    persona.tools = %w[search image_generation search]
-    expect(persona.valid?).to eq(false)
-    expect(persona.errors[:tools]).to eq(["Can not have duplicate tools"])
+    basic_persona.tools = %w[search image_generation search]
+    expect(basic_persona.valid?).to eq(false)
+    expect(basic_persona.errors[:tools]).to eq(["Can not have duplicate tools"])

-    persona.tools = [["custom-1", { test: "test" }, false], ["custom-2", { test: "test" }, false]]
-    expect(persona.valid?).to eq(true)
-    expect(persona.errors[:tools]).to eq([])
+    basic_persona.tools = [
+      ["custom-1", { test: "test" }, false],
+      ["custom-2", { test: "test" }, false],
+    ]
+    expect(basic_persona.valid?).to eq(true)
+    expect(basic_persona.errors[:tools]).to eq([])

-    persona.tools = [["custom-1", { test: "test" }, false], ["custom-1", { test: "test" }, false]]
-    expect(persona.valid?).to eq(false)
-    expect(persona.errors[:tools]).to eq(["Can not have duplicate tools"])
+    basic_persona.tools = [
+      ["custom-1", { test: "test" }, false],
+      ["custom-1", { test: "test" }, false],
+    ]
+    expect(basic_persona.valid?).to eq(false)
+    expect(basic_persona.errors[:tools]).to eq(["Can not have duplicate tools"])

-    persona.tools = [
+    basic_persona.tools = [
       ["custom-1", { test: "test" }, false],
       ["custom-2", { test: "test" }, false],
       "image_generation",
     ]
-    expect(persona.valid?).to eq(true)
-    expect(persona.errors[:tools]).to eq([])
+    expect(basic_persona.valid?).to eq(true)
+    expect(basic_persona.errors[:tools]).to eq([])

-    persona.tools = [
+    basic_persona.tools = [
       ["custom-1", { test: "test" }, false],
       ["custom-2", { test: "test" }, false],
       "Search",
     ]
-    expect(persona.valid?).to eq(false)
-    expect(persona.errors[:tools]).to eq(["Can not have duplicate tools"])
+    expect(basic_persona.valid?).to eq(false)
+    expect(basic_persona.errors[:tools]).to eq(["Can not have duplicate tools"])
   end

   it "allows creation of user" do
-    persona =
-      AiPersona.create!(
-        name: "test",
-        description: "test",
-        system_prompt: "test",
-        tools: [],
-        allowed_group_ids: [],
-      )
-
-    user = persona.create_user!
+    user = basic_persona.create_user!
     expect(user.username).to eq("test_bot")
     expect(user.name).to eq("Test")
     expect(user.bot?).to be(true)
@@ -223,25 +212,17 @@ RSpec.describe AiPersona do
   end

   it "validates allowed seeded model" do
-    persona =
-      AiPersona.new(
-        name: "test",
-        description: "test",
-        system_prompt: "test",
-        tools: [],
-        allowed_group_ids: [],
-        default_llm_id: seeded_llm_model.id,
-      )
+    basic_persona.default_llm_id = seeded_llm_model.id

     SiteSetting.ai_bot_allowed_seeded_models = ""

-    expect(persona.valid?).to eq(false)
-    expect(persona.errors[:default_llm]).to include(
+    expect(basic_persona.valid?).to eq(false)
+    expect(basic_persona.errors[:default_llm]).to include(
       I18n.t("discourse_ai.llm.configuration.invalid_seeded_model"),
     )

     SiteSetting.ai_bot_allowed_seeded_models = "-1"
-    expect(persona.valid?).to eq(true)
+    expect(basic_persona.valid?).to eq(true)
   end

   it "does not leak caches between sites" do
@@ -268,6 +249,7 @@ RSpec.describe AiPersona do
         system_prompt: "system persona",
         tools: %w[Search Time],
         response_format: [{ key: "summary", type: "string" }],
+        examples: [%w[user_msg1 assistant_msg1], %w[user_msg2 assistant_msg2]],
         system: true,
       )
     end
@@ -302,6 +284,40 @@ RSpec.describe AiPersona do
         ActiveRecord::RecordInvalid,
       )
     end
+
+    it "doesn't accept changes to examples" do
+      other_examples = [%w[user_msg1 assistant_msg1]]
+
+      expect { system_persona.update!(examples: other_examples) }.to raise_error(
+        ActiveRecord::RecordInvalid,
+      )
+    end
+  end
+
+  describe "validates examples format" do
+    it "doesn't accept examples that are not arrays" do
+      basic_persona.examples = [1]
+
+      expect(basic_persona.valid?).to eq(false)
+      expect(basic_persona.errors[:examples].first).to eq(
+        I18n.t("discourse_ai.personas.malformed_examples"),
+      )
+    end
+
+    it "doesn't accept examples that don't come in pairs" do
+      basic_persona.examples = [%w[user_msg1]]
+
+      expect(basic_persona.valid?).to eq(false)
+      expect(basic_persona.errors[:examples].first).to eq(
+        I18n.t("discourse_ai.personas.malformed_examples"),
+      )
+    end
+
+    it "works when example is well formatted" do
+      basic_persona.examples = [%w[user_msg1 assistant1]]
+
+      expect(basic_persona.valid?).to eq(true)
+    end
   end
 end
@@ -186,6 +186,7 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
         question_consolidator_llm_id: llm_model.id,
         forced_tool_count: 2,
         response_format: [{ key: "summary", type: "string" }],
+        examples: [%w[user_msg1 assistant_msg1], %w[user_msg2 assistant_msg2]],
       }
     end

@@ -213,6 +214,7 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
       expect(persona_json["response_format"].map { |rf| rf["key"] }).to contain_exactly(
         "summary",
       )
+      expect(persona_json["examples"]).to eq(valid_attributes[:examples])

       persona = AiPersona.find(persona_json["id"])
