FEATURE: Use different personas to power AI helper features.
Each AI helper prompt can now be edited individually through a persona, which also lets you limit access to specific groups, assign a different LLM per feature, and more.
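For context, a minimal sketch of how the new wiring is intended to be used (not part of this commit): each helper feature resolves its persona through a hidden `ai_helper_*_persona` site setting added below, and the persona then controls group access and the default LLM. The persona name "My Proofreader" and the console-style calls are illustrative assumptions based on the columns and settings that appear in this diff.

    # Sketch only: assumes a Rails console on a site running discourse-ai,
    # and a hypothetical custom persona named "My Proofreader".
    custom = AiPersona.find_by(name: "My Proofreader")

    # Each helper feature reads its persona from a hidden site setting,
    # e.g. ai_helper_proofreader_persona (default "-22", the seeded Proofreader).
    SiteSetting.ai_helper_proofreader_persona = custom.id.to_s

    # Group access and the LLM now come from the persona itself rather than
    # from helper-specific settings (column names assumed from this diff).
    custom.update!(
      allowed_group_ids: [Group::AUTO_GROUPS[:trust_level_1]],
      default_llm_id: LlmModel.last&.id, # otherwise the helper falls back to the newest LLM config
    )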
parent cab39839fd · commit 0338dbea23
@@ -25,25 +25,24 @@ module DiscourseAi
input = get_text_param!
force_default_locale = params[:force_default_locale] || false
prompt = CompletionPrompt.find_by(id: params[:mode])
raise Discourse::InvalidParameters.new(:mode) if params[:mode].blank?
raise Discourse::InvalidParameters.new(:mode) if !prompt || !prompt.enabled?
if prompt.id == CompletionPrompt::CUSTOM_PROMPT
if params[:mode] == DiscourseAi::AiHelper::Assistant::CUSTOM_PROMPT
raise Discourse::InvalidParameters.new(:custom_prompt) if params[:custom_prompt].blank?
prompt.custom_instruction = params[:custom_prompt]
end
return suggest_thumbnails(input) if prompt.id == CompletionPrompt::ILLUSTRATE_POST
if params[:mode] == DiscourseAi::AiHelper::Assistant::ILLUSTRATE_POST
return suggest_thumbnails(input)
end
hijack do
render json:
DiscourseAi::AiHelper::Assistant.new.generate_and_send_prompt(
prompt,
params[:mode],
input,
current_user,
force_default_locale: force_default_locale,
custom_prompt: params[:custom_prompt],
),
status: 200
end
@@ -60,13 +59,10 @@ module DiscourseAi
input = get_text_param!
end
prompt = CompletionPrompt.enabled_by_name("generate_titles")
raise Discourse::InvalidParameters.new(:mode) if !prompt
hijack do
render json:
DiscourseAi::AiHelper::Assistant.new.generate_and_send_prompt(
prompt,
DiscourseAi::AiHelper::Assistant::GENERATE_TITLES,
input,
current_user,
),
@@ -115,12 +111,12 @@ module DiscourseAi
location = params[:location]
raise Discourse::InvalidParameters.new(:location) if !location
prompt = CompletionPrompt.find_by(id: params[:mode])
raise Discourse::InvalidParameters.new(:mode) if params[:mode].blank?
if params[:mode] == DiscourseAi::AiHelper::Assistant::ILLUSTRATE_POST
return suggest_thumbnails(input)
end
raise Discourse::InvalidParameters.new(:mode) if !prompt || !prompt.enabled?
return suggest_thumbnails(input) if prompt.id == CompletionPrompt::ILLUSTRATE_POST
if prompt.id == CompletionPrompt::CUSTOM_PROMPT
if params[:mode] == DiscourseAi::AiHelper::Assistant::CUSTOM_PROMPT
raise Discourse::InvalidParameters.new(:custom_prompt) if params[:custom_prompt].blank?
end
@@ -133,7 +129,7 @@ module DiscourseAi
:stream_composer_helper,
user_id: current_user.id,
text: text,
prompt: prompt.name,
prompt: params[:mode],
custom_prompt: params[:custom_prompt],
force_default_locale: params[:force_default_locale] || false,
client_id: params[:client_id],
@@ -149,7 +145,7 @@ module DiscourseAi
post_id: post.id,
user_id: current_user.id,
text: text,
prompt: prompt.name,
prompt: params[:mode],
custom_prompt: params[:custom_prompt],
client_id: params[:client_id],
)
@@ -10,19 +10,16 @@ module Jobs
return unless args[:text]
return unless args[:client_id]
prompt = CompletionPrompt.enabled_by_name(args[:prompt])
if prompt.id == CompletionPrompt::CUSTOM_PROMPT
prompt.custom_instruction = args[:custom_prompt]
end
helper_mode = args[:prompt]
DiscourseAi::AiHelper::Assistant.new.stream_prompt(
prompt,
helper_mode,
args[:text],
user,
"/discourse-ai/ai-helper/stream_composer_suggestion",
force_default_locale: args[:force_default_locale],
client_id: args[:client_id],
custom_prompt: args[:custom_prompt],
)
end
end
@@ -14,16 +14,12 @@ module Jobs
return unless user.guardian.can_see?(post)
prompt = CompletionPrompt.enabled_by_name(args[:prompt])
helper_mode = args[:prompt]
if prompt.id == CompletionPrompt::CUSTOM_PROMPT
prompt.custom_instruction = args[:custom_prompt]
end
if prompt.name == "explain"
input = <<~TEXT
<term>#{args[:text]}</term>
<context>#{post.raw}</context>
if helper_mode == DiscourseAi::AiHelper::Assistant::EXPLAIN
input = <<~TEXT.strip
<term>#{args[:text]}</term>
<context>#{post.raw}</context>
<topic>#{topic.title}</topic>
#{reply_to ? "<replyTo>#{reply_to.raw}</replyTo>" : nil}
TEXT
@@ -32,10 +28,11 @@ module Jobs
end
DiscourseAi::AiHelper::Assistant.new.stream_prompt(
prompt,
helper_mode,
input,
user,
"/discourse-ai/ai-helper/stream_suggestion/#{post.id}",
custom_prompt: args[:custom_prompt],
)
end
end
@@ -10,6 +10,7 @@ module Jobs
return if !SiteSetting.discourse_ai_enabled
return if !SiteSetting.ai_translation_enabled
limit = SiteSetting.ai_translation_backfill_rate
return if limit == 0
topics = Topic.where(locale: nil, deleted_at: nil).where("topics.user_id > 0")
@@ -73,7 +73,7 @@ export default class AiComposerHelperMenu extends Component {
}
prompts.forEach((p) => {
this.prompts[p.id] = p;
this.prompts[p.name] = p;
});
this.promptTypes = prompts.reduce((memo, p) => {
@@ -116,7 +116,7 @@ export default class AiComposerHelperMenu extends Component {
if (option.name === "illustrate_post") {
return this.modal.show(ThumbnailSuggestion, {
model: {
mode: option.id,
mode: option.name,
selectedText: this.args.data.selectedText,
thumbnails: this.thumbnailSuggestions,
},
@@ -128,7 +128,7 @@ export default class AiComposerHelperMenu extends Component {
return this.modal.show(ModalDiffModal, {
model: {
mode: option.id,
mode: option.name,
selectedText: this.args.data.selectedText,
revert: this.undoAiAction,
toolbarEvent: this.args.data.toolbarEvent,
@@ -29,13 +29,12 @@ export default class AiHelperOptionsList extends Component {
@submit={{@performAction}}
/>
{{else}}
<li data-name={{option.translated_name}} data-value={{option.id}}>
<li data-name={{option.translated_name}} data-value={{option.name}}>
<DButton
@icon={{option.icon}}
@translatedLabel={{option.translated_name}}
@action={{fn @performAction option}}
data-name={{option.name}}
data-value={{option.id}}
class="ai-helper-options__button"
>
{{#if (and (eq option.name "proofread") this.showShortcut)}}
@@ -202,7 +202,7 @@ export default class AiPostHelperMenu extends Component {
this._activeAiRequest = ajax("/discourse-ai/ai-helper/suggest", {
method: "POST",
data: {
mode: option.id,
mode: option.name,
text: this.args.data.quoteState.buffer,
custom_prompt: this.customPromptValue,
},
@@ -238,7 +238,7 @@ export default class AiPostHelperMenu extends Component {
method: "POST",
data: {
location: "post",
mode: option.id,
mode: option.name,
text: this.args.data.selectedText,
post_id: this.args.data.quoteState.postId,
custom_prompt: this.customPromptValue,
@@ -34,7 +34,7 @@ export default class AiEditSuggestionButton extends Component {
this._activeAIRequest = ajax("/discourse-ai/ai-helper/suggest", {
method: "POST",
data: {
mode: this.mode.id,
mode: this.mode.name,
text: this.args.outletArgs.initialValue,
custom_prompt: "",
},
@@ -44,7 +44,7 @@ function initializeAiHelperTrigger(api) {
const mode = currentUser?.ai_helper_prompts.find(
(p) => p.name === "proofread"
).id;
).name;
modal.show(ModalDiffModal, {
model: {
@@ -339,6 +339,33 @@ en:
concept_deduplicator:
  name: "Concept Deduplicator"
  description: "AI Bot specialized in deduplicating concepts"
custom_prompt:
  name: "Custom Prompt"
  description: "Default persona powering the AI helper's custom prompt feature"
smart_dates:
  name: "Smart Dates"
  description: "Default persona powering the AI helper's smart dates feature"
markdown_table_generator:
  name: "Markdown Table Generator"
  description: "Default persona powering the AI helper's generate Markdown table feature"
post_illustrator:
  name: "Post Illustrator"
  description: "Generates StableDiffusion prompts to power the AI helper's illustrate post feature"
proofreader:
  name: "Proofreader"
  description: "Default persona powering the AI helper's proofread text feature"
titles_generator:
  name: "Titles Generator"
  description: "Default persona powering the AI helper's suggest topic titles feature"
tutor:
  name: "Tutor"
  description: "Default persona powering the AI helper's explain feature"
translator:
  name: "Translator"
  description: "Default persona powering the AI helper's translator feature"
image_captioner:
  name: "Image Captions"
  description: "Default persona powering the AI helper's image caption feature"
topic_not_found: "Summary unavailable, topic not found!"
summarizing: "Summarizing topic"
searching: "Searching for: '%{query}'"
@@ -104,13 +104,14 @@ discourse_ai:
  allow_any: false
  type: enum
  enum: "DiscourseAi::Configuration::LlmEnumerator"
  validator: "DiscourseAi::Configuration::LlmValidator"
ai_helper_custom_prompts_allowed_groups:
  hidden: true
ai_helper_custom_prompts_allowed_groups: # Deprecated. TODO(roman): Remove 2025-09-01
  type: group_list
  list_type: compact
  default: "3" # 3: @staff
  allow_any: false
  refresh: true
  hidden: true
post_ai_helper_allowed_groups:
  type: group_list
  list_type: compact
@@ -143,6 +144,7 @@ discourse_ai:
  default: ""
  type: enum
  enum: "DiscourseAi::Configuration::LlmVisionEnumerator"
  hidden: true
ai_auto_image_caption_allowed_groups:
  client: true
  type: group_list
@@ -160,6 +162,42 @@ discourse_ai:
  hidden: true
  type: list
  list_type: compact
ai_helper_proofreader_persona:
  default: "-22"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_tittle_suggestions_persona:
  default: "-23"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_explain_persona:
  default: "-24"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_post_illustrator_persona:
  default: "-21"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_smart_dates_persona:
  default: "-19"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_translator_persona:
  default: "-25"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_markdown_tables_persona:
  default: "-20"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_custom_prompt_persona:
  default: "-18"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"
ai_helper_image_caption_persona:
  default: "-26"
  type: enum
  enum: "DiscourseAi::Configuration::PersonaEnumerator"

ai_embeddings_enabled:
  default: false
@ -1,269 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -301
|
||||
cp.name = "translate"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:text]
|
||||
cp.stop_sequences = ["\n</output>", "</output>"]
|
||||
cp.temperature = 0.2
|
||||
cp.messages = {
|
||||
insts: <<~TEXT,
|
||||
I want you to act as an %LANGUAGE% translator, spelling corrector and improver. I will write to you
|
||||
in any language and you will detect the language, translate it and answer in the corrected and
|
||||
improved version of my text, in %LANGUAGE%. I want you to replace my simplified A0-level words and
|
||||
sentences with more beautiful and elegant, upper level %LANGUAGE% words and sentences.
|
||||
Keep the meaning same, but make them more literary. I want you to only reply the correction,
|
||||
the improvements and nothing else, do not write explanations.
|
||||
You will find the text between <input></input> XML tags.
|
||||
Include your translation between <output></output> XML tags.
|
||||
TEXT
|
||||
examples: [["<input>Hello</input>", "<output>...%LANGUAGE% translation...</output>"]],
|
||||
}
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -303
|
||||
cp.name = "proofread"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:diff]
|
||||
cp.temperature = 0
|
||||
cp.stop_sequences = ["\n</output>"]
|
||||
cp.messages = {
|
||||
insts: <<~TEXT,
|
||||
You are a markdown proofreader. You correct egregious typos and phrasing issues but keep the user's original voice.
|
||||
You do not touch code blocks. I will provide you with text to proofread. If nothing needs fixing, then you will echo the text back.
|
||||
You will find the text between <input></input> XML tags.
|
||||
You will ALWAYS return the corrected text between <output></output> XML tags.
|
||||
TEXT
|
||||
examples: [
|
||||
[
|
||||
"<input></input>",
|
||||
"<output></output>",
|
||||
],
|
||||
[<<~TEXT, "The rain in Spain, stays mainly in the Plane."],
|
||||
<input>
|
||||
The rain in spain stays mainly in the plane.
|
||||
</input>
|
||||
TEXT
|
||||
[
|
||||
"<input>The rain in Spain, stays mainly in the Plane.</input>",
|
||||
"<output>The rain in Spain, stays mainly in the Plane.</output>",
|
||||
],
|
||||
[<<~TEXT, <<~TEXT],
|
||||
<input>
|
||||
Hello,
|
||||
|
||||
Sometimes the logo isn't changing automatically when color scheme changes.
|
||||
|
||||

|
||||
</input>
|
||||
TEXT
|
||||
<output>
|
||||
Hello,
|
||||
Sometimes the logo does not change automatically when the color scheme changes.
|
||||

|
||||
</output>
|
||||
TEXT
|
||||
[<<~TEXT, <<~TEXT],
|
||||
<input>
|
||||
Any ideas what is wrong with this peace of cod?
|
||||
> This quot contains a typo
|
||||
```ruby
|
||||
# this has speling mistakes
|
||||
testin.atypo = 11
|
||||
baad = "bad"
|
||||
```
|
||||
</input>
|
||||
TEXT
|
||||
<output>
|
||||
Any ideas what is wrong with this piece of code?
|
||||
> This quot contains a typo
|
||||
```ruby
|
||||
# This has spelling mistakes
|
||||
testing.a_typo = 11
|
||||
bad = "bad"
|
||||
```
|
||||
</output>
|
||||
TEXT
|
||||
],
|
||||
}
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -304
|
||||
cp.name = "markdown_table"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:diff]
|
||||
cp.temperature = 0.5
|
||||
cp.stop_sequences = ["\n</output>"]
|
||||
cp.messages = {
|
||||
insts: <<~TEXT,
|
||||
You are a markdown table formatter, I will provide you text inside <input></input> XML tags and you will format it into a markdown table
|
||||
TEXT
|
||||
examples: [
|
||||
["<input>sam,joe,jane\nage: 22| 10|11</input>", <<~TEXT],
|
||||
<output>
|
||||
| | sam | joe | jane |
|
||||
|---|---|---|---|
|
||||
| age | 22 | 10 | 11 |
|
||||
</output>
|
||||
TEXT
|
||||
[<<~TEXT, <<~TEXT],
|
||||
<input>
|
||||
sam: speed 100, age 22
|
||||
jane: age 10
|
||||
fred: height 22
|
||||
</input>
|
||||
TEXT
|
||||
<output>
|
||||
| | speed | age | height |
|
||||
|---|---|---|---|
|
||||
| sam | 100 | 22 | - |
|
||||
| jane | - | 10 | - |
|
||||
| fred | - | - | 22 |
|
||||
</output>
|
||||
TEXT
|
||||
[<<~TEXT, <<~TEXT],
|
||||
<input>
|
||||
chrome 22ms (first load 10ms)
|
||||
firefox 10ms (first load: 9ms)
|
||||
</input>
|
||||
TEXT
|
||||
<output>
|
||||
| Browser | Load Time (ms) | First Load Time (ms) |
|
||||
|---|---|---|
|
||||
| Chrome | 22 | 10 |
|
||||
| Firefox | 10 | 9 |
|
||||
</output>
|
||||
TEXT
|
||||
],
|
||||
}
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -305
|
||||
cp.name = "custom_prompt"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:diff]
|
||||
cp.messages = { insts: <<~TEXT }
|
||||
You are a helpful assistant. I will give you instructions inside <input></input> XML tags.
|
||||
You will look at them and reply with a result.
|
||||
TEXT
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -306
|
||||
cp.name = "explain"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:text]
|
||||
cp.messages = { insts: <<~TEXT }
|
||||
You are a tutor explaining a term to a student in a specific context.
|
||||
|
||||
I will provide everything you need to know inside <input> tags, which consists of the term I want you
|
||||
to explain inside <term> tags, the context of where it was used inside <context> tags, the title of
|
||||
the topic where it was used inside <topic> tags, and optionally, the previous post in the conversation
|
||||
in <replyTo> tags.
|
||||
|
||||
Using all this information, write a paragraph with a brief explanation
|
||||
of what the term means. Format the response using Markdown. Reply only with the explanation and
|
||||
nothing more.
|
||||
TEXT
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -307
|
||||
cp.name = "generate_titles"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:list]
|
||||
cp.messages = {
|
||||
insts: <<~TEXT,
|
||||
I want you to act as a title generator for written pieces. I will provide you with a text,
|
||||
and you will generate five titles. Please keep the title concise and under 20 words,
|
||||
and ensure that the meaning is maintained. Replies will utilize the language type of the topic.
|
||||
I want you to only reply the list of options and nothing else, do not write explanations.
|
||||
Never ever use colons in the title. Always use sentence case, using a capital letter at
|
||||
the start of the title, never start the title with a lower case letter. Proper nouns in the title
|
||||
can have a capital letter, and acronyms like LLM can use capital letters. Format some titles
|
||||
as questions, some as statements. Make sure to use question marks if the title is a question.
|
||||
Each title you generate must be separated by *
|
||||
You will find the text between <input></input> XML tags.
|
||||
TEXT
|
||||
examples: [
|
||||
[
|
||||
"<input>In the labyrinth of time, a solitary horse, etched in gold by the setting sun, embarked on an infinite journey.</input>",
|
||||
"<item>The solitary horse</item><item>The horse etched in gold</item><item>A horse's infinite journey</item><item>A horse lost in time</item><item>A horse's last ride</item>",
|
||||
],
|
||||
],
|
||||
post_insts: "Wrap each title between <item></item> XML tags.",
|
||||
}
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -308
|
||||
cp.name = "illustrate_post"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:list]
|
||||
cp.messages = {}
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -309
|
||||
cp.name = "detect_text_locale"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:text]
|
||||
cp.messages = {
|
||||
insts: <<~TEXT,
|
||||
I want you to act as a language expert, determining the locale for a set of text.
|
||||
The locale is a language identifier, such as "en" for English, "de" for German, etc,
|
||||
and can also include a region identifier, such as "en-GB" for British English, or "zh-Hans" for Simplified Chinese.
|
||||
I will provide you with text, and you will determine the locale of the text.
|
||||
You will find the text between <input></input> XML tags.
|
||||
Include your locale between <output></output> XML tags.
|
||||
TEXT
|
||||
examples: [["<input>Hello my favourite colour is red</input>", "<output>en-GB</output>"]],
|
||||
}
|
||||
end
|
||||
|
||||
CompletionPrompt.seed do |cp|
|
||||
cp.id = -310
|
||||
cp.name = "replace_dates"
|
||||
cp.prompt_type = CompletionPrompt.prompt_types[:diff]
|
||||
cp.temperature = 0
|
||||
cp.stop_sequences = ["\n</output>"]
|
||||
cp.messages = {
|
||||
insts: <<~TEXT,
|
||||
You are a date and time formatter for Discourse posts. Convert natural language time references into date placeholders.
|
||||
Do not modify any markdown, code blocks, or existing date formats.
|
||||
|
||||
Here's the temporal context:
|
||||
{{temporal_context}}
|
||||
|
||||
Available date placeholder formats:
|
||||
- Simple day without time: {{date:1}} for tomorrow, {{date:7}} for a week from today
|
||||
- Specific time: {{datetime:2pm+1}} for 2 PM tomorrow
|
||||
- Time range: {{datetime:2pm+1:4pm+1}} for tomorrow 2 PM to 4 PM
|
||||
|
||||
You will find the text between <input></input> XML tags.
|
||||
Return the text with dates converted between <output></output> XML tags.
|
||||
TEXT
|
||||
examples: [
|
||||
[
|
||||
"<input>The meeting is at 2pm tomorrow</input>",
|
||||
"<output>The meeting is at {{datetime:2pm+1}}</output>",
|
||||
],
|
||||
["<input>Due in 3 days</input>", "<output>Due {{date:3}}</output>"],
|
||||
[
|
||||
"<input>Meeting next Tuesday at 2pm</input>",
|
||||
"<output>Meeting {{next_week:tuesday-2pm}}</output>",
|
||||
],
|
||||
[
|
||||
"<input>Meeting from 2pm to 4pm tomorrow</input>",
|
||||
"<output>Meeting {{datetime:2pm+1:4pm+1}}</output>",
|
||||
],
|
||||
[
|
||||
"<input>Meeting notes for tomorrow:
|
||||
* Action items in `config.rb`
|
||||
* Review PR #1234
|
||||
* Deadline is 5pm
|
||||
* Check [this link](https://example.com)</input>",
|
||||
"<output>Meeting notes for {{date:1}}:
|
||||
* Action items in `config.rb`
|
||||
* Review PR #1234
|
||||
* Deadline is {{datetime:5pm+1}}
|
||||
* Check [this link](https://example.com)</output>",
|
||||
],
|
||||
],
|
||||
}
|
||||
end
|
|
@ -3,10 +3,13 @@
|
|||
summarization_personas = [DiscourseAi::Personas::Summarizer, DiscourseAi::Personas::ShortSummarizer]
|
||||
|
||||
def from_setting(setting_name)
|
||||
DB.query_single(
|
||||
"SELECT value FROM site_settings WHERE name = :setting_name",
|
||||
setting_name: setting_name,
|
||||
)
|
||||
DB
|
||||
.query_single(
|
||||
"SELECT value FROM site_settings WHERE name = :setting_name",
|
||||
setting_name: setting_name,
|
||||
)
|
||||
&.first
|
||||
&.split("|")
|
||||
end
|
||||
|
||||
DiscourseAi::Personas::Persona.system_personas.each do |persona_class, id|
|
||||
|
@ -28,7 +31,11 @@ DiscourseAi::Personas::Persona.system_personas.each do |persona_class, id|
|
|||
default_groups = [Group::AUTO_GROUPS[:everyone]]
|
||||
end
|
||||
|
||||
persona.allowed_group_ids = from_setting(setting_name).first&.split("|") || default_groups
|
||||
persona.allowed_group_ids = from_setting(setting_name) || default_groups
|
||||
elsif persona_class == DiscourseAi::Personas::CustomPrompt
|
||||
setting_name = "ai_helper_custom_prompts_allowed_groups"
|
||||
default_groups = [Group::AUTO_GROUPS[:staff]]
|
||||
persona.allowed_group_ids = from_setting(setting_name) || default_groups
|
||||
else
|
||||
persona.allowed_group_ids = [Group::AUTO_GROUPS[:trust_level_0]]
|
||||
end
|
||||
|
|
|
@ -188,8 +188,7 @@ class DiscourseAi::Evals::Eval
|
|||
end
|
||||
|
||||
def helper(llm, input:, name:, locale: nil)
|
||||
completion_prompt = CompletionPrompt.find_by(name: name)
|
||||
helper = DiscourseAi::AiHelper::Assistant.new(helper_llm: llm.llm_proxy)
|
||||
helper = DiscourseAi::AiHelper::Assistant.new(helper_llm: llm.llm_model)
|
||||
user = Discourse.system_user
|
||||
if locale
|
||||
user = User.new
|
||||
|
@ -202,7 +201,7 @@ class DiscourseAi::Evals::Eval
|
|||
end
|
||||
result =
|
||||
helper.generate_and_send_prompt(
|
||||
completion_prompt,
|
||||
name,
|
||||
input,
|
||||
current_user = user,
|
||||
_force_default_locale = false,
|
||||
|
|
|
@@ -5,6 +5,16 @@ module DiscourseAi
class Assistant
IMAGE_CAPTION_MAX_WORDS = 50
TRANSLATE = "translate"
GENERATE_TITLES = "generate_titles"
PROOFREAD = "proofread"
MARKDOWN_TABLE = "markdown_table"
CUSTOM_PROMPT = "custom_prompt"
EXPLAIN = "explain"
ILLUSTRATE_POST = "illustrate_post"
REPLACE_DATES = "replace_dates"
IMAGE_CAPTION = "image_caption"
def self.prompt_cache
@prompt_cache ||= ::DiscourseAi::MultisiteHash.new("prompt_cache")
end
@ -18,58 +28,37 @@ module DiscourseAi
|
|||
@image_caption_llm = image_caption_llm
|
||||
end
|
||||
|
||||
def helper_llm
|
||||
@helper_llm || DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model)
|
||||
end
|
||||
|
||||
def image_caption_llm
|
||||
@image_caption_llm ||
|
||||
DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_image_caption_model)
|
||||
end
|
||||
|
||||
def available_prompts(user)
|
||||
key = "prompt_cache_#{I18n.locale}"
|
||||
self
|
||||
.class
|
||||
.prompt_cache
|
||||
.fetch(key) do
|
||||
prompts = CompletionPrompt.where(enabled: true)
|
||||
prompts = self.class.prompt_cache.fetch(key) { self.all_prompts }
|
||||
|
||||
# Hide illustrate_post if disabled
|
||||
prompts =
|
||||
prompts.where.not(
|
||||
name: "illustrate_post",
|
||||
) if SiteSetting.ai_helper_illustrate_post_model == "disabled"
|
||||
prompts
|
||||
.map do |prompt|
|
||||
next if !user.in_any_groups?(prompt[:allowed_group_ids])
|
||||
|
||||
prompts =
|
||||
prompts.map do |prompt|
|
||||
if prompt.name == "translate"
|
||||
locale = user.effective_locale
|
||||
locale_hash =
|
||||
LocaleSiteSetting.language_names[locale] ||
|
||||
LocaleSiteSetting.language_names[locale.split("_")[0]]
|
||||
translation =
|
||||
I18n.t(
|
||||
"discourse_ai.ai_helper.prompts.translate",
|
||||
language: locale_hash["nativeName"],
|
||||
) || prompt.translated_name || prompt.name
|
||||
else
|
||||
translation =
|
||||
I18n.t("discourse_ai.ai_helper.prompts.#{prompt.name}", default: nil) ||
|
||||
prompt.translated_name || prompt.name
|
||||
end
|
||||
if prompt[:name] == ILLUSTRATE_POST &&
|
||||
SiteSetting.ai_helper_illustrate_post_model == "disabled"
|
||||
next
|
||||
end
|
||||
|
||||
{
|
||||
id: prompt.id,
|
||||
name: prompt.name,
|
||||
translated_name: translation,
|
||||
prompt_type: prompt.prompt_type,
|
||||
icon: icon_map(prompt.name),
|
||||
location: location_map(prompt.name),
|
||||
}
|
||||
end
|
||||
prompts
|
||||
# We cannot cache this. It depends on the user's effective_locale.
|
||||
if prompt[:name] == TRANSLATE
|
||||
locale = user.effective_locale
|
||||
locale_hash =
|
||||
LocaleSiteSetting.language_names[locale] ||
|
||||
LocaleSiteSetting.language_names[locale.split("_")[0]]
|
||||
translation =
|
||||
I18n.t(
|
||||
"discourse_ai.ai_helper.prompts.translate",
|
||||
language: locale_hash["nativeName"],
|
||||
) || prompt[:name]
|
||||
|
||||
prompt.merge(translated_name: translation)
|
||||
else
|
||||
prompt
|
||||
end
|
||||
end
|
||||
.compact
|
||||
end
|
||||
|
||||
def custom_locale_instructions(user = nil, force_default_locale)
|
||||
|
@ -85,26 +74,14 @@ module DiscourseAi
|
|||
end
|
||||
end
|
||||
|
||||
def localize_prompt!(prompt, user = nil, force_default_locale: false)
|
||||
locale_instructions = custom_locale_instructions(user, force_default_locale)
|
||||
if locale_instructions
|
||||
prompt.messages[0][:content] = prompt.messages[0][:content] + locale_instructions
|
||||
end
|
||||
def attach_user_context(context, user = nil, force_default_locale: false)
|
||||
locale = SiteSetting.default_locale
|
||||
locale = user.effective_locale if user && !force_default_locale
|
||||
locale_hash = LocaleSiteSetting.language_names[locale]
|
||||
|
||||
if prompt.messages[0][:content].include?("%LANGUAGE%")
|
||||
locale = SiteSetting.default_locale
|
||||
context.user_language = "#{locale_hash["name"]}"
|
||||
|
||||
locale = user.effective_locale if user && !force_default_locale
|
||||
|
||||
locale_hash = LocaleSiteSetting.language_names[locale]
|
||||
|
||||
prompt.messages[0][:content] = prompt.messages[0][:content].gsub(
|
||||
"%LANGUAGE%",
|
||||
"#{locale_hash["name"]}",
|
||||
)
|
||||
end
|
||||
|
||||
if user && prompt.messages[0][:content].include?("{{temporal_context}}")
|
||||
if user
|
||||
timezone = user.user_option.timezone || "UTC"
|
||||
current_time = Time.now.in_time_zone(timezone)
|
||||
|
||||
|
@ -117,48 +94,85 @@ module DiscourseAi
|
|||
},
|
||||
}
|
||||
|
||||
prompt.messages[0][:content] = prompt.messages[0][:content].gsub(
|
||||
"{{temporal_context}}",
|
||||
temporal_context.to_json,
|
||||
)
|
||||
|
||||
prompt.messages.each do |message|
|
||||
message[:content] = DateFormatter.process_date_placeholders(message[:content], user)
|
||||
end
|
||||
context.temporal_context = temporal_context.to_json
|
||||
end
|
||||
|
||||
context
|
||||
end
|
||||
|
||||
def generate_prompt(completion_prompt, input, user, force_default_locale: false, &block)
|
||||
llm = helper_llm
|
||||
prompt = completion_prompt.messages_with_input(input)
|
||||
localize_prompt!(prompt, user, force_default_locale: force_default_locale)
|
||||
def generate_prompt(
|
||||
helper_mode,
|
||||
input,
|
||||
user,
|
||||
force_default_locale: false,
|
||||
custom_prompt: nil,
|
||||
&block
|
||||
)
|
||||
bot = build_bot(helper_mode, user)
|
||||
|
||||
llm.generate(
|
||||
prompt,
|
||||
user: user,
|
||||
temperature: completion_prompt.temperature,
|
||||
stop_sequences: completion_prompt.stop_sequences,
|
||||
feature_name: "ai_helper",
|
||||
&block
|
||||
)
|
||||
user_input = "<input>#{input}</input>"
|
||||
if helper_mode == CUSTOM_PROMPT && custom_prompt.present?
|
||||
user_input = "<input>#{custom_prompt}:\n#{input}</input>"
|
||||
end
|
||||
|
||||
context =
|
||||
DiscourseAi::Personas::BotContext.new(
|
||||
user: user,
|
||||
skip_tool_details: true,
|
||||
feature_name: "ai_helper",
|
||||
messages: [{ type: :user, content: user_input }],
|
||||
format_dates: helper_mode == REPLACE_DATES,
|
||||
custom_instructions: custom_locale_instructions(user, force_default_locale),
|
||||
)
|
||||
context = attach_user_context(context, user, force_default_locale: force_default_locale)
|
||||
|
||||
helper_response = +""
|
||||
|
||||
buffer_blk =
|
||||
Proc.new do |partial, _, type|
|
||||
if type == :structured_output
|
||||
json_summary_schema_key = bot.persona.response_format&.first.to_h
|
||||
helper_chunk = partial.read_buffered_property(json_summary_schema_key["key"]&.to_sym)
|
||||
|
||||
if helper_chunk.present?
|
||||
helper_response << helper_chunk
|
||||
block.call(helper_chunk) if block
|
||||
end
|
||||
elsif type.blank?
|
||||
# Assume response is a regular completion.
|
||||
helper_response << helper_chunk
|
||||
block.call(helper_chunk) if block
|
||||
end
|
||||
end
|
||||
|
||||
bot.reply(context, &buffer_blk)
|
||||
|
||||
helper_response
|
||||
end
|
||||
|
||||
def generate_and_send_prompt(completion_prompt, input, user, force_default_locale: false)
|
||||
completion_result =
|
||||
def generate_and_send_prompt(
|
||||
helper_mode,
|
||||
input,
|
||||
user,
|
||||
force_default_locale: false,
|
||||
custom_prompt: nil
|
||||
)
|
||||
helper_response =
|
||||
generate_prompt(
|
||||
completion_prompt,
|
||||
helper_mode,
|
||||
input,
|
||||
user,
|
||||
force_default_locale: force_default_locale,
|
||||
custom_prompt: custom_prompt,
|
||||
)
|
||||
result = { type: completion_prompt.prompt_type }
|
||||
result = { type: prompt_type(helper_mode) }
|
||||
|
||||
result[:suggestions] = (
|
||||
if completion_prompt.list?
|
||||
parse_list(completion_result).map { |suggestion| sanitize_result(suggestion) }
|
||||
if result[:type] == :list
|
||||
parse_list(helper_response).map { |suggestion| sanitize_result(suggestion) }
|
||||
else
|
||||
sanitized = sanitize_result(completion_result)
|
||||
result[:diff] = parse_diff(input, sanitized) if completion_prompt.diff?
|
||||
sanitized = sanitize_result(helper_response)
|
||||
result[:diff] = parse_diff(input, sanitized) if result[:type] == :diff
|
||||
[sanitized]
|
||||
end
|
||||
)
|
||||
|
@ -167,25 +181,28 @@ module DiscourseAi
|
|||
end
|
||||
|
||||
def stream_prompt(
|
||||
completion_prompt,
|
||||
helper_mode,
|
||||
input,
|
||||
user,
|
||||
channel,
|
||||
force_default_locale: false,
|
||||
client_id: nil
|
||||
client_id: nil,
|
||||
custom_prompt: nil
|
||||
)
|
||||
streamed_diff = +""
|
||||
streamed_result = +""
|
||||
start = Time.now
|
||||
type = prompt_type(helper_mode)
|
||||
|
||||
generate_prompt(
|
||||
completion_prompt,
|
||||
helper_mode,
|
||||
input,
|
||||
user,
|
||||
force_default_locale: force_default_locale,
|
||||
) do |partial_response, cancel_function|
|
||||
custom_prompt: custom_prompt,
|
||||
) do |partial_response|
|
||||
streamed_result << partial_response
|
||||
streamed_diff = parse_diff(input, partial_response) if completion_prompt.diff?
|
||||
streamed_diff = parse_diff(input, partial_response) if type == :diff
|
||||
|
||||
# Throttle updates and check for safe stream points
|
||||
if (streamed_result.length > 10 && (Time.now - start > 0.3)) || Rails.env.test?
|
||||
|
@ -197,7 +214,7 @@ module DiscourseAi
|
|||
end
|
||||
end
|
||||
|
||||
final_diff = parse_diff(input, streamed_result) if completion_prompt.diff?
|
||||
final_diff = parse_diff(input, streamed_result) if type == :diff
|
||||
|
||||
sanitized_result = sanitize_result(streamed_result)
|
||||
if sanitized_result.present?
|
||||
|
@ -211,33 +228,126 @@ module DiscourseAi
|
|||
end
|
||||
|
||||
def generate_image_caption(upload, user)
|
||||
prompt =
|
||||
DiscourseAi::Completions::Prompt.new(
|
||||
"You are a bot specializing in image captioning.",
|
||||
bot = build_bot(IMAGE_CAPTION, user)
|
||||
force_default_locale = false
|
||||
|
||||
context =
|
||||
DiscourseAi::Personas::BotContext.new(
|
||||
user: user,
|
||||
skip_tool_details: true,
|
||||
feature_name: IMAGE_CAPTION,
|
||||
messages: [
|
||||
{
|
||||
type: :user,
|
||||
content: [
|
||||
"Describe this image in a single sentence#{custom_locale_instructions(user)}",
|
||||
{ upload_id: upload.id },
|
||||
],
|
||||
content: ["Describe this image in a single sentence.", { upload_id: upload.id }],
|
||||
},
|
||||
],
|
||||
custom_instructions: custom_locale_instructions(user, force_default_locale),
|
||||
)
|
||||
|
||||
raw_caption =
|
||||
image_caption_llm.generate(
|
||||
prompt,
|
||||
user: user,
|
||||
max_tokens: 1024,
|
||||
feature_name: "image_caption",
|
||||
)
|
||||
structured_output = nil
|
||||
|
||||
buffer_blk =
|
||||
Proc.new do |partial, _, type|
|
||||
if type == :structured_output
|
||||
structured_output = partial
|
||||
json_summary_schema_key = bot.persona.response_format&.first.to_h
|
||||
end
|
||||
end
|
||||
|
||||
bot.reply(context, llm_args: { max_tokens: 1024 }, &buffer_blk)
|
||||
|
||||
raw_caption = ""
|
||||
|
||||
if structured_output
|
||||
json_summary_schema_key = bot.persona.response_format&.first.to_h
|
||||
raw_caption =
|
||||
structured_output.read_buffered_property(json_summary_schema_key["key"]&.to_sym)
|
||||
end
|
||||
|
||||
raw_caption.delete("|").squish.truncate_words(IMAGE_CAPTION_MAX_WORDS)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def build_bot(helper_mode, user)
|
||||
persona_id = personas_prompt_map(include_image_caption: true).invert[helper_mode]
|
||||
raise Discourse::InvalidParameters.new(:mode) if persona_id.blank?
|
||||
|
||||
persona_klass = AiPersona.find_by(id: persona_id)&.class_instance
|
||||
return if persona_klass.nil?
|
||||
|
||||
llm_model = find_ai_helper_model(helper_mode, persona_klass)
|
||||
|
||||
DiscourseAi::Personas::Bot.as(user, persona: persona_klass.new, model: llm_model)
|
||||
end
|
||||
|
||||
# Priorities are:
|
||||
# 1. Persona's default LLM
|
||||
# 2. Hidden `ai_helper_model` setting, or `ai_helper_image_caption_model` for image_caption.
|
||||
# 3. Newest LLM config
|
||||
def find_ai_helper_model(helper_mode, persona_klass)
|
||||
model_id = persona_klass.default_llm_id
|
||||
|
||||
if !model_id
|
||||
if helper_mode == IMAGE_CAPTION
|
||||
model_id = @helper_llm || SiteSetting.ai_helper_image_caption_model&.split(":")&.last
|
||||
else
|
||||
model_id = @image_caption_llm || SiteSetting.ai_helper_model&.split(":")&.last
|
||||
end
|
||||
end
|
||||
|
||||
if model_id.present?
|
||||
LlmModel.find_by(id: model_id)
|
||||
else
|
||||
LlmModel.last
|
||||
end
|
||||
end
|
||||
|
||||
def personas_prompt_map(include_image_caption: false)
|
||||
map = {
|
||||
SiteSetting.ai_helper_translator_persona.to_i => TRANSLATE,
|
||||
SiteSetting.ai_helper_tittle_suggestions_persona.to_i => GENERATE_TITLES,
|
||||
SiteSetting.ai_helper_proofreader_persona.to_i => PROOFREAD,
|
||||
SiteSetting.ai_helper_markdown_tables_persona.to_i => MARKDOWN_TABLE,
|
||||
SiteSetting.ai_helper_custom_prompt_persona.to_i => CUSTOM_PROMPT,
|
||||
SiteSetting.ai_helper_explain_persona.to_i => EXPLAIN,
|
||||
SiteSetting.ai_helper_post_illustrator_persona.to_i => ILLUSTRATE_POST,
|
||||
SiteSetting.ai_helper_smart_dates_persona.to_i => REPLACE_DATES,
|
||||
}
|
||||
|
||||
if include_image_caption
|
||||
image_caption_persona = SiteSetting.ai_helper_image_caption_persona.to_i
|
||||
map[image_caption_persona] = IMAGE_CAPTION if image_caption_persona
|
||||
end
|
||||
|
||||
map
|
||||
end
|
||||
|
||||
def all_prompts
|
||||
personas_and_prompts = personas_prompt_map
|
||||
|
||||
AiPersona
|
||||
.where(id: personas_prompt_map.keys)
|
||||
.map do |ai_persona|
|
||||
prompt_name = personas_prompt_map[ai_persona.id]
|
||||
|
||||
if prompt_name
|
||||
{
|
||||
name: prompt_name,
|
||||
translated_name:
|
||||
I18n.t("discourse_ai.ai_helper.prompts.#{prompt_name}", default: nil) ||
|
||||
prompt_name,
|
||||
prompt_type: prompt_type(prompt_name),
|
||||
icon: icon_map(prompt_name),
|
||||
location: location_map(prompt_name),
|
||||
allowed_group_ids: ai_persona.allowed_group_ids,
|
||||
}
|
||||
end
|
||||
end
|
||||
.compact
|
||||
end
|
||||
|
||||
SANITIZE_REGEX_STR =
|
||||
%w[term context topic replyTo input output result]
|
||||
.map { |tag| "<#{tag}>\\n?|\\n?</#{tag}>" }
|
||||
|
@ -268,25 +378,21 @@ module DiscourseAi
|
|||
|
||||
def icon_map(name)
|
||||
case name
|
||||
when "translate"
|
||||
when TRANSLATE
|
||||
"language"
|
||||
when "generate_titles"
|
||||
when GENERATE_TITLES
|
||||
"heading"
|
||||
when "proofread"
|
||||
when PROOFREAD
|
||||
"spell-check"
|
||||
when "markdown_table"
|
||||
when MARKDOWN_TABLE
|
||||
"table"
|
||||
when "tone"
|
||||
"microphone"
|
||||
when "custom_prompt"
|
||||
when CUSTOM_PROMPT
|
||||
"comment"
|
||||
when "rewrite"
|
||||
"pen"
|
||||
when "explain"
|
||||
when EXPLAIN
|
||||
"question"
|
||||
when "illustrate_post"
|
||||
when ILLUSTRATE_POST
|
||||
"images"
|
||||
when "replace_dates"
|
||||
when REPLACE_DATES
|
||||
"calendar-days"
|
||||
else
|
||||
nil
|
||||
|
@ -295,33 +401,37 @@ module DiscourseAi
|
|||
|
||||
def location_map(name)
|
||||
case name
|
||||
when "translate"
|
||||
when TRANSLATE
|
||||
%w[composer post]
|
||||
when "generate_titles"
|
||||
when GENERATE_TITLES
|
||||
%w[composer]
|
||||
when "proofread"
|
||||
when PROOFREAD
|
||||
%w[composer post]
|
||||
when "markdown_table"
|
||||
when MARKDOWN_TABLE
|
||||
%w[composer]
|
||||
when "tone"
|
||||
%w[composer]
|
||||
when "custom_prompt"
|
||||
when CUSTOM_PROMPT
|
||||
%w[composer post]
|
||||
when "rewrite"
|
||||
%w[composer]
|
||||
when "explain"
|
||||
when EXPLAIN
|
||||
%w[post]
|
||||
when "summarize"
|
||||
%w[post]
|
||||
when "illustrate_post"
|
||||
when ILLUSTRATE_POST
|
||||
%w[composer]
|
||||
when "replace_dates"
|
||||
when REPLACE_DATES
|
||||
%w[composer]
|
||||
else
|
||||
%w[]
|
||||
end
|
||||
end
|
||||
|
||||
def prompt_type(prompt_name)
|
||||
if [PROOFREAD, MARKDOWN_TABLE, REPLACE_DATES, CUSTOM_PROMPT].include?(prompt_name)
|
||||
return :diff
|
||||
end
|
||||
|
||||
return :list if [ILLUSTRATE_POST, GENERATE_TITLES].include?(prompt_name)
|
||||
|
||||
:text
|
||||
end
|
||||
|
||||
def parse_diff(text, suggestion)
|
||||
cooked_text = PrettyText.cook(text)
|
||||
cooked_suggestion = PrettyText.cook(suggestion)
|
||||
|
|
|
@ -41,8 +41,6 @@ module DiscourseAi
|
|||
"The number of completions you requested exceed the number of canned responses"
|
||||
end
|
||||
|
||||
response = as_structured_output(response) if model_params[:response_format].present?
|
||||
|
||||
raise response if response.is_a?(StandardError)
|
||||
|
||||
@completions += 1
|
||||
|
@ -57,8 +55,9 @@ module DiscourseAi
|
|||
yield(response, cancel_fn)
|
||||
elsif is_thinking?(response)
|
||||
yield(response, cancel_fn)
|
||||
elsif is_structured_output?(response)
|
||||
yield(response, cancel_fn)
|
||||
elsif model_params[:response_format].present?
|
||||
structured_output = as_structured_output(response)
|
||||
yield(structured_output, cancel_fn)
|
||||
else
|
||||
response.each_char do |char|
|
||||
break if cancelled
|
||||
|
@ -69,6 +68,7 @@ module DiscourseAi
|
|||
end
|
||||
|
||||
response = response.first if response.is_a?(Array) && response.length == 1
|
||||
response = as_structured_output(response) if model_params[:response_format].present?
|
||||
|
||||
response
|
||||
end
|
||||
|
@ -87,10 +87,6 @@ module DiscourseAi
|
|||
response.is_a?(DiscourseAi::Completions::ToolCall)
|
||||
end
|
||||
|
||||
def is_structured_output?(response)
|
||||
response.is_a?(DiscourseAi::Completions::StructuredOutput)
|
||||
end
|
||||
|
||||
def as_structured_output(response)
|
||||
schema_properties = model_params[:response_format].dig(:json_schema, :schema, :properties)
|
||||
return response if schema_properties.blank?
|
||||
|
|
|
@@ -10,7 +10,7 @@ module DiscourseAi
def valid_value?(val)
return true if val == "f"
if @opts[:name] == :ai_summarization_enabled
if @opts[:name] == :ai_summarization_enabled || @opts[:name] == :ai_helper_enabled
has_llms = LlmModel.count > 0
@no_llms_configured = !has_llms
has_llms
@ -18,7 +18,10 @@ module DiscourseAi
|
|||
:feature_name,
|
||||
:resource_url,
|
||||
:cancel_manager,
|
||||
:inferred_concepts
|
||||
:inferred_concepts,
|
||||
:format_dates,
|
||||
:temporal_context,
|
||||
:user_language
|
||||
|
||||
def initialize(
|
||||
post: nil,
|
||||
|
@ -37,13 +40,15 @@ module DiscourseAi
|
|||
feature_name: "bot",
|
||||
resource_url: nil,
|
||||
cancel_manager: nil,
|
||||
inferred_concepts: []
|
||||
inferred_concepts: [],
|
||||
format_dates: false
|
||||
)
|
||||
@participants = participants
|
||||
@user = user
|
||||
@skip_tool_details = skip_tool_details
|
||||
@messages = messages
|
||||
@custom_instructions = custom_instructions
|
||||
@format_dates = format_dates
|
||||
|
||||
@message_id = message_id
|
||||
@channel_id = channel_id
|
||||
|
@ -78,6 +83,8 @@ module DiscourseAi
|
|||
participants
|
||||
resource_url
|
||||
inferred_concepts
|
||||
user_language
|
||||
temporal_context
|
||||
]
|
||||
|
||||
def lookup_template_param(key)
|
||||
|
@ -125,6 +132,8 @@ module DiscourseAi
|
|||
feature_name: @feature_name,
|
||||
resource_url: @resource_url,
|
||||
inferred_concepts: @inferred_concepts,
|
||||
user_language: @user_language,
|
||||
temporal_context: @temporal_context,
|
||||
}
|
||||
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class CustomPrompt < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
You are a helpful assistant. I will give you instructions inside <input></input> XML tags.
|
||||
You will look at them and reply with a result.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,19 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class ImageCaptioner < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
"You are a bot specializing in image captioning."
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,59 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class MarkdownTableGenerator < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
You are a markdown table formatter, I will provide you text inside <input></input> XML tags and you will format it into a markdown table
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
|
||||
def temperature
|
||||
0.5
|
||||
end
|
||||
|
||||
def examples
|
||||
[
|
||||
["<input>sam,joe,jane\nage: 22| 10|11</input>", { output: <<~TEXT }.to_json],
|
||||
| | sam | joe | jane |
|
||||
|---|---|---|---|
|
||||
| age | 22 | 10 | 11 |
|
||||
TEXT
|
||||
[<<~TEXT, { output: <<~TEXT }.to_json],
|
||||
<input>
|
||||
sam: speed 100, age 22
|
||||
jane: age 10
|
||||
fred: height 22
|
||||
</input>
|
||||
TEXT
|
||||
| | speed | age | height |
|
||||
|---|---|---|---|
|
||||
| sam | 100 | 22 | - |
|
||||
| jane | - | 10 | - |
|
||||
| fred | - | - | 22 |
|
||||
TEXT
|
||||
[<<~TEXT, { output: <<~TEXT }.to_json],
|
||||
<input>
|
||||
chrome 22ms (first load 10ms)
|
||||
firefox 10ms (first load: 9ms)
|
||||
</input>
|
||||
TEXT
|
||||
| Browser | Load Time (ms) | First Load Time (ms) |
|
||||
|---|---|---|
|
||||
| Chrome | 22 | 10 |
|
||||
| Firefox | 10 | 9 |
|
||||
TEXT
|
||||
]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@@ -55,6 +55,15 @@ module DiscourseAi
ConceptFinder => -15,
ConceptMatcher => -16,
ConceptDeduplicator => -17,
CustomPrompt => -18,
SmartDates => -19,
MarkdownTableGenerator => -20,
PostIllustrator => -21,
Proofreader => -22,
TitlesGenerator => -23,
Tutor => -24,
Translator => -25,
ImageCaptioner => -26,
}
end
@ -260,10 +269,15 @@ module DiscourseAi
|
|||
protected
|
||||
|
||||
def replace_placeholders(content, context)
|
||||
content.gsub(/\{(\w+)\}/) do |match|
|
||||
found = context.lookup_template_param(match[1..-2])
|
||||
found.nil? ? match : found.to_s
|
||||
end
|
||||
replaced =
|
||||
content.gsub(/\{(\w+)\}/) do |match|
|
||||
found = context.lookup_template_param(match[1..-2])
|
||||
found.nil? ? match : found.to_s
|
||||
end
|
||||
|
||||
return replaced if !context.format_dates
|
||||
|
||||
::DiscourseAi::AiHelper::DateFormatter.process_date_placeholders(replaced, context.user)
|
||||
end
|
||||
|
||||
def tool_instance(tool_call, bot_user:, llm:, context:, existing_tools:)
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class PostIllustrator < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
Provide me a StableDiffusion prompt to generate an image that illustrates the following post in 40 words or less, be creative.
|
||||
You'll find the post between <input></input> XML tags.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,72 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class Proofreader < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
You are a markdown proofreader. You correct egregious typos and phrasing issues but keep the user's original voice.
|
||||
You do not touch code blocks. I will provide you with text to proofread. If nothing needs fixing, then you will echo the text back.
|
||||
You will find the text between <input></input> XML tags.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
|
||||
def examples
|
||||
[
|
||||
[
|
||||
"<input></input>",
|
||||
{ output: "" }.to_json,
|
||||
],
|
||||
[
|
||||
"<input>The rain in spain stays mainly in the plane.</input>",
|
||||
{ output: "The rain in Spain, stays mainly in the Plane." }.to_json,
|
||||
],
|
||||
[
|
||||
"<input>The rain in Spain, stays mainly in the Plane.</input>",
|
||||
{ output: "The rain in Spain, stays mainly in the Plane." }.to_json,
|
||||
],
|
||||
[<<~TEXT, { output: <<~TEXT }.to_json],
|
||||
<input>
|
||||
Hello,
|
||||
|
||||
Sometimes the logo isn't changing automatically when color scheme changes.
|
||||
|
||||

|
||||
</input>
|
||||
TEXT
|
||||
Hello,
|
||||
Sometimes the logo does not change automatically when the color scheme changes.
|
||||

|
||||
TEXT
|
||||
[<<~TEXT, { output: <<~TEXT }.to_json],
|
||||
<input>
|
||||
Any ideas what is wrong with this peace of cod?
|
||||
> This quot contains a typo
|
||||
```ruby
|
||||
# this has speling mistakes
|
||||
testin.atypo = 11
|
||||
baad = "bad"
|
||||
```
|
||||
</input>
|
||||
TEXT
|
||||
Any ideas what is wrong with this piece of code?
|
||||
> This quot contains a typo
|
||||
```ruby
|
||||
# This has spelling mistakes
|
||||
testing.a_typo = 11
|
||||
bad = "bad"
|
||||
```
|
||||
TEXT
|
||||
]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,63 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class SmartDates < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
You are a date and time formatter for Discourse posts. Convert natural language time references into date placeholders.
|
||||
Do not modify any markdown, code blocks, or existing date formats.
|
||||
|
||||
Here's the temporal context:
|
||||
{temporal_context}
|
||||
|
||||
Available date placeholder formats:
|
||||
- Simple day without time: {{date:1}} for tomorrow, {{date:7}} for a week from today
|
||||
- Specific time: {{datetime:2pm+1}} for 2 PM tomorrow
|
||||
- Time range: {{datetime:2pm+1:4pm+1}} for tomorrow 2 PM to 4 PM
|
||||
|
||||
You will find the text between <input></input> XML tags.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
|
||||
def examples
|
||||
[
|
||||
[
|
||||
"<input>The meeting is at 2pm tomorrow</input>",
|
||||
{ output: "The meeting is at {{datetime:2pm+1}}" }.to_json,
|
||||
],
|
||||
["<input>Due in 3 days</input>", { output: "Due {{date:3}}" }.to_json],
|
||||
[
|
||||
"<input>Meeting next Tuesday at 2pm</input>",
|
||||
{ output: "Meeting {{next_week:tuesday-2pm}}" }.to_json,
|
||||
],
|
||||
[
|
||||
"<input>Meeting from 2pm to 4pm tomorrow</input>",
|
||||
{ output: "Meeting {{datetime:2pm+1:4pm+1}}" }.to_json,
|
||||
],
|
||||
[<<~TEXT, { output: <<~TEXT }.to_json],
|
||||
<input>Meeting notes for tomorrow:
|
||||
* Action items in `config.rb`
|
||||
* Review PR #1234
|
||||
* Deadline is 5pm
|
||||
* Check [this link](https://example.com)</input>
|
||||
TEXT
|
||||
Meeting notes for {{date:1}}:
|
||||
* Action items in `config.rb`
|
||||
* Review PR #1234
|
||||
* Deadline is {{datetime:5pm+1}}
|
||||
* Check [this link](https://example.com)
|
||||
TEXT
|
||||
]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,39 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class TitlesGenerator < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
I want you to act as a title generator for written pieces. I will provide you with a text,
|
||||
and you will generate five titles. Please keep the title concise and under 20 words,
|
||||
and ensure that the meaning is maintained. Replies will utilize the language type of the topic.
|
||||
I want you to only reply the list of options and nothing else, do not write explanations.
|
||||
Never ever use colons in the title. Always use sentence case, using a capital letter at
|
||||
the start of the title, never start the title with a lower case letter. Proper nouns in the title
|
||||
can have a capital letter, and acronyms like LLM can use capital letters. Format some titles
|
||||
as questions, some as statements. Make sure to use question marks if the title is a question.
|
||||
You will find the text between <input></input> XML tags.
|
||||
Wrap each title between <item></item> XML tags.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
|
||||
def examples
|
||||
[
|
||||
[
|
||||
"<input>In the labyrinth of time, a solitary horse, etched in gold by the setting sun, embarked on an infinite journey.</input>",
|
||||
"<item>The solitary horse</item><item>The horse etched in gold</item><item>A horse's infinite journey</item><item>A horse lost in time</item><item>A horse's last ride</item>",
|
||||
],
|
||||
]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,31 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class Translator < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
I want you to act as an {user_language} translator, spelling corrector and improver. I will write to you
|
||||
in any language and you will detect the language, translate it and answer in the corrected and
|
||||
improved version of my text, in {user_language}. I want you to replace my simplified A0-level words and
|
||||
sentences with more beautiful and elegant, upper level {user_language} words and sentences.
|
||||
Keep the meaning same, but make them more literary. I want you to only reply the correction,
|
||||
the improvements and nothing else, do not write explanations.
|
||||
You will find the text between <input></input> XML tags.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
|
||||
def temperature
|
||||
0.2
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@ -0,0 +1,30 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Personas
|
||||
class Tutor < Persona
|
||||
def self.default_enabled
|
||||
false
|
||||
end
|
||||
|
||||
def system_prompt
|
||||
<<~PROMPT.strip
|
||||
You are a tutor explaining a term to a student in a specific context.
|
||||
|
||||
I will provide everything you need to know inside <input> tags, which consists of the term I want you
|
||||
to explain inside <term> tags, the context of where it was used inside <context> tags, the title of
|
||||
the topic where it was used inside <topic> tags, and optionally, the previous post in the conversation
|
||||
in <replyTo> tags.
|
||||
|
||||
Using all this information, write a paragraph with a brief explanation
|
||||
of what the term means. Format the response using Markdown. Reply only with the explanation and
|
||||
nothing more.
|
||||
PROMPT
|
||||
end
|
||||
|
||||
def response_format
|
||||
[{ "key" => "output", "type" => "string" }]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
|
@@ -15,13 +15,12 @@ RSpec.describe Jobs::StreamComposerHelper do
  end

  describe "validates params" do
    let(:mode) { CompletionPrompt::PROOFREAD }
    let(:prompt) { CompletionPrompt.find_by(id: mode) }
    let(:mode) { DiscourseAi::AiHelper::Assistant::PROOFREAD }

    it "does nothing if there is no user" do
      messages =
        MessageBus.track_publish("/discourse-ai/ai-helper/stream_suggestion") do
          job.execute(user_id: nil, text: input, prompt: prompt.name, force_default_locale: false)
          job.execute(user_id: nil, text: input, prompt: mode, force_default_locale: false)
        end

      expect(messages).to be_empty

@@ -33,7 +32,7 @@ RSpec.describe Jobs::StreamComposerHelper do
          job.execute(
            user_id: user.id,
            text: nil,
            prompt: prompt.name,
            prompt: mode,
            force_default_locale: false,
            client_id: "123",
          )

@@ -44,12 +43,10 @@ RSpec.describe Jobs::StreamComposerHelper do
    end

    context "when all params are provided" do
      let(:mode) { CompletionPrompt::PROOFREAD }
      let(:prompt) { CompletionPrompt.find_by(id: mode) }
      let(:mode) { DiscourseAi::AiHelper::Assistant::PROOFREAD }

      it "publishes updates with a partial result" do
        proofread_result = "I like to eat pie for breakfast because it is delicious."
        partial_result = "I"

        DiscourseAi::Completions::Llm.with_prepared_responses([proofread_result]) do
          messages =

@@ -57,7 +54,7 @@ RSpec.describe Jobs::StreamComposerHelper do
              job.execute(
                user_id: user.id,
                text: input,
                prompt: prompt.name,
                prompt: mode,
                force_default_locale: true,
                client_id: "123",
              )

@@ -65,7 +62,7 @@ RSpec.describe Jobs::StreamComposerHelper do

          partial_result_update = messages.first.data
          expect(partial_result_update[:done]).to eq(false)
          expect(partial_result_update[:result]).to eq(partial_result)
          expect(partial_result_update[:result]).to eq(proofread_result)
        end
      end

@@ -78,7 +75,7 @@ RSpec.describe Jobs::StreamComposerHelper do
              job.execute(
                user_id: user.id,
                text: input,
                prompt: prompt.name,
                prompt: mode,
                force_default_locale: true,
                client_id: "123",
              )
@@ -23,8 +23,7 @@ RSpec.describe Jobs::StreamPostHelper do
  end

  describe "validates params" do
    let(:mode) { CompletionPrompt::EXPLAIN }
    let(:prompt) { CompletionPrompt.find_by(id: mode) }
    let(:mode) { DiscourseAi::AiHelper::Assistant::EXPLAIN }

    it "does nothing if there is no post" do
      messages =

@@ -55,24 +54,21 @@ RSpec.describe Jobs::StreamPostHelper do
    end

    context "when the prompt is explain" do
      let(:mode) { CompletionPrompt::EXPLAIN }
      let(:prompt) { CompletionPrompt.find_by(id: mode) }
      let(:mode) { DiscourseAi::AiHelper::Assistant::EXPLAIN }

      it "publishes updates with a partial result" do
        explanation =
          "In this context, \"pie\" refers to a baked dessert typically consisting of a pastry crust and filling."

        partial_explanation = "I"

        DiscourseAi::Completions::Llm.with_prepared_responses([explanation]) do
          messages =
            MessageBus.track_publish("/discourse-ai/ai-helper/stream_suggestion/#{post.id}") do
              job.execute(post_id: post.id, user_id: user.id, text: "pie", prompt: prompt.name)
              job.execute(post_id: post.id, user_id: user.id, text: "pie", prompt: mode)
            end

          partial_result_update = messages.first.data
          expect(partial_result_update[:done]).to eq(false)
          expect(partial_result_update[:result]).to eq(partial_explanation)
          expect(partial_result_update[:result]).to eq(explanation)
        end
      end

@@ -83,7 +79,7 @@ RSpec.describe Jobs::StreamPostHelper do
        DiscourseAi::Completions::Llm.with_prepared_responses([explanation]) do
          messages =
            MessageBus.track_publish("/discourse-ai/ai-helper/stream_suggestion/#{post.id}") do
              job.execute(post_id: post.id, user_id: user.id, text: "pie", prompt: prompt.name)
              job.execute(post_id: post.id, user_id: user.id, text: "pie", prompt: mode)
            end

          final_update = messages.last.data

@@ -94,23 +90,21 @@ RSpec.describe Jobs::StreamPostHelper do
    end

    context "when the prompt is translate" do
      let(:mode) { CompletionPrompt::TRANSLATE }
      let(:prompt) { CompletionPrompt.find_by(id: mode) }
      let(:mode) { DiscourseAi::AiHelper::Assistant::TRANSLATE }

      it "publishes updates with a partial result" do
        sentence = "I like to eat pie."
        translation = "Me gusta comer pastel."
        partial_translation = "M"

        DiscourseAi::Completions::Llm.with_prepared_responses([translation]) do
          messages =
            MessageBus.track_publish("/discourse-ai/ai-helper/stream_suggestion/#{post.id}") do
              job.execute(post_id: post.id, user_id: user.id, text: sentence, prompt: prompt.name)
              job.execute(post_id: post.id, user_id: user.id, text: sentence, prompt: mode)
            end

          partial_result_update = messages.first.data
          expect(partial_result_update[:done]).to eq(false)
          expect(partial_result_update[:result]).to eq(partial_translation)
          expect(partial_result_update[:result]).to eq(translation)
        end
      end

@@ -121,7 +115,7 @@ RSpec.describe Jobs::StreamPostHelper do
        DiscourseAi::Completions::Llm.with_prepared_responses([translation]) do
          messages =
            MessageBus.track_publish("/discourse-ai/ai-helper/stream_suggestion/#{post.id}") do
              job.execute(post_id: post.id, user_id: user.id, text: sentence, prompt: prompt.name)
              job.execute(post_id: post.id, user_id: user.id, text: sentence, prompt: mode)
            end

          final_update = messages.last.data
@@ -3,9 +3,11 @@
RSpec.describe DiscourseAi::AiHelper::Assistant do
  fab!(:user)
  fab!(:empty_locale_user) { Fabricate(:user, locale: "") }
  let(:prompt) { CompletionPrompt.find_by(id: mode) }

  before { assign_fake_provider_to(:ai_helper_model) }
  before do
    assign_fake_provider_to(:ai_helper_model)
    Group.refresh_automatic_groups!
  end

  let(:english_text) { <<~STRING }
    To perfect his horror, Caesar, surrounded at the base of the statue by the impatient daggers of his friends,

@@ -48,15 +50,12 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
    it "returns all available prompts" do
      prompts = subject.available_prompts(user)

      expect(prompts.length).to eq(8)
      expect(prompts.map { |p| p[:name] }).to contain_exactly(
        "translate",
        "generate_titles",
        "proofread",
        "markdown_table",
        "custom_prompt",
        "explain",
        "detect_text_locale",
        "replace_dates",
      )
    end

@@ -64,13 +63,12 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
    it "returns all prompts to be shown in the composer" do
      prompts = subject.available_prompts(user)
      filtered_prompts = prompts.select { |prompt| prompt[:location].include?("composer") }
      expect(filtered_prompts.length).to eq(6)

      expect(filtered_prompts.map { |p| p[:name] }).to contain_exactly(
        "translate",
        "generate_titles",
        "proofread",
        "markdown_table",
        "custom_prompt",
        "replace_dates",
      )
    end

@@ -78,12 +76,11 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
    it "returns all prompts to be shown in the post menu" do
      prompts = subject.available_prompts(user)
      filtered_prompts = prompts.select { |prompt| prompt[:location].include?("post") }
      expect(filtered_prompts.length).to eq(4)

      expect(filtered_prompts.map { |p| p[:name] }).to contain_exactly(
        "translate",
        "explain",
        "proofread",
        "custom_prompt",
      )
    end

@@ -101,90 +98,69 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
      it "returns the illustrate_post prompt in the list of all prompts" do
        prompts = subject.available_prompts(user)

        expect(prompts.length).to eq(9)
        expect(prompts.map { |p| p[:name] }).to contain_exactly(
          "translate",
          "generate_titles",
          "proofread",
          "markdown_table",
          "custom_prompt",
          "explain",
          "illustrate_post",
          "detect_text_locale",
          "replace_dates",
        )
      end
    end
  end

  describe("#localize_prompt!") do
  describe("#attach_user_context") do
    before { SiteSetting.allow_user_locale = true }

    let(:context) { DiscourseAi::Personas::BotContext.new(user: user) }

    it "is able to perform %LANGUAGE% replacements" do
      prompt =
        CompletionPrompt.new(messages: { insts: "This is a %LANGUAGE% test" }).messages_with_input(
          "test",
        )
      subject.attach_user_context(context, user)

      subject.localize_prompt!(prompt, user)

      expect(prompt.messages[0][:content].strip).to eq("This is a English (US) test")
      expect(context.user_language).to eq("English (US)")
    end

    it "handles users with empty string locales" do
      prompt =
        CompletionPrompt.new(messages: { insts: "This is a %LANGUAGE% test" }).messages_with_input(
          "test",
        )
      subject.attach_user_context(context, empty_locale_user)

      subject.localize_prompt!(prompt, empty_locale_user)

      expect(prompt.messages[0][:content].strip).to eq("This is a English (US) test")
      expect(context.user_language).to eq("English (US)")
    end

    context "with temporal context" do
      let(:prompt) do
        CompletionPrompt.new(
          messages: {
            insts: "Current context: {{temporal_context}}",
          },
        ).messages_with_input("test")
      end

      it "replaces temporal context with timezone information" do
        timezone = "America/New_York"
        user.user_option.update!(timezone: timezone)
        freeze_time "2024-01-01 12:00:00"

        subject.localize_prompt!(prompt, user)
        subject.attach_user_context(context, user)

        content = prompt.messages[0][:content]

        expect(content).to include(%("timezone":"America/New_York"))
        expect(context.temporal_context).to include(%("timezone":"America/New_York"))
      end

      it "uses UTC as default timezone when user timezone is not set" do
        user.user_option.update!(timezone: nil)

        freeze_time "2024-01-01 12:00:00" do
          subject.localize_prompt!(prompt, user)
          subject.attach_user_context(context, user)

          parsed_context = JSON.parse(prompt.messages[0][:content].match(/context: (.+)$/)[1])
          expect(parsed_context["user"]["timezone"]).to eq("UTC")
          parsed_context = JSON.parse(context.temporal_context)
          expect(parsed_context.dig("user", "timezone")).to eq("UTC")
        end
      end

      it "does not replace temporal context when user is nil" do
        prompt_content = prompt.messages[0][:content].dup
        subject.localize_prompt!(prompt, nil)
        expect(prompt.messages[0][:content]).to eq(prompt_content)
        subject.attach_user_context(context, nil)

        expect(context.temporal_context).to be_nil
      end
    end
  end

  describe "#generate_and_send_prompt" do
    context "when using a prompt that returns text" do
      let(:mode) { CompletionPrompt::TRANSLATE }
      let(:mode) { described_class::TRANSLATE }

      let(:text_to_translate) { <<~STRING }
        Para que su horror sea perfecto, César, acosado al pie de la estatua por lo impacientes puñales de sus amigos,

@@ -195,7 +171,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
      it "Sends the prompt to the LLM and returns the response" do
        response =
          DiscourseAi::Completions::Llm.with_prepared_responses([english_text]) do
            subject.generate_and_send_prompt(prompt, text_to_translate, user)
            subject.generate_and_send_prompt(mode, text_to_translate, user)
          end

        expect(response[:suggestions]).to contain_exactly(english_text)

@@ -203,7 +179,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
    end

    context "when using a prompt that returns a list" do
      let(:mode) { CompletionPrompt::GENERATE_TITLES }
      let(:mode) { described_class::GENERATE_TITLES }

      let(:titles) do
        "<item>The solitary horse</item><item>The horse etched in gold</item><item>A horse's infinite journey</item><item>A horse lost in time</item><item>A horse's last ride</item>"

@@ -220,7 +196,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do

        response =
          DiscourseAi::Completions::Llm.with_prepared_responses([titles]) do
            subject.generate_and_send_prompt(prompt, english_text, user)
            subject.generate_and_send_prompt(mode, english_text, user)
          end

        expect(response[:suggestions]).to contain_exactly(*expected)
@@ -1,80 +0,0 @@
# frozen_string_literal: true

RSpec.describe CompletionPrompt do
  describe "validations" do
    context "when there are too many messages" do
      it "doesn't accept more than 20 messages" do
        prompt = described_class.new(messages: [{ role: "system", content: "a" }] * 21)

        expect(prompt.valid?).to eq(false)
      end
    end

    context "when the message is over the max length" do
      it "doesn't accept messages when the length is more than 1000 characters" do
        prompt = described_class.new(messages: [{ role: "system", content: "a" * 1001 }])

        expect(prompt.valid?).to eq(false)
      end
    end
  end

  describe "messages_with_input" do
    let(:user_input) { "A user wrote this." }

    context "when mapping to a prompt" do
      it "correctly maps everything to the prompt" do
        cp =
          CompletionPrompt.new(
            messages: {
              insts: "Instructions",
              post_insts: "Post Instructions",
              examples: [["Request 1", "Response 1"]],
            },
          )

        prompt = cp.messages_with_input("hello")

        expected = [
          { type: :system, content: "Instructions\nPost Instructions" },
          { type: :user, content: "Request 1" },
          { type: :model, content: "Response 1" },
          { type: :user, content: "<input>hello</input>" },
        ]

        expect(prompt.messages).to eq(expected)
      end
    end

    context "when the record has the custom_prompt type" do
      let(:custom_prompt) { described_class.find(described_class::CUSTOM_PROMPT) }

      it "wraps the user input with <input> XML tags and adds a custom instruction if given" do
        expected = <<~TEXT.strip
          <input>Translate to Turkish:
          #{user_input}</input>
        TEXT

        custom_prompt.custom_instruction = "Translate to Turkish"

        prompt = custom_prompt.messages_with_input(user_input)

        expect(prompt.messages.last[:content]).to eq(expected)
      end
    end

    context "when the records don't have the custom_prompt type" do
      let(:title_prompt) { described_class.find(described_class::GENERATE_TITLES) }

      it "wraps user input with <input> XML tags" do
        expected = "<input>#{user_input}</input>"

        title_prompt.custom_instruction = "Translate to Turkish"

        prompt = title_prompt.messages_with_input(user_input)

        expect(prompt.messages.last[:content]).to eq(expected)
      end
    end
  end
end
@@ -11,6 +11,7 @@ describe Plugin::Instance do
    SiteSetting.ai_helper_enabled = true
    SiteSetting.ai_helper_illustrate_post_model = "disabled"
    Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
    Group.refresh_automatic_groups!

    DiscourseAi::AiHelper::Assistant.clear_prompt_cache!
  end

@@ -19,7 +20,15 @@ describe Plugin::Instance do

    it "returns the available prompts" do
      expect(serializer.ai_helper_prompts).to be_present
      expect(serializer.ai_helper_prompts.object.count).to eq(8)

      expect(serializer.ai_helper_prompts.object.map { |p| p[:name] }).to contain_exactly(
        "translate",
        "generate_titles",
        "proofread",
        "markdown_table",
        "explain",
        "replace_dates",
      )
    end
  end
end
@@ -22,7 +22,7 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do
               text: "hello wrld",
               location: "composer",
               client_id: "1234",
               mode: CompletionPrompt::PROOFREAD,
               mode: DiscourseAi::AiHelper::Assistant::PROOFREAD,
             }

      expect(response.status).to eq(200)

@@ -41,7 +41,7 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do
  describe "#suggest" do
    let(:text_to_proofread) { "The rain in spain stays mainly in the plane." }
    let(:proofread_text) { "The rain in Spain, stays mainly in the Plane." }
    let(:mode) { CompletionPrompt::PROOFREAD }
    let(:mode) { DiscourseAi::AiHelper::Assistant::PROOFREAD }

    context "when not logged in" do
      it "returns a 403 response" do

@@ -121,7 +121,7 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do
        DiscourseAi::Completions::Llm.with_prepared_responses([translated_text]) do
          post "/discourse-ai/ai-helper/suggest",
               params: {
                 mode: CompletionPrompt::CUSTOM_PROMPT,
                 mode: DiscourseAi::AiHelper::Assistant::CUSTOM_PROMPT,
                 text: "A user wrote this",
                 custom_prompt: "Translate to Spanish",
               }

@@ -137,7 +137,7 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do
        expect {
          post "/discourse-ai/ai-helper/suggest",
               params: {
                 mode: CompletionPrompt::ILLUSTRATE_POST,
                 mode: DiscourseAi::AiHelper::Assistant::ILLUSTRATE_POST,
                 text: text_to_proofread,
                 force_default_locale: true,
               }

@@ -153,8 +153,14 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do
        amount = rate_limit[:amount]

        amount.times do
          post "/discourse-ai/ai-helper/suggest", params: { mode: mode, text: text_to_proofread }
          expect(response.status).to eq(200)
          DiscourseAi::Completions::Llm.with_prepared_responses([proofread_text]) do
            post "/discourse-ai/ai-helper/suggest",
                 params: {
                   mode: mode,
                   text: text_to_proofread,
                 }
            expect(response.status).to eq(200)
          end
        end
        DiscourseAi::Completions::Llm.with_prepared_responses([proofread_text]) do
          post "/discourse-ai/ai-helper/suggest", params: { mode: mode, text: text_to_proofread }
@ -62,7 +62,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when using custom prompt" do
|
||||
let(:mode) { CompletionPrompt::CUSTOM_PROMPT }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::CUSTOM_PROMPT }
|
||||
|
||||
let(:custom_prompt_input) { "Translate to French" }
|
||||
let(:custom_prompt_response) { "La pluie en Espagne reste principalement dans l'avion." }
|
||||
|
@ -94,7 +94,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when not a member of custom prompt group" do
|
||||
let(:mode) { CompletionPrompt::CUSTOM_PROMPT }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::CUSTOM_PROMPT }
|
||||
before { SiteSetting.ai_helper_custom_prompts_allowed_groups = non_member_group.id.to_s }
|
||||
|
||||
it "does not show custom prompt option" do
|
||||
|
@ -104,7 +104,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when using translation mode" do
|
||||
let(:mode) { CompletionPrompt::TRANSLATE }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::TRANSLATE }
|
||||
|
||||
let(:spanish_input) { "La lluvia en España se queda principalmente en el avión." }
|
||||
|
||||
|
@ -163,7 +163,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when using the proofreading mode" do
|
||||
let(:mode) { CompletionPrompt::PROOFREAD }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::PROOFREAD }
|
||||
|
||||
let(:proofread_text) { "The rain in Spain, stays mainly in the Plane." }
|
||||
|
||||
|
@ -182,7 +182,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when suggesting titles with AI title suggester" do
|
||||
let(:mode) { CompletionPrompt::GENERATE_TITLES }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::GENERATE_TITLES }
|
||||
|
||||
let(:titles) do
|
||||
"<item>Rainy Spain</item><item>Plane-Bound Delights</item><item>Mysterious Spain</item><item>Plane-Rain Chronicles</item><item>Unveiling Spain</item>"
|
||||
|
@ -330,7 +330,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when AI helper is disabled" do
|
||||
let(:mode) { CompletionPrompt::GENERATE_TITLES }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::GENERATE_TITLES }
|
||||
before { SiteSetting.ai_helper_enabled = false }
|
||||
|
||||
it "does not show the AI helper button in the composer toolbar" do
|
||||
|
@ -349,7 +349,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when user is not a member of AI helper allowed group" do
|
||||
let(:mode) { CompletionPrompt::GENERATE_TITLES }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::GENERATE_TITLES }
|
||||
before { SiteSetting.composer_ai_helper_allowed_groups = non_member_group.id.to_s }
|
||||
|
||||
it "does not show the AI helper button in the composer toolbar" do
|
||||
|
@ -368,7 +368,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when suggestion features are disabled" do
|
||||
let(:mode) { CompletionPrompt::GENERATE_TITLES }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::GENERATE_TITLES }
|
||||
before { SiteSetting.ai_helper_enabled_features = "context_menu" }
|
||||
|
||||
it "does not show suggestion buttons in the composer" do
|
||||
|
@ -398,7 +398,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
|
|||
composer.click_toolbar_button("ai-helper-trigger")
|
||||
|
||||
DiscourseAi::Completions::Llm.with_prepared_responses([input]) do
|
||||
ai_helper_menu.select_helper_model(CompletionPrompt::TRANSLATE)
|
||||
ai_helper_menu.select_helper_model(DiscourseAi::AiHelper::Assistant::TRANSLATE)
|
||||
expect(ai_helper_menu).to have_no_context_menu
|
||||
expect(diff_modal).to be_visible
|
||||
end
|
||||
|
|
|
@ -80,7 +80,7 @@ RSpec.describe "AI Post helper", type: :system, js: true do
|
|||
end
|
||||
|
||||
context "when using proofread mode" do
|
||||
let(:mode) { CompletionPrompt::PROOFREAD }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::PROOFREAD }
|
||||
let(:proofread_response) do
|
||||
"The Toyota Supra delivers 382 horsepower making it a very fast car."
|
||||
end
|
||||
|
|
|
@ -54,7 +54,7 @@ RSpec.describe "AI Post helper", type: :system, js: true do
|
|||
|
||||
describe "moving posts to a new topic" do
|
||||
context "when suggesting titles with AI title suggester" do
|
||||
let(:mode) { CompletionPrompt::GENERATE_TITLES }
|
||||
let(:mode) { DiscourseAi::AiHelper::Assistant::GENERATE_TITLES }
|
||||
let(:titles) do
|
||||
"<item>Pie: A delicious dessert</item><item>Cake is the best!</item><item>Croissants are delightful</item><item>Some great desserts</item><item>What is the best dessert?</item>"
|
||||
end
|
||||
|
|
|
@@ -1,6 +1,5 @@
export default [
  {
    id: -301,
    name: "translate",
    translated_name: "Translate to English (US)",
    prompt_type: "text",

@@ -8,7 +7,6 @@ export default [
    location: ["composer", "post"],
  },
  {
    id: -303,
    name: "proofread",
    translated_name: "Proofread text",
    prompt_type: "diff",

@@ -16,7 +14,6 @@ export default [
    location: ["composer", "post"],
  },
  {
    id: -304,
    name: "markdown_table",
    translated_name: "Generate Markdown table",
    prompt_type: "diff",

@@ -24,7 +21,6 @@ export default [
    location: ["composer"],
  },
  {
    id: -305,
    name: "custom_prompt",
    translated_name: "Custom Prompt",
    prompt_type: "diff",

@@ -32,7 +28,6 @@ export default [
    location: ["composer", "post"],
  },
  {
    id: -306,
    name: "explain",
    translated_name: "Explain",
    prompt_type: "text",

@@ -40,7 +35,6 @@ export default [
    location: ["post"],
  },
  {
    id: -307,
    name: "generate_titles",
    translated_name: "Suggest topic titles",
    prompt_type: "list",

@@ -48,19 +42,10 @@ export default [
    location: ["composer"],
  },
  {
    id: -308,
    name: "illustrate_post",
    translated_name: "Illustrate Post",
    prompt_type: "list",
    icon: "images",
    location: ["composer"],
  },
  {
    id: -309,
    name: "detect_text_locale",
    translated_name: "detect_text_locale",
    prompt_type: "text",
    icon: null,
    location: [],
  },
];