diff --git a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
index 38cf1b50..8676a6af 100644
--- a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
+++ b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
@@ -202,13 +202,15 @@ export default class AiLlmEditorForm extends Component {

       if (isNew) {
         this.args.llms.addObject(this.args.model);
-        this.router.transitionTo("adminPlugins.show.discourse-ai-llms.index");
-      } else {
-        this.toasts.success({
-          data: { message: i18n("discourse_ai.llms.saved") },
-          duration: 2000,
-        });
+        await this.router.replaceWith(
+          "adminPlugins.show.discourse-ai-llms.edit",
+          this.args.model.id
+        );
       }
+      this.toasts.success({
+        data: { message: i18n("discourse_ai.llms.saved") },
+        duration: 2000,
+      });
     } catch (e) {
       popupAjaxError(e);
     } finally {
@@ -340,7 +342,7 @@ export default class AiLlmEditorForm extends Component {
@format="large"
as |field|
>
-
+
diff --git a/assets/javascripts/discourse/components/ai-persona-editor.gjs b/assets/javascripts/discourse/components/ai-persona-editor.gjs
index 6dcbf8bf..80970b5b 100644
--- a/assets/javascripts/discourse/components/ai-persona-editor.gjs
+++ b/assets/javascripts/discourse/components/ai-persona-editor.gjs
@@ -122,16 +122,15 @@ export default class PersonaEditor extends Component {

       if (isNew && this.args.model.rag_uploads.length === 0) {
         this.args.personas.addObject(personaToSave);
-        this.router.transitionTo(
+        await this.router.replaceWith(
           "adminPlugins.show.discourse-ai-personas.edit",
           personaToSave
         );
-      } else {
-        this.toasts.success({
-          data: { message: i18n("discourse_ai.ai_persona.saved") },
-          duration: 2000,
-        });
       }
+      this.toasts.success({
+        data: { message: i18n("discourse_ai.ai_persona.saved") },
+        duration: 2000,
+      });
     } catch (e) {
       popupAjaxError(e);
     } finally {
diff --git a/assets/javascripts/discourse/components/ai-tool-editor-form.gjs b/assets/javascripts/discourse/components/ai-tool-editor-form.gjs
index 60c7b8ef..cccd65f5 100644
--- a/assets/javascripts/discourse/components/ai-tool-editor-form.gjs
+++ b/assets/javascripts/discourse/components/ai-tool-editor-form.gjs
@@ -91,7 +91,7 @@ export default class AiToolEditorForm extends Component {
         this.args.tools.pushObject(this.args.model);
       }

-      this.router.transitionTo(
+      this.router.replaceWith(
         "adminPlugins.show.discourse-ai-tools.edit",
         this.args.model
       );
diff --git a/spec/system/llms/ai_llm_spec.rb b/spec/system/llms/ai_llm_spec.rb
index ad4ccda3..a8945dad 100644
--- a/spec/system/llms/ai_llm_spec.rb
+++ b/spec/system/llms/ai_llm_spec.rb
@@ -38,6 +38,8 @@ RSpec.describe "Managing LLM configurations", type: :system, js: true do
   end

   it "manually configures an LLM" do
+    llm_count = LlmModel.count
+
     visit "/admin/plugins/discourse-ai/ai-llms"

     expect(page_header).to be_visible
@@ -58,19 +60,32 @@ RSpec.describe "Managing LLM configurations", type: :system, js: true do
     form.field("enabled_chat_bot").toggle
     form.submit

-    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")
-
+    expect(page).to have_current_path(%r{/admin/plugins/discourse-ai/ai-llms/\d+/edit})
     llm = LlmModel.order(:id).last
+    expect(llm.max_output_tokens.to_i).to eq(2000)
+    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms/#{llm.id}/edit")
+
+    form.field("max_output_tokens").fill_in(2001)
+    form.submit
+
+    # going back should land on the LLM list and show the configured LLM
+    page.go_back
+
+    expect(page).to have_selector(".ai-llms-list-editor__configured .ai-llm-list__row", count: 1)
+
+    llm.reload

     expect(llm.display_name).to eq("Self-hosted LLM")
     expect(llm.name).to eq("llava-hf/llava-v1.6-mistral-7b-hf")
     expect(llm.url).to eq("srv://self-hostest.test")
     expect(llm.tokenizer).to eq("DiscourseAi::Tokenizer::Llama3Tokenizer")
     expect(llm.max_prompt_tokens.to_i).to eq(8000)
     expect(llm.provider).to eq("vllm")
-    expect(llm.max_output_tokens.to_i).to eq(2000)
+    expect(llm.max_output_tokens.to_i).to eq(2001)
     expect(llm.vision_enabled).to eq(true)
     expect(llm.user_id).not_to be_nil
+
+    expect(LlmModel.count).to eq(llm_count + 1)
   end

   context "when changing the provider" do