FIX: improve transition logic in forms

Previously, the back button would take you back to the /new route.
Sam Saffron 2025-06-12 12:32:23 +10:00
parent 8c8fd969ef
commit 2f845d1efe
4 changed files with 33 additions and 17 deletions
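
In Ember terms, the fix swaps RouterService#transitionTo for RouterService#replaceWith when a newly created record is saved: transitionTo pushes a new entry onto the browser history stack, leaving the stale /new form one back-button press away, while replaceWith replaces the current entry, so back returns to whatever preceded the form. A minimal sketch of the pattern, using an illustrative component and route name rather than anything from this commit:

    import Component from "@glimmer/component";
    import { action } from "@ember/object";
    import { service } from "@ember/service";

    export default class ExampleEditorForm extends Component {
      @service router;

      @action
      async save(record) {
        await record.save();

        // transitionTo() would push the edit route on top of /new, leaving
        // the stale creation form one back-button press away:
        //   this.router.transitionTo("examples.edit", record.id);

        // replaceWith() swaps the /new history entry for the edit route, so
        // the back button returns to whatever page preceded the form:
        await this.router.replaceWith("examples.edit", record.id);
      }
    }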


@@ -202,13 +202,15 @@ export default class AiLlmEditorForm extends Component {
       if (isNew) {
         this.args.llms.addObject(this.args.model);
-        this.router.transitionTo("adminPlugins.show.discourse-ai-llms.index");
-      } else {
-        this.toasts.success({
-          data: { message: i18n("discourse_ai.llms.saved") },
-          duration: 2000,
-        });
-      }
+        await this.router.replaceWith(
+          "adminPlugins.show.discourse-ai-llms.edit",
+          this.args.model.id
+        );
+      }
+      this.toasts.success({
+        data: { message: i18n("discourse_ai.llms.saved") },
+        duration: 2000,
+      });
     } catch (e) {
       popupAjaxError(e);
     } finally {
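
Note that the rewritten branch awaits the Transition that replaceWith returns (Transitions are thenable in Ember), so the toast only fires after the route change settles, and a transition that errors or is aborted rejects into the catch block instead of showing a false success message. A rough sketch of that control flow; the isSaving reset in finally is an assumption, since the hunk cuts off before that body:

    try {
      // rejects if the transition errors or aborts, skipping the toast
      await this.router.replaceWith(
        "adminPlugins.show.discourse-ai-llms.edit",
        this.args.model.id
      );
      this.toasts.success({ data: { message: "saved" }, duration: 2000 });
    } catch (e) {
      popupAjaxError(e); // shared error popup used across these editors
    } finally {
      this.isSaving = false; // assumed flag; not visible in this hunk
    }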
@@ -340,7 +342,7 @@ export default class AiLlmEditorForm extends Component {
           @format="large"
           as |field|
         >
-          <field.Password />
+          <field.Password autocomplete="off" data-1p-ignore />
         </form.Field>
         <form.Object @name="provider_params" as |object providerParamsData|>
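
The second hunk in this file is unrelated to routing: autocomplete="off" asks the browser not to autofill the password field, and data-1p-ignore is the attribute the 1Password extension recognizes as a request to skip a field. Rendered to HTML, the field should come out along these lines (a sketch; the exact markup Discourse's form kit emits may differ):

    <input type="password" autocomplete="off" data-1p-ignore />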


@@ -122,16 +122,15 @@ export default class PersonaEditor extends Component {
       if (isNew && this.args.model.rag_uploads.length === 0) {
         this.args.personas.addObject(personaToSave);
-        this.router.transitionTo(
+        await this.router.replaceWith(
           "adminPlugins.show.discourse-ai-personas.edit",
           personaToSave
         );
-      } else {
-        this.toasts.success({
-          data: { message: i18n("discourse_ai.ai_persona.saved") },
-          duration: 2000,
-        });
-      }
+      }
+      this.toasts.success({
+        data: { message: i18n("discourse_ai.ai_persona.saved") },
+        duration: 2000,
+      });
     } catch (e) {
       popupAjaxError(e);
     } finally {


@@ -91,7 +91,7 @@ export default class AiToolEditorForm extends Component {
         this.args.tools.pushObject(this.args.model);
       }
-      this.router.transitionTo(
+      this.router.replaceWith(
         "adminPlugins.show.discourse-ai-tools.edit",
         this.args.model
       );


@@ -38,6 +38,8 @@ RSpec.describe "Managing LLM configurations", type: :system, js: true do
   end

   it "manually configures an LLM" do
+    llm_count = LlmModel.count
+
     visit "/admin/plugins/discourse-ai/ai-llms"

     expect(page_header).to be_visible

@@ -58,19 +60,32 @@ RSpec.describe "Managing LLM configurations", type: :system, js: true do
     form.field("enabled_chat_bot").toggle
     form.submit

-    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")
+    expect(page).to have_current_path(%r{/admin/plugins/discourse-ai/ai-llms/\d+/edit})

     llm = LlmModel.order(:id).last
+    expect(llm.max_output_tokens.to_i).to eq(2000)
+    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms/#{llm.id}/edit")
+
+    form.field("max_output_tokens").fill_in(2001)
+    form.submit
+
+    # should go to llm list and see the llms correctly configured
+    page.go_back
+    expect(page).to have_selector(".ai-llms-list-editor__configured .ai-llm-list__row", count: 1)
+
+    llm.reload
     expect(llm.display_name).to eq("Self-hosted LLM")
     expect(llm.name).to eq("llava-hf/llava-v1.6-mistral-7b-hf")
     expect(llm.url).to eq("srv://self-hostest.test")
     expect(llm.tokenizer).to eq("DiscourseAi::Tokenizer::Llama3Tokenizer")
     expect(llm.max_prompt_tokens.to_i).to eq(8000)
     expect(llm.provider).to eq("vllm")
-    expect(llm.max_output_tokens.to_i).to eq(2000)
+    expect(llm.max_output_tokens.to_i).to eq(2001)
     expect(llm.vision_enabled).to eq(true)
     expect(llm.user_id).not_to be_nil
+    expect(LlmModel.count).to eq(llm_count + 1)
   end

   context "when changing the provider" do