DEV: Log AI related configuration to staff action log (#1416)
This update adds logging for changes made in the AI admin panel. When configuration changes are made to Embeddings, LLMs, Personas, Tools, or Spam that aren't site-setting related, they will now be logged in Admin > Logs & Screening. This will help admins debug AI-related issues. The update also introduces a helper lib, `AiStaffActionLogger`, which can easily be reused to add logging support for any other AI admin config we need logged.
This commit is contained in:
parent fc83bed7cd · commit 9be1049de6
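For reviewers, every controller touched in this diff follows the same pattern: capture `attributes.dup` before the change, then hand the before/after state plus a per-field config to the new helper. The sketch below is illustrative only — it condenses the embeddings controller changes shown further down and is not additional code introduced by the commit; field names and config values mirror what appears in the diff.

```ruby
# Condensed example of the logging pattern used by the admin controllers below.
# Field config conventions (from the helper):
#   {}                    -> logged as a plain "old → new" change
#   { type: :sensitive }  -> value never shown; logged as set/updated/removed
#   { type: :large_text } -> long text logged as "updated" (truncated on create)
#   json_fields: [...]    -> arrays/hashes tracked as a simple "updated" marker
def log_ai_embedding_update(embedding_def, initial_attributes)
  logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)

  field_config = {
    display_name: {},
    api_key: { type: :sensitive },
    embed_prompt: { type: :large_text },
    json_fields: [:provider_params],
  }

  logger.log_update(
    "embedding",            # results in the "update_ai_embedding" staff action
    embedding_def,          # record after the update
    initial_attributes,     # embedding_def.attributes.dup captured before the update
    field_config,
    { embedding_id: embedding_def.id, subject: embedding_def.display_name },
  )
end
```

Deletions work slightly differently: the controllers capture the details hash (id, name, provider, and so on) up front, since the record is gone by the time the log entry is written.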
|
|
@@ -40,6 +40,7 @@ module DiscourseAi
embedding_def = EmbeddingDefinition.new(ai_embeddings_params)
|
||||
|
||||
if embedding_def.save
|
||||
log_ai_embedding_creation(embedding_def)
|
||||
render json: AiEmbeddingDefinitionSerializer.new(embedding_def), status: :created
|
||||
else
|
||||
render_json_error embedding_def
|
||||
|
|
@@ -55,7 +56,10 @@ module DiscourseAi
)
|
||||
end
|
||||
|
||||
initial_attributes = embedding_def.attributes.dup
|
||||
|
||||
if embedding_def.update(ai_embeddings_params.except(:dimensions))
|
||||
log_ai_embedding_update(embedding_def, initial_attributes)
|
||||
render json: AiEmbeddingDefinitionSerializer.new(embedding_def)
|
||||
else
|
||||
render_json_error embedding_def
|
||||
|
|
@@ -75,7 +79,16 @@ module DiscourseAi
return render_json_error(I18n.t("discourse_ai.embeddings.delete_failed"), status: 409)
|
||||
end
|
||||
|
||||
embedding_details = {
|
||||
embedding_id: embedding_def.id,
|
||||
display_name: embedding_def.display_name,
|
||||
provider: embedding_def.provider,
|
||||
dimensions: embedding_def.dimensions,
|
||||
subject: embedding_def.display_name,
|
||||
}
|
||||
|
||||
if embedding_def.destroy
|
||||
log_ai_embedding_deletion(embedding_details)
|
||||
head :no_content
|
||||
else
|
||||
render_json_error embedding_def
|
||||
|
|
@@ -128,6 +141,60 @@ module DiscourseAi
|
||||
permitted
|
||||
end
|
||||
|
||||
def ai_embeddings_logger_fields
|
||||
{
|
||||
display_name: {
|
||||
},
|
||||
provider: {
|
||||
},
|
||||
dimensions: {
|
||||
},
|
||||
url: {
|
||||
},
|
||||
tokenizer_class: {
|
||||
},
|
||||
max_sequence_length: {
|
||||
},
|
||||
embed_prompt: {
|
||||
type: :large_text,
|
||||
},
|
||||
search_prompt: {
|
||||
type: :large_text,
|
||||
},
|
||||
matryoshka_dimensions: {
|
||||
},
|
||||
api_key: {
|
||||
type: :sensitive,
|
||||
},
|
||||
# JSON fields should be tracked as simple changes
|
||||
json_fields: [:provider_params],
|
||||
}
|
||||
end
|
||||
|
||||
def log_ai_embedding_creation(embedding_def)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { embedding_id: embedding_def.id, subject: embedding_def.display_name }
|
||||
logger.log_creation("embedding", embedding_def, ai_embeddings_logger_fields, entity_details)
|
||||
end
|
||||
|
||||
def log_ai_embedding_update(embedding_def, initial_attributes)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { embedding_id: embedding_def.id, subject: embedding_def.display_name }
|
||||
|
||||
logger.log_update(
|
||||
"embedding",
|
||||
embedding_def,
|
||||
initial_attributes,
|
||||
ai_embeddings_logger_fields,
|
||||
entity_details,
|
||||
)
|
||||
end
|
||||
|
||||
def log_ai_embedding_deletion(embedding_details)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
logger.log_deletion("embedding", embedding_details)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -47,6 +47,7 @@ module DiscourseAi
|
||||
if llm_model.save
|
||||
llm_model.toggle_companion_user
|
||||
log_llm_model_creation(llm_model)
|
||||
render json: LlmModelSerializer.new(llm_model), status: :created
|
||||
else
|
||||
render_json_error llm_model
|
||||
|
|
@@ -56,6 +57,10 @@ module DiscourseAi
def update
|
||||
llm_model = LlmModel.find(params[:id])
|
||||
|
||||
# Capture initial state for logging
|
||||
initial_attributes = llm_model.attributes.dup
|
||||
initial_quotas = llm_model.llm_quotas.map(&:attributes)
|
||||
|
||||
if params[:ai_llm].key?(:llm_quotas)
|
||||
if quota_params
|
||||
existing_quota_group_ids = llm_model.llm_quotas.pluck(:group_id)
|
||||
|
|
@@ -81,6 +86,7 @@ module DiscourseAi
|
||||
if llm_model.update(ai_llm_params(updating: llm_model))
|
||||
llm_model.toggle_companion_user
|
||||
log_llm_model_update(llm_model, initial_attributes, initial_quotas)
|
||||
render json: LlmModelSerializer.new(llm_model)
|
||||
else
|
||||
render_json_error llm_model
|
||||
|
|
@@ -109,11 +115,20 @@ module DiscourseAi
)
|
||||
end
|
||||
|
||||
# Capture model details for logging before destruction
|
||||
model_details = {
|
||||
model_id: llm_model.id,
|
||||
display_name: llm_model.display_name,
|
||||
name: llm_model.name,
|
||||
provider: llm_model.provider,
|
||||
}
|
||||
|
||||
# Clean up companion users
|
||||
llm_model.enabled_chat_bot = false
|
||||
llm_model.toggle_companion_user
|
||||
|
||||
if llm_model.destroy
|
||||
log_llm_model_deletion(model_details)
|
||||
head :no_content
|
||||
else
|
||||
render_json_error llm_model
|
||||
|
|
@@ -190,6 +205,89 @@ module DiscourseAi
|
||||
permitted
|
||||
end
|
||||
|
||||
def ai_llm_logger_fields
|
||||
{
|
||||
display_name: {
|
||||
},
|
||||
name: {
|
||||
},
|
||||
provider: {
|
||||
},
|
||||
tokenizer: {
|
||||
},
|
||||
url: {
|
||||
},
|
||||
max_prompt_tokens: {
|
||||
},
|
||||
max_output_tokens: {
|
||||
},
|
||||
enabled_chat_bot: {
|
||||
},
|
||||
vision_enabled: {
|
||||
},
|
||||
api_key: {
|
||||
type: :sensitive,
|
||||
},
|
||||
input_cost: {
|
||||
},
|
||||
output_cost: {
|
||||
},
|
||||
# JSON fields should be tracked as simple changes
|
||||
json_fields: [:provider_params],
|
||||
}
|
||||
end
|
||||
|
||||
def log_llm_model_creation(llm_model)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { model_id: llm_model.id, subject: llm_model.display_name }
|
||||
|
||||
# Add quota information as a special case
|
||||
if llm_model.llm_quotas.any?
|
||||
entity_details[:quotas] = llm_model
|
||||
.llm_quotas
|
||||
.map do |quota|
|
||||
"Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
|
||||
end
|
||||
.join("; ")
|
||||
end
|
||||
|
||||
logger.log_creation("llm_model", llm_model, ai_llm_logger_fields, entity_details)
|
||||
end
|
||||
|
||||
def log_llm_model_update(llm_model, initial_attributes, initial_quotas)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { model_id: llm_model.id, subject: llm_model.display_name }
|
||||
|
||||
# Track quota changes separately as they're a special case
|
||||
current_quotas = llm_model.llm_quotas.reload.map(&:attributes)
|
||||
if initial_quotas != current_quotas
|
||||
initial_quota_summary =
|
||||
initial_quotas
|
||||
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
|
||||
.join("; ")
|
||||
current_quota_summary =
|
||||
current_quotas
|
||||
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
|
||||
.join("; ")
|
||||
entity_details[:quotas_changed] = true
|
||||
entity_details[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
|
||||
end
|
||||
|
||||
logger.log_update(
|
||||
"llm_model",
|
||||
llm_model,
|
||||
initial_attributes,
|
||||
ai_llm_logger_fields,
|
||||
entity_details,
|
||||
)
|
||||
end
|
||||
|
||||
def log_llm_model_deletion(model_details)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
model_details[:subject] = model_details[:display_name]
|
||||
logger.log_deletion("llm_model", model_details)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -61,6 +61,7 @@ module DiscourseAi
ai_persona = AiPersona.new(ai_persona_params.except(:rag_uploads))
|
||||
if ai_persona.save
|
||||
RagDocumentFragment.link_target_and_uploads(ai_persona, attached_upload_ids)
|
||||
log_ai_persona_creation(ai_persona)
|
||||
|
||||
render json: {
|
||||
ai_persona: LocalizedAiPersonaSerializer.new(ai_persona, root: false),
|
||||
|
|
@@ -77,8 +78,11 @@ module DiscourseAi
end
|
||||
|
||||
def update
|
||||
initial_attributes = @ai_persona.attributes.dup
|
||||
|
||||
if @ai_persona.update(ai_persona_params.except(:rag_uploads))
|
||||
RagDocumentFragment.update_target_uploads(@ai_persona, attached_upload_ids)
|
||||
log_ai_persona_update(@ai_persona, initial_attributes)
|
||||
|
||||
render json: LocalizedAiPersonaSerializer.new(@ai_persona, root: false)
|
||||
else
|
||||
|
|
@@ -87,7 +91,14 @@ module DiscourseAi
end
|
||||
|
||||
def destroy
|
||||
persona_details = {
|
||||
persona_id: @ai_persona.id,
|
||||
name: @ai_persona.name,
|
||||
description: @ai_persona.description,
|
||||
}
|
||||
|
||||
if @ai_persona.destroy
|
||||
log_ai_persona_deletion(persona_details)
|
||||
head :no_content
|
||||
else
|
||||
render_json_error @ai_persona
|
||||
|
|
@@ -264,6 +275,92 @@ module DiscourseAi
|
||||
examples.map { |example_arr| example_arr.take(2).map(&:to_s) }
|
||||
end
|
||||
|
||||
def ai_persona_logger_fields
|
||||
{
|
||||
name: {
|
||||
},
|
||||
description: {
|
||||
},
|
||||
enabled: {
|
||||
},
|
||||
priority: {
|
||||
},
|
||||
system_prompt: {
|
||||
type: :large_text,
|
||||
},
|
||||
default_llm_id: {
|
||||
},
|
||||
temperature: {
|
||||
},
|
||||
top_p: {
|
||||
},
|
||||
user_id: {
|
||||
},
|
||||
max_context_posts: {
|
||||
},
|
||||
vision_enabled: {
|
||||
},
|
||||
vision_max_pixels: {
|
||||
},
|
||||
rag_chunk_tokens: {
|
||||
},
|
||||
rag_chunk_overlap_tokens: {
|
||||
},
|
||||
rag_conversation_chunks: {
|
||||
},
|
||||
rag_llm_model_id: {
|
||||
},
|
||||
question_consolidator_llm_id: {
|
||||
},
|
||||
allow_chat_channel_mentions: {
|
||||
},
|
||||
allow_chat_direct_messages: {
|
||||
},
|
||||
allow_topic_mentions: {
|
||||
},
|
||||
allow_personal_messages: {
|
||||
},
|
||||
tool_details: {
|
||||
type: :large_text,
|
||||
},
|
||||
forced_tool_count: {
|
||||
},
|
||||
force_default_llm: {
|
||||
},
|
||||
# JSON fields
|
||||
json_fields: %i[tools response_format examples allowed_group_ids],
|
||||
}
|
||||
end
|
||||
|
||||
def log_ai_persona_creation(ai_persona)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { persona_id: ai_persona.id, subject: ai_persona.name }
|
||||
entity_details[:tools_count] = (ai_persona.tools || []).size
|
||||
|
||||
logger.log_creation("persona", ai_persona, ai_persona_logger_fields, entity_details)
|
||||
end
|
||||
|
||||
def log_ai_persona_update(ai_persona, initial_attributes)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { persona_id: ai_persona.id, subject: ai_persona.name }
|
||||
entity_details[:tools_count] = ai_persona.tools.size if ai_persona.tools.present?
|
||||
|
||||
logger.log_update(
|
||||
"persona",
|
||||
ai_persona,
|
||||
initial_attributes,
|
||||
ai_persona_logger_fields,
|
||||
entity_details,
|
||||
)
|
||||
end
|
||||
|
||||
def log_ai_persona_deletion(persona_details)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
persona_details[:subject] = persona_details[:name]
|
||||
|
||||
logger.log_deletion("persona", persona_details)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -10,6 +10,10 @@ module DiscourseAi
end
|
||||
|
||||
def update
|
||||
initial_settings = AiModerationSetting.spam
|
||||
initial_custom_instructions = initial_settings&.data&.dig("custom_instructions")
|
||||
initial_llm_model_id = initial_settings&.llm_model_id
|
||||
|
||||
updated_params = {}
|
||||
if allowed_params.key?(:llm_model_id)
|
||||
llm_model_id = updated_params[:llm_model_id] = allowed_params[:llm_model_id]
|
||||
|
|
@@ -36,6 +40,8 @@ module DiscourseAi
else
|
||||
AiModerationSetting.create!(updated_params.merge(setting_type: :spam))
|
||||
end
|
||||
|
||||
log_ai_spam_update(initial_llm_model_id, initial_custom_instructions, allowed_params)
|
||||
end
|
||||
|
||||
is_enabled = ActiveModel::Type::Boolean.new.cast(allowed_params[:is_enabled])
|
||||
|
|
@@ -113,6 +119,30 @@ module DiscourseAi
|
||||
private
|
||||
|
||||
def log_ai_spam_update(initial_llm_model_id, initial_custom_instructions, params)
|
||||
changes_to_log = {}
|
||||
|
||||
if params.key?(:llm_model_id) && initial_llm_model_id.to_s != params[:llm_model_id].to_s
|
||||
old_model_name =
|
||||
LlmModel.find_by(id: initial_llm_model_id)&.display_name || initial_llm_model_id
|
||||
new_model_name =
|
||||
LlmModel.find_by(id: params[:llm_model_id])&.display_name || params[:llm_model_id]
|
||||
|
||||
changes_to_log[:llm_model_id] = "#{old_model_name} → #{new_model_name}"
|
||||
end
|
||||
|
||||
if params.key?(:custom_instructions) &&
|
||||
initial_custom_instructions != params[:custom_instructions]
|
||||
changes_to_log[:custom_instructions] = params[:custom_instructions]
|
||||
end
|
||||
|
||||
if changes_to_log.present?
|
||||
changes_to_log[:subject] = I18n.t("discourse_ai.spam_detection.logging_subject")
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
logger.log_custom("update_ai_spam_settings", changes_to_log)
|
||||
end
|
||||
end
|
||||
|
||||
def allowed_params
|
||||
params.permit(:is_enabled, :llm_model_id, :custom_instructions)
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -25,6 +25,7 @@ module DiscourseAi
|
||||
if ai_tool.save
|
||||
RagDocumentFragment.link_target_and_uploads(ai_tool, attached_upload_ids)
|
||||
log_ai_tool_creation(ai_tool)
|
||||
render_serialized(ai_tool, AiCustomToolSerializer, status: :created)
|
||||
else
|
||||
render_json_error ai_tool
|
||||
|
|
@@ -32,8 +33,11 @@ module DiscourseAi
end
|
||||
|
||||
def update
|
||||
initial_attributes = @ai_tool.attributes.dup
|
||||
|
||||
if @ai_tool.update(ai_tool_params)
|
||||
RagDocumentFragment.update_target_uploads(@ai_tool, attached_upload_ids)
|
||||
log_ai_tool_update(@ai_tool, initial_attributes)
|
||||
render_serialized(@ai_tool, AiCustomToolSerializer)
|
||||
else
|
||||
render_json_error @ai_tool
|
||||
|
|
@@ -41,7 +45,15 @@ module DiscourseAi
end
|
||||
|
||||
def destroy
|
||||
tool_logger_details = {
|
||||
tool_id: @ai_tool.id,
|
||||
name: @ai_tool.name,
|
||||
tool_name: @ai_tool.tool_name,
|
||||
subject: @ai_tool.name,
|
||||
}
|
||||
|
||||
if @ai_tool.destroy
|
||||
log_ai_tool_deletion(tool_logger_details)
|
||||
head :no_content
|
||||
else
|
||||
render_json_error @ai_tool
|
||||
|
|
@@ -96,6 +108,60 @@ module DiscourseAi
)
|
||||
.except(:rag_uploads)
|
||||
end
|
||||
|
||||
def ai_tool_logger_fields
|
||||
{
|
||||
name: {
|
||||
},
|
||||
tool_name: {
|
||||
},
|
||||
description: {
|
||||
},
|
||||
summary: {
|
||||
},
|
||||
enabled: {
|
||||
},
|
||||
rag_chunk_tokens: {
|
||||
},
|
||||
rag_chunk_overlap_tokens: {
|
||||
},
|
||||
rag_llm_model_id: {
|
||||
},
|
||||
script: {
|
||||
type: :large_text,
|
||||
},
|
||||
parameters: {
|
||||
type: :large_text,
|
||||
},
|
||||
}
|
||||
end
|
||||
|
||||
def log_ai_tool_creation(ai_tool)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
|
||||
entity_details = { tool_id: ai_tool.id, subject: ai_tool.name }
|
||||
entity_details[:parameter_count] = ai_tool.parameters.size if ai_tool.parameters.present?
|
||||
|
||||
logger.log_creation("tool", ai_tool, ai_tool_logger_fields, entity_details)
|
||||
end
|
||||
|
||||
def log_ai_tool_update(ai_tool, initial_attributes)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
entity_details = { tool_id: ai_tool.id, subject: ai_tool.name }
|
||||
|
||||
logger.log_update(
|
||||
"tool",
|
||||
ai_tool,
|
||||
initial_attributes,
|
||||
ai_tool_logger_fields,
|
||||
entity_details,
|
||||
)
|
||||
end
|
||||
|
||||
def log_ai_tool_deletion(tool_details)
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
|
||||
logger.log_deletion("tool", tool_details)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -24,6 +24,22 @@ en:
label: "Sort by"
|
||||
tag:
|
||||
label: "Tag"
|
||||
logs:
|
||||
staff_actions:
|
||||
actions:
|
||||
create_ai_llm_model: "Create LLM model"
|
||||
update_ai_llm_model: "Update LLM model"
|
||||
delete_ai_llm_model: "Delete LLM model"
|
||||
create_ai_persona: "Create AI persona"
|
||||
update_ai_persona: "Update AI persona"
|
||||
delete_ai_persona: "Delete AI persona"
|
||||
create_ai_tool: "Create AI tool"
|
||||
update_ai_tool: "Update AI tool"
|
||||
delete_ai_tool: "Delete AI tool"
|
||||
create_ai_embedding: "Create AI embedding"
|
||||
update_ai_embedding: "Update AI embedding"
|
||||
delete_ai_embedding: "Delete AI embedding"
|
||||
update_ai_spam_settings: "Update AI spam settings"
|
||||
|
||||
js:
|
||||
discourse_automation:
|
||||
|
|
|
|||
|
|
@@ -284,6 +284,7 @@ en:
unexpected: "An unexpected error occured"
|
||||
bot_user_update_failed: "Failed to update the spam scanning bot user"
|
||||
configuration_missing: "The AI spam detection configuration is missing. Add configuration in the 'Admin > Plugins > Discourse AI > Spam' before enabling."
|
||||
logging_subject: "Spam detection"
|
||||
|
||||
ai_bot:
|
||||
reply_error: "Sorry, it looks like our system encountered an unexpected issue while trying to reply.\n\n[details='Error details']\n%{details}\n[/details]"
|
||||
|
|
@@ -585,6 +586,11 @@ en:
missing_provider_param: "%{param} can't be blank"
|
||||
bedrock_invalid_url: "Please complete all the fields to use this model."
|
||||
|
||||
ai_staff_action_logger:
|
||||
updated: "updated"
|
||||
set: "set"
|
||||
removed: "removed"
|
||||
|
||||
errors:
|
||||
quota_exceeded: "You have exceeded the quota for this model. Please try again in %{relative_time}."
|
||||
quota_required: "You must specify maximum tokens or usages for this model"
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,206 @@
# frozen_string_literal: true
|
||||
|
||||
module DiscourseAi
|
||||
module Utils
|
||||
class AiStaffActionLogger
|
||||
## Maximum length for text fields before truncation/simplification
|
||||
MAX_TEXT_LENGTH = 100
|
||||
|
||||
def initialize(current_user)
|
||||
@current_user = current_user
|
||||
@staff_logger = ::StaffActionLogger.new(current_user)
|
||||
end
|
||||
|
||||
## Logs the creation of an AI entity (LLM model or persona)
|
||||
## @param entity_type [Symbol] The type of AI entity being created
|
||||
## @param entity [Object] The entity object being created
|
||||
## @param field_config [Hash] Configuration for how to handle different entity fields
|
||||
## @param entity_details [Hash] Additional details about the entity to be logged
|
||||
def log_creation(entity_type, entity, field_config = {}, entity_details = {})
|
||||
# Start with provided entity details (id, name, etc.)
|
||||
# Convert all keys to strings for consistent handling in StaffActionLogger
|
||||
log_details = {}
|
||||
|
||||
# Extract subject for StaffActionLogger.base_attrs
|
||||
subject =
|
||||
entity_details[:subject] ||
|
||||
(entity.respond_to?(:display_name) ? entity.display_name : nil)
|
||||
|
||||
# Add the entity details but preserve subject as a top-level attribute
|
||||
entity_details.each { |k, v| log_details[k.to_s] = v unless k == :subject }
|
||||
|
||||
# Extract attributes based on field configuration and ensure string keys
|
||||
extract_entity_attributes(entity, field_config).each do |key, value|
|
||||
log_details[key.to_s] = value
|
||||
end
|
||||
|
||||
@staff_logger.log_custom("create_ai_#{entity_type}", log_details.merge(subject: subject))
|
||||
end
|
||||
|
||||
## Logs an update to an AI entity with before/after comparison
|
||||
## @param entity_type [Symbol] The type of AI entity being updated
|
||||
## @param entity [Object] The entity object after update
|
||||
## @param initial_attributes [Hash] The attributes of the entity before update
|
||||
## @param field_config [Hash] Configuration for how to handle different entity fields
|
||||
## @param entity_details [Hash] Additional details about the entity to be logged
|
||||
def log_update(
|
||||
entity_type,
|
||||
entity,
|
||||
initial_attributes,
|
||||
field_config = {},
|
||||
entity_details = {}
|
||||
)
|
||||
current_attributes = entity.attributes
|
||||
changes = {}
|
||||
|
||||
# Process changes based on field configuration
|
||||
field_config
|
||||
.except(:json_fields)
|
||||
.each do |field, options|
|
||||
# Skip if field is not to be tracked
|
||||
next if options[:track] == false
|
||||
|
||||
initial_value = initial_attributes[field.to_s]
|
||||
current_value = current_attributes[field.to_s]
|
||||
|
||||
# Only process if there's an actual change
|
||||
if initial_value != current_value
|
||||
# Format the change based on field type
|
||||
changes[field.to_s] = format_field_change(
|
||||
field,
|
||||
initial_value,
|
||||
current_value,
|
||||
options,
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
# Process simple JSON fields (arrays, hashes) that should be tracked as "updated"
|
||||
if field_config[:json_fields].present?
|
||||
field_config[:json_fields].each do |field|
|
||||
field_str = field.to_s
|
||||
if initial_attributes[field_str].to_s != current_attributes[field_str].to_s
|
||||
changes[field_str] = I18n.t("discourse_ai.ai_staff_action_logger.updated")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Only log if there are actual changes
|
||||
if changes.any?
|
||||
# Extract subject for StaffActionLogger.base_attrs
|
||||
subject =
|
||||
entity_details[:subject] ||
|
||||
(entity.respond_to?(:display_name) ? entity.display_name : nil)
|
||||
|
||||
log_details = {}
|
||||
# Convert entity_details keys to strings, but preserve subject as a top-level attribute
|
||||
entity_details.each { |k, v| log_details[k.to_s] = v unless k == :subject }
|
||||
# Merge changes (already with string keys)
|
||||
log_details.merge!(changes)
|
||||
|
||||
@staff_logger.log_custom("update_ai_#{entity_type}", log_details.merge(subject: subject))
|
||||
end
|
||||
end
|
||||
|
||||
## Logs the deletion of an AI entity
|
||||
## @param entity_type [Symbol] The type of AI entity being deleted
|
||||
## @param entity_details [Hash] Details about the entity being deleted
|
||||
def log_deletion(entity_type, entity_details)
|
||||
# Extract subject for StaffActionLogger.base_attrs
|
||||
subject = entity_details[:subject]
|
||||
|
||||
# Convert all keys to strings for consistent handling in StaffActionLogger
|
||||
string_details = {}
|
||||
entity_details.each { |k, v| string_details[k.to_s] = v unless k == :subject }
|
||||
|
||||
@staff_logger.log_custom("delete_ai_#{entity_type}", string_details.merge(subject: subject))
|
||||
end
|
||||
|
||||
## Direct custom logging for complex cases
|
||||
## @param action_type [String] The type of action being logged
|
||||
## @param log_details [Hash] Details to be logged
|
||||
def log_custom(action_type, log_details)
|
||||
# Extract subject for StaffActionLogger.base_attrs if present
|
||||
subject = log_details[:subject]
|
||||
|
||||
# Convert all keys to strings for consistent handling in StaffActionLogger
|
||||
string_details = {}
|
||||
log_details.each { |k, v| string_details[k.to_s] = v unless k == :subject }
|
||||
|
||||
@staff_logger.log_custom(action_type, string_details.merge(subject: subject))
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
## Formats the change in a field's value for logging
|
||||
## @param field [Symbol] The field that changed
|
||||
## @param initial_value [Object] The original value
|
||||
## @param current_value [Object] The new value
|
||||
## @param options [Hash] Options for formatting
|
||||
## @return [String] Formatted representation of the change
|
||||
def format_field_change(field, initial_value, current_value, options = {})
|
||||
if options[:type] == :sensitive
|
||||
return format_sensitive_field_change(initial_value, current_value)
|
||||
elsif options[:type] == :large_text ||
|
||||
(initial_value.is_a?(String) && initial_value.length > MAX_TEXT_LENGTH) ||
|
||||
(current_value.is_a?(String) && current_value.length > MAX_TEXT_LENGTH)
|
||||
return I18n.t("discourse_ai.ai_staff_action_logger.updated")
|
||||
end
|
||||
|
||||
# Default formatting: "old_value → new_value"
|
||||
"#{initial_value} → #{current_value}"
|
||||
end
|
||||
|
||||
## Formats changes to sensitive fields without exposing actual values
|
||||
## @param initial_value [Object] The original value
|
||||
## @param current_value [Object] The new value
|
||||
## @return [String] Description of the change (updated/set/removed)
|
||||
def format_sensitive_field_change(initial_value, current_value)
|
||||
if initial_value.present? && current_value.present?
|
||||
I18n.t("discourse_ai.ai_staff_action_logger.updated")
|
||||
elsif current_value.present?
|
||||
I18n.t("discourse_ai.ai_staff_action_logger.set")
|
||||
else
|
||||
I18n.t("discourse_ai.ai_staff_action_logger.removed")
|
||||
end
|
||||
end
|
||||
|
||||
## Extracts relevant attributes from an entity based on field configuration
|
||||
## @param entity [Object] The entity to extract attributes from
|
||||
## @param field_config [Hash] Configuration for how to handle different entity fields
|
||||
## @return [Hash] The extracted attributes
|
||||
def extract_entity_attributes(entity, field_config)
|
||||
result = {}
|
||||
field_config.each do |field, options|
|
||||
# Skip special keys like :json_fields which are arrays, not field configurations
|
||||
next if field == :json_fields
|
||||
|
||||
# Skip if options is not a hash or if explicitly marked as not to be extracted
|
||||
next if !options.is_a?(Hash) || options[:extract] == false
|
||||
|
||||
# Get the actual field value
|
||||
field_sym = field.to_sym
|
||||
value = entity.respond_to?(field_sym) ? entity.public_send(field_sym) : nil
|
||||
|
||||
# Apply field-specific handling
|
||||
if options[:type] == :sensitive
|
||||
result[field] = value.present? ? "[FILTERED]" : nil
|
||||
elsif options[:type] == :large_text && value.is_a?(String) &&
|
||||
value.length > MAX_TEXT_LENGTH
|
||||
result[field] = value.truncate(MAX_TEXT_LENGTH)
|
||||
else
|
||||
result[field] = value
|
||||
end
|
||||
end
|
||||
|
||||
# Always include dimensions if it exists on the entity
|
||||
# This is important for embeddings which are tested for dimensions value
|
||||
if entity.respond_to?(:dimensions) && !result.key?(:dimensions)
|
||||
result[:dimensions] = entity.dimensions
|
||||
end
|
||||
|
||||
result
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,480 @@
# frozen_string_literal: true
|
||||
|
||||
RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
|
||||
fab!(:admin)
|
||||
fab!(:llm_model)
|
||||
fab!(:ai_persona)
|
||||
fab!(:group)
|
||||
|
||||
subject { described_class.new(admin) }
|
||||
|
||||
describe "#log_creation" do
|
||||
it "logs creation of an entity with field configuration" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
# Create field configuration
|
||||
field_config = { name: {}, provider: {}, url: {}, api_key: { type: :sensitive } }
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
# Setup model with sensitive data
|
||||
llm_model.update!(api_key: "secret_key")
|
||||
|
||||
subject.log_creation("llm_model", llm_model, field_config, entity_details)
|
||||
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"create_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => llm_model.id,
|
||||
"name" => llm_model.name,
|
||||
"provider" => llm_model.provider,
|
||||
"url" => llm_model.url,
|
||||
"api_key" => "[FILTERED]",
|
||||
),
|
||||
)
|
||||
end
|
||||
|
||||
it "handles large text fields with type declaration" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
# Create a persona with a large system prompt
|
||||
large_prompt = "a" * 200
|
||||
ai_persona.update!(system_prompt: large_prompt)
|
||||
|
||||
# Create entity details
|
||||
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }
|
||||
|
||||
field_config = { name: {}, description: {}, system_prompt: { type: :large_text } }
|
||||
|
||||
subject.log_creation("persona", ai_persona, field_config, entity_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"create_ai_persona",
|
||||
hash_including(
|
||||
"persona_id" => ai_persona.id,
|
||||
"name" => ai_persona.name,
|
||||
"system_prompt" => an_instance_of(String),
|
||||
),
|
||||
) do |action, details|
|
||||
# Check that system_prompt was truncated
|
||||
expect(details["system_prompt"].length).to be < 200
|
||||
end
|
||||
end
|
||||
|
||||
it "allows excluding fields from extraction" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
field_config = {
|
||||
name: {
|
||||
},
|
||||
display_name: {
|
||||
},
|
||||
provider: {
|
||||
extract: false,
|
||||
}, # Should be excluded
|
||||
url: {
|
||||
},
|
||||
}
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
subject.log_creation("llm_model", llm_model, field_config, entity_details)
|
||||
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"create_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => llm_model.id,
|
||||
"name" => llm_model.name,
|
||||
"display_name" => llm_model.display_name,
|
||||
"url" => llm_model.url,
|
||||
),
|
||||
) do |action, details|
|
||||
# Provider should not be present
|
||||
expect(details).not_to have_key("provider")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#log_update" do
|
||||
it "handles empty arrays and complex JSON properly" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
# Setup initial attributes with empty JSON arrays
|
||||
initial_attributes = { "name" => "Old Name", "allowed_group_ids" => [] }
|
||||
|
||||
# Update with complex JSON
|
||||
ai_persona.update!(name: "New Name", allowed_group_ids: [group.id, 999])
|
||||
|
||||
field_config = { name: {}, json_fields: %w[allowed_group_ids] }
|
||||
|
||||
# Create entity details
|
||||
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }
|
||||
|
||||
subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"update_ai_persona",
|
||||
hash_including(
|
||||
"persona_id" => ai_persona.id,
|
||||
"persona_name" => ai_persona.name,
|
||||
"name" => "Old Name → New Name",
|
||||
"allowed_group_ids" => "updated",
|
||||
),
|
||||
)
|
||||
end
|
||||
|
||||
it "logs changes to attributes based on field configuration" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
initial_attributes = {
|
||||
"name" => "Old Name",
|
||||
"display_name" => "Old Display Name",
|
||||
"provider" => "open_ai",
|
||||
"api_key" => "old_secret",
|
||||
}
|
||||
|
||||
llm_model.update!(
|
||||
name: "New Name",
|
||||
display_name: "New Display Name",
|
||||
provider: "anthropic",
|
||||
api_key: "new_secret",
|
||||
)
|
||||
|
||||
field_config = { name: {}, display_name: {}, provider: {}, api_key: { type: :sensitive } }
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"update_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => llm_model.id,
|
||||
"name" => "Old Name → New Name",
|
||||
"display_name" => "Old Display Name → New Display Name",
|
||||
"provider" => "open_ai → anthropic",
|
||||
"api_key" => "updated", # Not showing actual values
|
||||
),
|
||||
)
|
||||
end
|
||||
|
||||
it "doesn't log when there are no changes" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
initial_attributes = {
|
||||
"name" => llm_model.name,
|
||||
"display_name" => llm_model.display_name,
|
||||
"provider" => llm_model.provider,
|
||||
}
|
||||
|
||||
field_config = { name: {}, display_name: {}, provider: {} }
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
||||
|
||||
# Verify log_custom was not called
|
||||
expect(staff_action_logger).not_to have_received(:log_custom)
|
||||
end
|
||||
|
||||
it "handles fields marked as not to be tracked" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
initial_attributes = {
|
||||
"name" => "Old Name",
|
||||
"display_name" => "Old Display Name",
|
||||
"provider" => "open_ai",
|
||||
}
|
||||
|
||||
llm_model.update!(name: "New Name", display_name: "New Display Name", provider: "anthropic")
|
||||
|
||||
field_config = {
|
||||
name: {
|
||||
},
|
||||
display_name: {
|
||||
},
|
||||
provider: {
|
||||
track: false,
|
||||
}, # Should not be tracked even though it changed
|
||||
}
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
||||
|
||||
# Provider should not appear in the logged changes
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"update_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => llm_model.id,
|
||||
"name" => "Old Name → New Name",
|
||||
"display_name" => "Old Display Name → New Display Name",
|
||||
),
|
||||
) do |action, details|
|
||||
expect(details).not_to have_key("provider")
|
||||
end
|
||||
end
|
||||
|
||||
it "handles json fields properly" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
# Setup initial attributes with JSON fields
|
||||
initial_attributes = {
|
||||
"name" => "Old Name",
|
||||
"tools" => [["search", { "base_query" => "test" }, true]],
|
||||
}
|
||||
|
||||
# Update with different JSON
|
||||
ai_persona.update!(
|
||||
name: "New Name",
|
||||
tools: [["search", { "base_query" => "updated" }, true], ["categories", {}, false]],
|
||||
)
|
||||
|
||||
field_config = { name: {}, json_fields: %w[tools] }
|
||||
|
||||
# Create entity details
|
||||
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }
|
||||
|
||||
subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"update_ai_persona",
|
||||
hash_including(
|
||||
"persona_id" => ai_persona.id,
|
||||
"persona_name" => ai_persona.name,
|
||||
"name" => "Old Name → New Name",
|
||||
"tools" => "updated",
|
||||
),
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe "#log_deletion" do
|
||||
it "logs deletion with the correct entity type" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
details = {
|
||||
model_id: llm_model.id,
|
||||
display_name: llm_model.display_name,
|
||||
name: llm_model.name,
|
||||
}
|
||||
|
||||
subject.log_deletion("llm_model", details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"delete_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => details[:model_id],
|
||||
"display_name" => details[:display_name],
|
||||
"name" => details[:name],
|
||||
),
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe "#log_custom" do
|
||||
it "delegates to StaffActionLogger#log_custom" do
|
||||
staff_action_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||
allow(staff_action_logger).to receive(:log_custom)
|
||||
|
||||
details = { key: "value" }
|
||||
|
||||
subject.log_custom("custom_action_type", details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||
"custom_action_type",
|
||||
hash_including("key" => details[:key]),
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
describe "Special cases from controllers" do
|
||||
context "with EmbeddingDefinition" do
|
||||
fab!(:embedding_definition) {
|
||||
Fabricate(
|
||||
:embedding_definition,
|
||||
display_name: "Test Embedding",
|
||||
dimensions: 768,
|
||||
provider: "open_ai"
|
||||
)
|
||||
}
|
||||
|
||||
it "includes dimensions in logged data" do
|
||||
# Setup
|
||||
staff_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
|
||||
allow(staff_logger).to receive(:log_custom)
|
||||
|
||||
# Create entity details
|
||||
entity_details = { embedding_id: embedding_definition.id, subject: embedding_definition.display_name }
|
||||
|
||||
# Field config without dimensions
|
||||
field_config = {
|
||||
display_name: {},
|
||||
provider: {},
|
||||
url: {}
|
||||
}
|
||||
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
||||
logger.log_creation("embedding", embedding_definition, field_config, entity_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_logger).to have_received(:log_custom).with(
|
||||
"create_ai_embedding",
|
||||
hash_including("dimensions" => 768)
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context "with LlmModel quotas" do
|
||||
before do
|
||||
# Create a quota for the model
|
||||
@quota = Fabricate(:llm_quota, llm_model: llm_model, group: group, max_tokens: 1000)
|
||||
end
|
||||
|
||||
it "handles quota changes in log_llm_model_creation" do
|
||||
# Setup
|
||||
staff_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
|
||||
allow(staff_logger).to receive(:log_custom)
|
||||
|
||||
# Call the method directly as it would be called from the controller
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
||||
field_config = { display_name: {}, name: {} }
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
log_details = entity_details.dup
|
||||
log_details.merge!(logger.send(:extract_entity_attributes, llm_model, field_config))
|
||||
|
||||
# Add quota information as a special case
|
||||
log_details[:quotas] = llm_model
|
||||
.llm_quotas
|
||||
.map do |quota|
|
||||
"Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
|
||||
end
|
||||
.join("; ")
|
||||
|
||||
logger.log_custom("create_ai_llm_model", log_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_logger).to have_received(:log_custom).with(
|
||||
"create_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => llm_model.id,
|
||||
"model_name" => llm_model.name,
|
||||
"display_name" => llm_model.display_name,
|
||||
),
|
||||
)
|
||||
expect(staff_logger).to have_received(:log_custom).with(
|
||||
"create_ai_llm_model",
|
||||
hash_including("quotas" => a_string_including("Group #{group.id}", "1000 tokens")),
|
||||
)
|
||||
end
|
||||
|
||||
it "handles quota changes in log_llm_model_update" do
|
||||
initial_quotas = llm_model.llm_quotas.map(&:attributes)
|
||||
|
||||
# Update the quota
|
||||
@quota.update!(max_tokens: 2000)
|
||||
current_quotas = llm_model.llm_quotas.reload.map(&:attributes)
|
||||
|
||||
# Setup
|
||||
staff_logger = instance_double(StaffActionLogger)
|
||||
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
|
||||
allow(staff_logger).to receive(:log_custom)
|
||||
|
||||
# Simulate the special quota handling in the controller
|
||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
||||
changes = {}
|
||||
|
||||
# Track quota changes separately as they're a special case
|
||||
if initial_quotas != current_quotas
|
||||
initial_quota_summary =
|
||||
initial_quotas
|
||||
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
|
||||
.join("; ")
|
||||
current_quota_summary =
|
||||
current_quotas
|
||||
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
|
||||
.join("; ")
|
||||
changes[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
|
||||
end
|
||||
|
||||
# Create entity details
|
||||
entity_details = {
|
||||
model_id: llm_model.id,
|
||||
model_name: llm_model.name,
|
||||
display_name: llm_model.display_name,
|
||||
}
|
||||
|
||||
log_details = entity_details.dup.merge(changes)
|
||||
logger.log_custom("update_ai_llm_model", log_details)
|
||||
|
||||
# Verify with have_received
|
||||
expect(staff_logger).to have_received(:log_custom).with(
|
||||
"update_ai_llm_model",
|
||||
hash_including(
|
||||
"model_id" => llm_model.id,
|
||||
"quotas" => a_string_including("1000 tokens", "2000 tokens"),
|
||||
),
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -35,6 +35,30 @@ RSpec.describe DiscourseAi::Admin::AiEmbeddingsController do
expect(created_def.matryoshka_dimensions).to eq(true)
|
||||
end
|
||||
|
||||
it "logs the creation with StaffActionLogger" do
|
||||
expect {
|
||||
post "/admin/plugins/discourse-ai/ai-embeddings.json",
|
||||
params: {
|
||||
ai_embedding: valid_attrs,
|
||||
}
|
||||
}.to change {
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "create_ai_embedding",
|
||||
).count
|
||||
}.by(1)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "create_ai_embedding",
|
||||
).last
|
||||
expect(history.details).to include("display_name: Embedding config test")
|
||||
expect(history.details).to include("provider: hugging_face")
|
||||
expect(history.details).to include("dimensions: 1001")
|
||||
expect(history.subject).to eq("Embedding config test") # Verify subject field is included
|
||||
end
|
||||
|
||||
it "stores provider-specific config params" do
|
||||
post "/admin/plugins/discourse-ai/ai-embeddings.json",
|
||||
params: {
|
||||
|
|
@@ -96,6 +120,28 @@ RSpec.describe DiscourseAi::Admin::AiEmbeddingsController do
expect(embedding_definition.reload.provider).to eq(update_attrs[:provider])
|
||||
end
|
||||
|
||||
it "logs the update with StaffActionLogger" do
|
||||
expect {
|
||||
put "/admin/plugins/discourse-ai/ai-embeddings/#{embedding_definition.id}.json",
|
||||
params: {
|
||||
ai_embedding: update_attrs,
|
||||
}
|
||||
}.to change {
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_embedding",
|
||||
).count
|
||||
}.by(1)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_embedding",
|
||||
).last
|
||||
expect(history.details).to include("embedding_id: #{embedding_definition.id}")
|
||||
expect(history.subject).to eq(embedding_definition.display_name) # Verify subject field is included
|
||||
end
|
||||
|
||||
it "returns a 404 if there is no model with the given Id" do
|
||||
put "/admin/plugins/discourse-ai/ai-embeddings/9999999.json"
|
||||
|
||||
|
|
@@ -142,6 +188,29 @@ RSpec.describe DiscourseAi::Admin::AiEmbeddingsController do
}.to change(EmbeddingDefinition, :count).by(-1)
|
||||
end
|
||||
|
||||
it "logs the deletion with StaffActionLogger" do
|
||||
embedding_id = embedding_definition.id
|
||||
display_name = embedding_definition.display_name
|
||||
|
||||
expect {
|
||||
delete "/admin/plugins/discourse-ai/ai-embeddings/#{embedding_definition.id}.json"
|
||||
}.to change {
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "delete_ai_embedding",
|
||||
).count
|
||||
}.by(1)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "delete_ai_embedding",
|
||||
).last
|
||||
expect(history.details).to include("embedding_id: #{embedding_id}")
|
||||
expect(history.details).to include("display_name: #{display_name}")
|
||||
expect(history.subject).to eq(display_name) # Verify subject field is included
|
||||
end
|
||||
|
||||
it "validates the model is not in use" do
|
||||
SiteSetting.ai_embeddings_selected_model = embedding_definition.id
|
||||
|
||||
|
|
|
|||
|
|
@@ -136,6 +136,17 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
model = LlmModel.find(created_model["id"])
|
||||
expect(model.display_name).to eq(valid_attrs[:display_name])
|
||||
end
|
||||
|
||||
it "logs staff action when creating an LLM model" do
|
||||
# Log the creation
|
||||
post "/admin/plugins/discourse-ai/ai-llms.json", params: { ai_llm: valid_attrs }
|
||||
expect(response.status).to eq(201)
|
||||
|
||||
# Now verify the log was created with the right subject
|
||||
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "create_ai_llm_model").last
|
||||
expect(history).to be_present
|
||||
expect(history.subject).to eq(valid_attrs[:display_name]) # Verify subject is set to display_name
|
||||
end
|
||||
|
||||
it "creates a companion user" do
|
||||
post "/admin/plugins/discourse-ai/ai-llms.json",
|
||||
|
|
@@ -329,6 +340,25 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
expect(response.status).to eq(200)
|
||||
expect(llm_model.reload.provider).to eq(update_attrs[:provider])
|
||||
end
|
||||
|
||||
it "logs staff action when updating an LLM model" do
|
||||
# The initial provider is different from the update
|
||||
original_provider = llm_model.provider
|
||||
display_name = llm_model.display_name
|
||||
|
||||
# Perform the update
|
||||
put "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json",
|
||||
params: {
|
||||
ai_llm: update_attrs,
|
||||
}
|
||||
|
||||
expect(response.status).to eq(200)
|
||||
|
||||
# Now verify the log was created with the right subject
|
||||
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_llm_model").last
|
||||
expect(history).to be_present
|
||||
expect(history.subject).to eq(display_name) # Verify subject is set to display_name
|
||||
end
|
||||
|
||||
it "returns a 404 if there is no model with the given Id" do
|
||||
put "/admin/plugins/discourse-ai/ai-llms/9999999.json"
|
||||
|
|
@@ -457,6 +487,21 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
expect(response).to have_http_status(:no_content)
|
||||
}.to change(LlmModel, :count).by(-1)
|
||||
end
|
||||
|
||||
it "logs staff action when deleting an LLM model" do
|
||||
# Capture the model details before deletion for comparison
|
||||
model_id = llm_model.id
|
||||
model_display_name = llm_model.display_name
|
||||
|
||||
# Delete the model
|
||||
delete "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json"
|
||||
expect(response).to have_http_status(:no_content)
|
||||
|
||||
# Now verify the log was created with the right subject
|
||||
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "delete_ai_llm_model").last
|
||||
expect(history).to be_present
|
||||
expect(history.subject).to eq(model_display_name) # Verify subject is set to display_name
|
||||
end
|
||||
|
||||
it "validates the model is not in use" do
|
||||
fake_llm = assign_fake_provider_to(:ai_helper_model)
|
||||
|
|
|
|||
|
|
@@ -223,6 +223,22 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
expect(persona.temperature).to eq(0.5)
|
||||
}.to change(AiPersona, :count).by(1)
|
||||
end
|
||||
|
||||
it "logs staff action when creating a persona" do
|
||||
# Create the persona
|
||||
post "/admin/plugins/discourse-ai/ai-personas.json",
|
||||
params: { ai_persona: valid_attributes }.to_json,
|
||||
headers: {
|
||||
"CONTENT_TYPE" => "application/json",
|
||||
}
|
||||
|
||||
expect(response).to be_successful
|
||||
|
||||
# Now verify the log was created with the right subject
|
||||
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "create_ai_persona").last
|
||||
expect(history).to be_present
|
||||
expect(history.subject).to eq("superbot") # Verify subject is set to name
|
||||
end
|
||||
end
|
||||
|
||||
context "with invalid params" do
|
||||
|
|
@@ -309,6 +325,29 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
expect(persona.top_p).to eq(nil)
|
||||
expect(persona.temperature).to eq(nil)
|
||||
end
|
||||
|
||||
it "logs staff action when updating a persona" do
|
||||
persona = Fabricate(:ai_persona, name: "original_name", description: "original description")
|
||||
|
||||
# Update the persona
|
||||
put "/admin/plugins/discourse-ai/ai-personas/#{persona.id}.json",
|
||||
params: {
|
||||
ai_persona: {
|
||||
name: "updated_name",
|
||||
description: "updated description",
|
||||
},
|
||||
}
|
||||
|
||||
expect(response).to have_http_status(:ok)
|
||||
persona.reload
|
||||
expect(persona.name).to eq("updated_name")
|
||||
expect(persona.description).to eq("updated description")
|
||||
|
||||
# Now verify the log was created with the right subject
|
||||
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_persona").last
|
||||
expect(history).to be_present
|
||||
expect(history.subject).to eq("updated_name") # Verify subject is set to the new name
|
||||
end
|
||||
|
||||
it "supports updating rag params" do
|
||||
persona = Fabricate(:ai_persona, name: "test_bot2")
|
||||
|
|
@@ -461,6 +500,21 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
expect(response).to have_http_status(:no_content)
|
||||
}.to change(AiPersona, :count).by(-1)
|
||||
end
|
||||
|
||||
it "logs staff action when deleting a persona" do
|
||||
# Capture persona details before deletion
|
||||
persona_id = ai_persona.id
|
||||
persona_name = ai_persona.name
|
||||
|
||||
# Delete the persona
|
||||
delete "/admin/plugins/discourse-ai/ai-personas/#{ai_persona.id}.json"
|
||||
expect(response).to have_http_status(:no_content)
|
||||
|
||||
# Now verify the log was created with the right subject
|
||||
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "delete_ai_persona").last
|
||||
expect(history).to be_present
|
||||
expect(history.subject).to eq(persona_name) # Verify subject is set to name
|
||||
end
|
||||
|
||||
it "is not allowed to delete system personas" do
|
||||
expect {
|
||||
|
|
|
|||
|
|
@@ -119,6 +119,90 @@ RSpec.describe DiscourseAi::Admin::AiSpamController do
"custom instructions new",
|
||||
)
|
||||
end
|
||||
|
||||
it "logs staff action when custom_instructions change" do
|
||||
put "/admin/plugins/discourse-ai/ai-spam.json",
|
||||
params: {
|
||||
is_enabled: true,
|
||||
llm_model_id: llm_model.id,
|
||||
custom_instructions: "updated instructions",
|
||||
}
|
||||
|
||||
expect(response.status).to eq(200)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_spam_settings",
|
||||
).last
|
||||
expect(history).to be_present
|
||||
expect(history.details).to include("custom_instructions")
|
||||
end
|
||||
|
||||
it "logs staff action when llm_model_id changes" do
|
||||
# Create another model to change to
|
||||
new_llm_model =
|
||||
Fabricate(:llm_model, name: "New Test Model", display_name: "New Test Model")
|
||||
|
||||
put "/admin/plugins/discourse-ai/ai-spam.json", params: { llm_model_id: new_llm_model.id }
|
||||
|
||||
expect(response.status).to eq(200)
|
||||
|
||||
# Verify the log was created with the right subject
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_spam_settings",
|
||||
).last
|
||||
expect(history).to be_present
|
||||
expect(history.details).to include("llm_model_id")
|
||||
end
|
||||
|
||||
it "does not log staff action when only is_enabled changes" do
|
||||
# Check initial count of logs
|
||||
initial_count =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_spam_settings",
|
||||
).count
|
||||
|
||||
# Update only the is_enabled setting
|
||||
put "/admin/plugins/discourse-ai/ai-spam.json", params: { is_enabled: false }
|
||||
|
||||
expect(response.status).to eq(200)
|
||||
|
||||
# Verify no new log was created
|
||||
current_count =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_spam_settings",
|
||||
).count
|
||||
expect(current_count).to eq(initial_count)
|
||||
end
|
||||
|
||||
it "logs both custom_instructions and llm_model_id changes in one entry" do
|
||||
# Create another model to change to
|
||||
new_llm_model =
|
||||
Fabricate(:llm_model, name: "Another Test Model", display_name: "Another Test Model")
|
||||
|
||||
put "/admin/plugins/discourse-ai/ai-spam.json",
|
||||
params: {
|
||||
llm_model_id: new_llm_model.id,
|
||||
custom_instructions: "new instructions for both changes",
|
||||
}
|
||||
|
||||
expect(response.status).to eq(200)
|
||||
|
||||
# Verify the log was created with all changes
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_spam_settings",
|
||||
).last
|
||||
expect(history).to be_present
|
||||
expect(history.details).to include("llm_model_id")
|
||||
expect(history.details).to include("custom_instructions")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -72,6 +72,30 @@ RSpec.describe DiscourseAi::Admin::AiToolsController do
expect(response.parsed_body["ai_tool"]["tool_name"]).to eq("test_tool_1")
|
||||
end
|
||||
|
||||
it "logs the creation with StaffActionLogger" do
|
||||
expect {
|
||||
post "/admin/plugins/discourse-ai/ai-tools.json",
|
||||
params: { ai_tool: valid_attributes }.to_json,
|
||||
headers: {
|
||||
"CONTENT_TYPE" => "application/json",
|
||||
}
|
||||
}.to change {
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "create_ai_tool",
|
||||
).count
|
||||
}.by(1)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "create_ai_tool",
|
||||
).last
|
||||
expect(history.details).to include("name: Test Tool 1")
|
||||
expect(history.details).to include("tool_name: test_tool_1")
|
||||
expect(history.subject).to eq("Test Tool 1") # Verify subject field is included
|
||||
end
|
||||
|
||||
context "when the parameter is a enum" do
|
||||
it "creates the tool with the correct parameters" do
|
||||
attrs = valid_attributes
|
||||
|
|
@@ -141,6 +165,33 @@ RSpec.describe DiscourseAi::Admin::AiToolsController do
expect(ai_tool.reload.name).to eq("Updated Tool")
|
||||
end
|
||||
|
||||
it "logs the update with StaffActionLogger" do
|
||||
expect {
|
||||
put "/admin/plugins/discourse-ai/ai-tools/#{ai_tool.id}.json",
|
||||
params: {
|
||||
ai_tool: {
|
||||
name: "Updated Tool",
|
||||
description: "Updated description",
|
||||
},
|
||||
}
|
||||
}.to change {
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_tool",
|
||||
).count
|
||||
}.by(1)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "update_ai_tool",
|
||||
).last
|
||||
expect(history.details).to include("tool_id: #{ai_tool.id}")
|
||||
expect(history.details).to include("name")
|
||||
expect(history.details).to include("description")
|
||||
expect(history.subject).to eq("Updated Tool")
|
||||
end
|
||||
|
||||
context "when updating an enum parameters" do
|
||||
it "updates the enum fixed values" do
|
||||
put "/admin/plugins/discourse-ai/ai-tools/#{ai_tool.id}.json",
|
||||
|
|
@@ -172,6 +223,25 @@ RSpec.describe DiscourseAi::Admin::AiToolsController do
|
||||
expect(response).to have_http_status(:no_content)
|
||||
end
|
||||
|
||||
it "logs the deletion with StaffActionLogger" do
|
||||
tool_id = ai_tool.id
|
||||
|
||||
expect { delete "/admin/plugins/discourse-ai/ai-tools/#{ai_tool.id}.json" }.to change {
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "delete_ai_tool",
|
||||
).count
|
||||
}.by(1)
|
||||
|
||||
history =
|
||||
UserHistory.where(
|
||||
action: UserHistory.actions[:custom_staff],
|
||||
custom_type: "delete_ai_tool",
|
||||
).last
|
||||
expect(history.details).to include("tool_id: #{tool_id}")
|
||||
expect(history.subject).to eq("Test Tool") # Verify subject field is included
|
||||
end
|
||||
end
|
||||
|
||||
describe "#test" do