DEV: updates...
Commit 3009292d1b (parent c3cd5d65da)
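The controllers in this commit hand a per-field configuration hash to DiscourseAi::Utils::AiStaffActionLogger. The plugin's actual implementation of that class is not shown in this diff; the standalone Ruby sketch below only illustrates the behaviour the specs further down assert (sensitive fields are masked on create, :large_text fields are truncated, json_fields and changed sensitive/large fields are reported as "updated", and extract: false / track: false fields are skipped). Every name here other than the method names appearing in the diff is an assumption.

# Simplified sketch only -- NOT the plugin's real AiStaffActionLogger.
class SketchAiStaffActionLogger
  FILTERED = "[FILTERED]"

  def initialize(sink)
    @sink = sink # stands in for StaffActionLogger#log_custom
  end

  # Mirrors log_creation("llm_model", model, fields, entity_details)
  def log_creation(entity_type, entity, fields, entity_details = {})
    details = entity_details.dup
    fields.each do |name, config|
      next if name == :json_fields || config[:extract] == false
      value = entity.public_send(name)
      next if value.nil?
      details[name] =
        case config[:type]
        when :sensitive then FILTERED
        when :large_text then value.to_s[0, 100]
        else value
        end
    end
    @sink.call("create_ai_#{entity_type}", details)
  end

  # Mirrors log_update(...): only changed, tracked fields are reported.
  def log_update(entity_type, entity, initial_attributes, fields, entity_details = {})
    changes = {}
    fields.each do |name, config|
      next if name == :json_fields || config[:track] == false
      before = initial_attributes[name.to_s]
      after = entity.public_send(name)
      next if before == after
      changes[name] = config[:type] ? "updated" : "#{before} → #{after}"
    end
    Array(fields[:json_fields]).each do |name|
      changes[name] = "updated" if initial_attributes[name.to_s].to_s != entity.public_send(name).to_s
    end
    return if changes.empty?
    @sink.call("update_ai_#{entity_type}", entity_details.merge(changes))
  end

  def log_deletion(entity_type, details)
    @sink.call("delete_ai_#{entity_type}", details)
  end
end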
@@ -56,7 +56,6 @@ module DiscourseAi
)
end

# Capture initial state for logging
initial_attributes = embedding_def.attributes.dup

if embedding_def.update(ai_embeddings_params.except(:dimensions))
@@ -80,13 +79,12 @@ module DiscourseAi
return render_json_error(I18n.t("discourse_ai.embeddings.delete_failed"), status: 409)
end

# Capture embedding details for logging before destruction
embedding_details = {
embedding_id: embedding_def.id,
display_name: embedding_def.display_name,
provider: embedding_def.provider,
dimensions: embedding_def.dimensions,
subject: embedding_def.display_name # Use display_name as the subject for EmbeddingDefinition
subject: embedding_def.display_name,
}

if embedding_def.destroy
@@ -144,87 +142,55 @@ module DiscourseAi
permitted
end

def log_ai_embedding_creation(embedding_def)
# Create log details
log_details = {
embedding_id: embedding_def.id,
display_name: embedding_def.display_name,
provider: embedding_def.provider,
dimensions: embedding_def.dimensions,
subject: embedding_def.display_name # Use display_name as the subject for EmbeddingDefinition
def ai_embeddings_logger_fields
{
display_name: {
},
provider: {
},
url: {
},
tokenizer_class: {
},
max_sequence_length: {
},
embed_prompt: {
type: :large_text,
},
search_prompt: {
type: :large_text,
},
matryoshka_dimensions: {
},
api_key: {
type: :sensitive,
},
# JSON fields should be tracked as simple changes
json_fields: [:provider_params],
}

# Only include tokenizer if present
if embedding_def.tokenizer_class.present?
log_details[:tokenizer] = embedding_def.tokenizer_class
end

# For sensitive fields, don't include the actual content
if embedding_def.api_key.present?
log_details[:api_key_set] = true
end

# Get subject for the log
subject = log_details[:subject]

# Log the action
StaffActionLogger.new(current_user).log_custom("create_ai_embedding", log_details)
def log_ai_embedding_creation(embedding_def)
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { embedding_id: embedding_def.id, subject: embedding_def.display_name }
logger.log_creation("embedding", embedding_def, ai_embeddings_logger_fields, entity_details)
end

def log_ai_embedding_update(embedding_def, initial_attributes)
# Create log details
log_details = {
embedding_id: embedding_def.id,
display_name: embedding_def.display_name,
subject: embedding_def.display_name # Use display_name as the subject for EmbeddingDefinition
}

# Track changes in fields
changed_fields = []

# Fields to check for changes
%w[display_name provider url tokenizer_class max_sequence_length embed_prompt search_prompt matryoshka_dimensions].each do |field|
if initial_attributes[field] != embedding_def.attributes[field]
changed_fields << field
log_details["#{field}_changed"] = true
end
end

# Special handling for API key (sensitive)
if initial_attributes['api_key'].present? != embedding_def.api_key.present?
changed_fields << 'api_key'

if embedding_def.api_key.present?
log_details[:api_key_set] = true
else
log_details[:api_key_removed] = true
end
end

# Special handling for provider_params (JSON)
if initial_attributes['provider_params'].to_s != embedding_def.provider_params.to_s
changed_fields << 'provider_params'
log_details[:provider_params_changed] = true
end

# Only log if there are actual changes
if changed_fields.any?
log_details[:changed_fields] = changed_fields

# Get subject for the log
subject = log_details[:subject]

# Log the action
StaffActionLogger.new(current_user).log_custom("update_ai_embedding", log_details)
end
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { embedding_id: embedding_def.id, subject: embedding_def.display_name }
logger.log_update(
"embedding",
embedding_def,
initial_attributes,
ai_embeddings_logger_fields,
entity_details,
)
end

def log_ai_embedding_deletion(embedding_details)
# Get subject for the log (but keep it in the details hash)
subject = embedding_details[:subject]

# Log the action
StaffActionLogger.new(current_user).log_custom("delete_ai_embedding", embedding_details)
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
logger.log_deletion("embedding", embedding_details)
end
end
end
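For context, the update and destroy actions above capture state before mutating and only then call the logging helpers. A condensed, hypothetical version of that flow (the helper names come from the diff; the surrounding controller code is assumed, not verbatim):

# Assumed sketch of the call pattern implied by the embeddings hunks above.
def update
  embedding_def = EmbeddingDefinition.find(params[:id])
  initial_attributes = embedding_def.attributes.dup # capture state before the update

  if embedding_def.update(ai_embeddings_params.except(:dimensions))
    log_ai_embedding_update(embedding_def, initial_attributes)
    render json: embedding_def
  else
    render_json_error embedding_def
  end
end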
@@ -120,7 +120,7 @@ module DiscourseAi
model_id: llm_model.id,
display_name: llm_model.display_name,
name: llm_model.name,
provider: llm_model.provider
provider: llm_model.provider,
}

# Clean up companion users
@@ -206,97 +206,87 @@ module DiscourseAi
permitted
end

def log_llm_model_creation(llm_model)
# Create basic entity details with important attributes
log_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name,
provider: llm_model.provider,
tokenizer: llm_model.tokenizer
def ai_llm_logger_fields
{
display_name: {
},
name: {
},
provider: {
},
tokenizer: {
},
url: {
},
max_prompt_tokens: {
},
max_output_tokens: {
},
enabled_chat_bot: {
},
vision_enabled: {
},
api_key: {
type: :sensitive,
},
input_cost: {
},
output_cost: {
},
# JSON fields should be tracked as simple changes
json_fields: [:provider_params],
}

# Add API key indication if present (without showing the key)
if llm_model.api_key.present?
log_details[:api_key_set] = true
end

# Add token limits if set
if llm_model.max_prompt_tokens.present?
log_details[:max_prompt_tokens] = llm_model.max_prompt_tokens
end

if llm_model.max_output_tokens.present?
log_details[:max_output_tokens] = llm_model.max_output_tokens
end
def log_llm_model_creation(llm_model)
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { model_id: llm_model.id, subject: llm_model.display_name }

# Add quota information as a special case
if llm_model.llm_quotas.any?
log_details[:quotas] = llm_model.llm_quotas.map do |quota|
entity_details[:quotas] = llm_model
.llm_quotas
.map do |quota|
"Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
end.join("; ")
end
.join("; ")
end

# Use StaffActionLogger directly with the proper subject
StaffActionLogger.new(current_user).log_custom("create_ai_llm_model", log_details.merge(subject: llm_model.display_name))
logger.log_creation("llm_model", llm_model, ai_llm_logger_fields, entity_details)
end

def log_llm_model_update(llm_model, initial_attributes, initial_quotas)
# Create basic entity details
log_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
}

# Track simple changes
%w[display_name name provider tokenizer url max_prompt_tokens max_output_tokens enabled_chat_bot vision_enabled].each do |field|
if initial_attributes[field] != llm_model.attributes[field]
log_details["#{field}_changed"] = true

# For simple fields, show the before/after values
unless field == "api_key" # Skip sensitive fields
initial_value = initial_attributes[field]
current_value = llm_model.attributes[field]
log_details[field] = "#{initial_value} → #{current_value}"
end
end
end

# Handle API key changes specially
if initial_attributes["api_key"].present? != llm_model.api_key.present?
log_details[:api_key_changed] = true
if llm_model.api_key.present?
log_details[:api_key_status] = "set"
else
log_details[:api_key_status] = "removed"
end
end
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { model_id: llm_model.id, subject: llm_model.display_name }

# Track quota changes separately as they're a special case
current_quotas = llm_model.llm_quotas.reload.map(&:attributes)
if initial_quotas != current_quotas
initial_quota_summary = initial_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
current_quota_summary = current_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
log_details[:quotas_changed] = true
log_details[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
initial_quota_summary =
initial_quotas
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
.join("; ")
current_quota_summary =
current_quotas
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
.join("; ")
entity_details[:quotas_changed] = true
entity_details[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
end

# Check JSON fields like provider_params
if initial_attributes["provider_params"].to_s != llm_model.attributes["provider_params"].to_s
log_details[:provider_params_changed] = true
end

# Only log if there are actual changes
if log_details.keys.any? { |k| k.to_s.end_with?("_changed") }
# Use StaffActionLogger directly with the proper subject
StaffActionLogger.new(current_user).log_custom("update_ai_llm_model", log_details.merge(subject: llm_model.display_name))
end
logger.log_update(
"llm_model",
llm_model,
initial_attributes,
ai_llm_logger_fields,
entity_details,
)
end

def log_llm_model_deletion(model_details)
# Use StaffActionLogger directly with the proper subject
StaffActionLogger.new(current_user).log_custom("delete_ai_llm_model", model_details.merge(subject: model_details[:display_name]))
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
model_details[:subject] = model_details[:display_name]
logger.log_deletion("llm_model", model_details)
end
end
end
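The quota handling above flattens each quota record into one summary string. A minimal illustration of the resulting format (the Struct and values are invented; only the string template comes from the diff):

# Illustrative only: the summary string format stored in entity_details[:quotas].
Quota = Struct.new(:group_id, :max_tokens, :max_usages, :duration_seconds)
quotas = [Quota.new(12, 1000, 10, 86_400), Quota.new(13, 500, 5, 3_600)]

summary =
  quotas
    .map { |q| "Group #{q.group_id}: #{q.max_tokens} tokens, #{q.max_usages} usages, #{q.duration_seconds}s" }
    .join("; ")
# => "Group 12: 1000 tokens, 10 usages, 86400s; Group 13: 500 tokens, 5 usages, 3600s"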
@@ -75,7 +75,6 @@ module DiscourseAi
end

def update
# Capture initial state for logging
initial_attributes = @ai_persona.attributes.dup

if @ai_persona.update(ai_persona_params.except(:rag_uploads))
@@ -89,11 +88,10 @@ module DiscourseAi
end

def destroy
# Capture persona details for logging before destruction
persona_details = {
persona_id: @ai_persona.id,
name: @ai_persona.name,
description: @ai_persona.description
description: @ai_persona.description,
}

if @ai_persona.destroy
@@ -275,84 +273,90 @@ module DiscourseAi
examples.map { |example_arr| example_arr.take(2).map(&:to_s) }
end

def log_ai_persona_creation(ai_persona)
# Create basic entity details with important attributes
log_details = {
persona_id: ai_persona.id,
persona_name: ai_persona.name,
description: ai_persona.description
def ai_persona_logger_fields
{
name: {
},
description: {
},
enabled: {
},
priority: {
},
system_prompt: {
type: :large_text,
},
default_llm_id: {
},
temperature: {
},
top_p: {
},
user_id: {
},
max_context_posts: {
},
vision_enabled: {
},
vision_max_pixels: {
},
rag_chunk_tokens: {
},
rag_chunk_overlap_tokens: {
},
rag_conversation_chunks: {
},
rag_llm_model_id: {
},
question_consolidator_llm_id: {
},
allow_chat_channel_mentions: {
},
allow_chat_direct_messages: {
},
allow_topic_mentions: {
},
allow_personal_messages: {
},
tool_details: {
type: :large_text,
},
forced_tool_count: {
},
force_default_llm: {
},
# JSON fields
json_fields: %i[tools response_format examples allowed_group_ids],
}

# Include basic configuration fields if present
%w[enabled priority temperature top_p default_llm_id].each do |field|
value = ai_persona.send(field) if ai_persona.respond_to?(field)
log_details[field] = value if value.present?
end

# Handle system prompt specially (truncate if too long)
if ai_persona.system_prompt.present?
if ai_persona.system_prompt.length > 100
log_details[:system_prompt] = ai_persona.system_prompt.truncate(100)
else
log_details[:system_prompt] = ai_persona.system_prompt
end
end
def log_ai_persona_creation(ai_persona)
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { persona_id: ai_persona.id, subject: ai_persona.name }
entity_details[:tools_count] = (ai_persona.tools || []).size

# Add tools count separately as it's not a direct attribute
log_details[:tools_count] = (ai_persona.tools || []).size

# Add allowed_group_ids
log_details[:allowed_group_ids] = ai_persona.allowed_group_ids if ai_persona.allowed_group_ids.present?

# Use StaffActionLogger directly with the proper subject
StaffActionLogger.new(current_user).log_custom("create_ai_persona", log_details.merge(subject: ai_persona.name))
logger.log_creation("persona", ai_persona, ai_persona_logger_fields, entity_details)
end

def log_ai_persona_update(ai_persona, initial_attributes)
# Create basic entity details
log_details = {
persona_id: ai_persona.id,
persona_name: ai_persona.name
}
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { persona_id: ai_persona.id, subject: ai_persona.name }
entity_details[:tools_count] = ai_persona.tools.size if ai_persona.tools.present?

# Track simple changes
%w[name description enabled priority system_prompt default_llm_id temperature top_p
vision_enabled vision_max_pixels max_context_posts rag_chunk_tokens rag_chunk_overlap_tokens
rag_conversation_chunks rag_llm_model_id question_consolidator_llm_id forced_tool_count
allow_chat_channel_mentions allow_chat_direct_messages allow_topic_mentions allow_personal_messages].each do |field|

if initial_attributes[field] != ai_persona.attributes[field]
log_details["#{field}_changed"] = true

# For large text fields, don't include the values
if field == "system_prompt"
log_details["#{field}_updated"] = true
else
# For simple fields, show the before/after values
initial_value = initial_attributes[field]
current_value = ai_persona.attributes[field]
log_details[field] = "#{initial_value} → #{current_value}"
end
end
end

# Check JSON fields
%w[allowed_group_ids tools response_format examples].each do |field|
if initial_attributes[field].to_s != ai_persona.attributes[field].to_s
log_details["#{field}_changed"] = true
end
end

# Only log if there are actual changes
if log_details.keys.any? { |k| k.to_s.end_with?("_changed") }
# Use StaffActionLogger directly with the proper subject
StaffActionLogger.new(current_user).log_custom("update_ai_persona", log_details.merge(subject: ai_persona.name))
end
logger.log_update(
"persona",
ai_persona,
initial_attributes,
ai_persona_logger_fields,
entity_details,
)
end

def log_ai_persona_deletion(persona_details)
# Use StaffActionLogger directly with the proper subject
StaffActionLogger.new(current_user).log_custom("delete_ai_persona", persona_details.merge(subject: persona_details[:name]))
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
persona_details[:subject] = persona_details[:name]

logger.log_deletion("persona", persona_details)
end
end
end
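Based on the specs later in this commit, a persona update produces a single custom staff-action entry whose details mix plain "before → after" strings with "updated" markers for JSON and large-text fields. An illustrative payload (values invented) that would be handed to StaffActionLogger#log_custom("update_ai_persona", ...):

# Hypothetical details hash; only the shape is implied by the diff and specs.
{
  persona_id: 42,
  subject: "Forum Helper",
  name: "Old Name → New Name", # plain field: before/after values shown
  system_prompt: "updated",    # :large_text field: values are not logged
  tools: "updated",            # json field: only the fact of the change
}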
@@ -10,7 +10,6 @@ module DiscourseAi
end

def update
# Get the initial settings for logging changes
initial_settings = AiModerationSetting.spam
initial_custom_instructions = initial_settings&.data&.dig("custom_instructions")
initial_llm_model_id = initial_settings&.llm_model_id
@@ -42,7 +41,6 @@ module DiscourseAi
AiModerationSetting.create!(updated_params.merge(setting_type: :spam))
end

# Log any changes to custom_instructions or llm_model_id
log_ai_spam_update(initial_llm_model_id, initial_custom_instructions, allowed_params)
end

@@ -122,30 +120,24 @@ module DiscourseAi
private

def log_ai_spam_update(initial_llm_model_id, initial_custom_instructions, params)
# Track changes for logging
changes_to_log = {}

# Only track llm_model_id changes when it's in the params AND has actually changed
if params.key?(:llm_model_id) && initial_llm_model_id.to_s != params[:llm_model_id].to_s
# Get model names for better logging
old_model_name = LlmModel.find_by(id: initial_llm_model_id)&.display_name || initial_llm_model_id
new_model_name = LlmModel.find_by(id: params[:llm_model_id])&.display_name || params[:llm_model_id]
old_model_name =
LlmModel.find_by(id: initial_llm_model_id)&.display_name || initial_llm_model_id
new_model_name =
LlmModel.find_by(id: params[:llm_model_id])&.display_name || params[:llm_model_id]

changes_to_log[:llm_model_id] = "#{old_model_name} → #{new_model_name}"
end

# Only track custom_instructions changes when it's in the params AND has actually changed
if params.key?(:custom_instructions) && initial_custom_instructions != params[:custom_instructions]
if params.key?(:custom_instructions) &&
initial_custom_instructions != params[:custom_instructions]
changes_to_log[:custom_instructions] = params[:custom_instructions]
end

# Log the changes if any were made to llm_model_id or custom_instructions
if changes_to_log.present?
# Log the changes using StaffActionLogger (without a subject as requested)
StaffActionLogger.new(current_user).log_custom(
"update_ai_spam_settings",
changes_to_log
)
StaffActionLogger.new(current_user).log_custom("update_ai_spam_settings", changes_to_log)
end
end

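The spam controller keeps using StaffActionLogger directly rather than the new utility. When both tracked settings change, the changes_to_log hash built above ends up looking roughly like this (example values; only the key names and formats come from the diff):

# Illustrative changes hash, subsequently passed to
# StaffActionLogger#log_custom("update_ai_spam_settings", changes_to_log)
# from inside the controller.
changes_to_log = {
  llm_model_id: "Old Test Model → New Test Model",          # display names, not raw ids
  custom_instructions: "new instructions for both changes", # new value logged as-is
}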
@@ -33,7 +33,6 @@ module DiscourseAi
end

def update
# Capture initial state for logging
initial_attributes = @ai_tool.attributes.dup

if @ai_tool.update(ai_tool_params)
@@ -46,16 +45,15 @@ module DiscourseAi
end

def destroy
# Capture tool details for logging before destruction
tool_details = {
tool_logger_details = {
tool_id: @ai_tool.id,
name: @ai_tool.name,
tool_name: @ai_tool.tool_name,
subject: @ai_tool.name # Use name as the subject for AiTools
subject: @ai_tool.name,
}

if @ai_tool.destroy
log_ai_tool_deletion(tool_details)
log_ai_tool_deletion(tool_logger_details)
head :no_content
else
render_json_error @ai_tool
@@ -111,86 +109,58 @@ module DiscourseAi
.except(:rag_uploads)
end

def log_ai_tool_creation(ai_tool)
# Create log details
log_details = {
tool_id: ai_tool.id,
name: ai_tool.name,
tool_name: ai_tool.tool_name,
description: ai_tool.description,
subject: ai_tool.name # Use name as the subject for AiTools
def ai_tool_logger_fields
{
name: {
},
tool_name: {
},
description: {
},
summary: {
},
enabled: {
},
rag_chunk_tokens: {
},
rag_chunk_overlap_tokens: {
},
rag_llm_model_id: {
},
script: {
type: :large_text,
},
parameters: {
type: :large_text,
},
}

# Add parameter count if available
if ai_tool.parameters.present?
log_details[:parameter_count] = ai_tool.parameters.size
end

# For sensitive/large fields, don't include the full content
if ai_tool.script.present?
log_details[:script_size] = ai_tool.script.size
end
def log_ai_tool_creation(ai_tool)
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)

# Get subject for the log
subject = log_details[:subject]
entity_details = { tool_id: ai_tool.id, subject: ai_tool.name }
entity_details[:parameter_count] = ai_tool.parameters.size if ai_tool.parameters.present?

# Add subject to the details if present
log_details[:subject] = subject if subject.present?

# Log the action
StaffActionLogger.new(current_user).log_custom("create_ai_tool", log_details)
logger.log_creation("tool", ai_tool, ai_tool_logger_fields, entity_details)
end

def log_ai_tool_update(ai_tool, initial_attributes)
# Create log details
log_details = {
tool_id: ai_tool.id,
name: ai_tool.name,
tool_name: ai_tool.tool_name,
subject: ai_tool.name # Use name as the subject for AiTools
}
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
entity_details = { tool_id: ai_tool.id, subject: ai_tool.name }

# Track changes in fields
changed_fields = []

# Check for changes in basic fields
%w[name tool_name description summary enabled].each do |field|
if initial_attributes[field] != ai_tool.attributes[field]
changed_fields << field
log_details["#{field}_changed"] = true
end
end

# Special handling for script (sensitive/large)
if initial_attributes['script'] != ai_tool.script
changed_fields << 'script'
log_details[:script_changed] = true
end

# Special handling for parameters (JSON)
if initial_attributes['parameters'].to_s != ai_tool.parameters.to_s
changed_fields << 'parameters'
log_details[:parameters_changed] = true
end

# Only log if there are actual changes
if changed_fields.any?
log_details[:changed_fields] = changed_fields

# Get subject for the log
subject = log_details[:subject]

# Log the action
StaffActionLogger.new(current_user).log_custom("update_ai_tool", log_details)
end
logger.log_update(
"tool",
ai_tool,
initial_attributes,
ai_tool_logger_fields,
entity_details,
)
end

def log_ai_tool_deletion(tool_details)
# Get subject for the log
subject = tool_details[:subject]

# Log the action
StaffActionLogger.new(current_user).log_custom("delete_ai_tool", tool_details)
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
logger.log_deletion("tool", tool_details)
end
end
end
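The specs below were rewritten from inline message expectations (expect(...).to receive before the call) to the spy style: stub with allow up front, exercise the code, then assert with have_received. A minimal, generic example of that pattern (the asserted keys are placeholders; the real assertions appear in the diff):

# Generic RSpec spy pattern used throughout the rewritten specs below.
logger = instance_double(StaffActionLogger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(logger)
allow(logger).to receive(:log_custom)

subject.log_deletion("llm_model", model_id: 1) # exercise the code first

expect(logger).to have_received(:log_custom).with(
  "delete_ai_llm_model",
  hash_including("model_id" => 1), # keys are stringified by the logger under test (assumed)
)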
@@ -4,359 +4,337 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
fab!(:admin)
fab!(:llm_model)
fab!(:ai_persona)
fab!(:group) { Fabricate(:group) }
fab!(:group)

subject { described_class.new(admin) }

describe "#log_creation" do
it "logs creation of an entity with field configuration" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

# Create field configuration
field_config = {
name: {},
provider: {},
url: {},
api_key: { type: :sensitive }
}
field_config = { name: {}, provider: {}, url: {}, api_key: { type: :sensitive } }

# Create entity details
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

# Setup model with sensitive data
llm_model.update!(api_key: "secret_key")

expect(staff_action_logger).to receive(:log_custom).with(
subject.log_creation("llm_model", llm_model, field_config, entity_details)

expect(staff_action_logger).to have_received(:log_custom).with(
"create_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"name" => llm_model.name,
"provider" => llm_model.provider,
"url" => llm_model.url,
"api_key" => "[FILTERED]"
"api_key" => "[FILTERED]",
),
)
)

subject.log_creation("llm_model", llm_model, field_config, entity_details)
end

it "handles large text fields with type declaration" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

# Create a persona with a large system prompt
large_prompt = "a" * 200
ai_persona.update!(system_prompt: large_prompt)

# Create entity details
entity_details = {
persona_id: ai_persona.id,
persona_name: ai_persona.name
}
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }

field_config = {
name: {},
description: {},
system_prompt: { type: :large_text }
}
field_config = { name: {}, description: {}, system_prompt: { type: :large_text } }

# The log_details should include the truncated system_prompt
expect(staff_action_logger).to receive(:log_custom).with(
subject.log_creation("persona", ai_persona, field_config, entity_details)

# Verify with have_received
expect(staff_action_logger).to have_received(:log_custom).with(
"create_ai_persona",
hash_including(
"persona_id" => ai_persona.id,
"name" => ai_persona.name,
"system_prompt" => an_instance_of(String)
)
"system_prompt" => an_instance_of(String),
),
) do |action, details|
# Check that system_prompt was truncated
expect(details["system_prompt"].length).to be < 200
end

subject.log_creation("persona", ai_persona, field_config, entity_details)
end

it "allows excluding fields from extraction" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

field_config = {
name: {},
display_name: {},
provider: { extract: false }, # Should be excluded
url: {}
name: {
},
display_name: {
},
provider: {
extract: false,
}, # Should be excluded
url: {
},
}

# Create entity details
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

expect(staff_action_logger).to receive(:log_custom).with(
subject.log_creation("llm_model", llm_model, field_config, entity_details)

expect(staff_action_logger).to have_received(:log_custom).with(
"create_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"name" => llm_model.name,
"display_name" => llm_model.display_name,
"url" => llm_model.url
)
"url" => llm_model.url,
),
) do |action, details|
# Provider should not be present
expect(details).not_to have_key("provider")
end

subject.log_creation("llm_model", llm_model, field_config, entity_details)
end
end

describe "#log_update" do
it "handles empty arrays and complex JSON properly" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

# Setup initial attributes with empty JSON arrays
initial_attributes = {
"name" => "Old Name",
"allowed_group_ids" => []
}
initial_attributes = { "name" => "Old Name", "allowed_group_ids" => [] }

# Update with complex JSON
ai_persona.update!(
name: "New Name",
allowed_group_ids: [group.id, 999]
)
ai_persona.update!(name: "New Name", allowed_group_ids: [group.id, 999])

field_config = {
name: {},
json_fields: %w[allowed_group_ids]
}
field_config = { name: {}, json_fields: %w[allowed_group_ids] }

# It should log that allowed_group_ids was updated without showing the exact diff
expect(staff_action_logger).to receive(:log_custom).with(
# Create entity details
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }

subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)

# Verify with have_received
expect(staff_action_logger).to have_received(:log_custom).with(
"update_ai_persona",
hash_including(
"persona_id" => ai_persona.id,
"persona_name" => ai_persona.name,
"name" => "Old Name → New Name",
"allowed_group_ids" => "updated"
"allowed_group_ids" => "updated",
),
)
)

# Create entity details
entity_details = {
persona_id: ai_persona.id,
persona_name: ai_persona.name
}

subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
end

it "logs changes to attributes based on field configuration" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

initial_attributes = {
"name" => "Old Name",
"display_name" => "Old Display Name",
"provider" => "open_ai",
"api_key" => "old_secret"
"api_key" => "old_secret",
}

llm_model.update!(
name: "New Name",
display_name: "New Display Name",
provider: "anthropic",
api_key: "new_secret"
api_key: "new_secret",
)

field_config = {
name: {},
display_name: {},
provider: {},
api_key: { type: :sensitive }
}
field_config = { name: {}, display_name: {}, provider: {}, api_key: { type: :sensitive } }

# Create entity details
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

# It should include changes with appropriate handling for sensitive data
expect(staff_action_logger).to receive(:log_custom).with(
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)

# Verify with have_received
expect(staff_action_logger).to have_received(:log_custom).with(
"update_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"name" => "Old Name → New Name",
"display_name" => "Old Display Name → New Display Name",
"provider" => "open_ai → anthropic",
"api_key" => "updated" # Not showing actual values
"api_key" => "updated", # Not showing actual values
),
)
)

subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
end

it "doesn't log when there are no changes" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

initial_attributes = {
"name" => llm_model.name,
"display_name" => llm_model.display_name,
"provider" => llm_model.provider
"provider" => llm_model.provider,
}

field_config = {
name: {},
display_name: {},
provider: {}
}

# It should not call log_custom since there are no changes
expect(staff_action_logger).not_to receive(:log_custom)
field_config = { name: {}, display_name: {}, provider: {} }

# Create entity details
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)

# Verify log_custom was not called
expect(staff_action_logger).not_to have_received(:log_custom)
end

it "handles fields marked as not to be tracked" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

initial_attributes = {
"name" => "Old Name",
"display_name" => "Old Display Name",
"provider" => "open_ai"
"provider" => "open_ai",
}

llm_model.update!(
name: "New Name",
display_name: "New Display Name",
provider: "anthropic"
)
llm_model.update!(name: "New Name", display_name: "New Display Name", provider: "anthropic")

field_config = {
name: {},
display_name: {},
provider: { track: false } # Should not be tracked even though it changed
name: {
},
display_name: {
},
provider: {
track: false,
}, # Should not be tracked even though it changed
}

# Provider should not appear in the logged changes
expect(staff_action_logger).to receive(:log_custom).with(
"update_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"name" => "Old Name → New Name",
"display_name" => "Old Display Name → New Display Name"
)
) do |action, details|
expect(details).not_to have_key("provider")
end

# Create entity details
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)

# Provider should not appear in the logged changes
expect(staff_action_logger).to have_received(:log_custom).with(
"update_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"name" => "Old Name → New Name",
"display_name" => "Old Display Name → New Display Name",
),
) do |action, details|
expect(details).not_to have_key("provider")
end
end

it "handles json fields properly" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

# Setup initial attributes with JSON fields
initial_attributes = {
"name" => "Old Name",
"tools" => [["search", { "base_query" => "test" }, true]]
"tools" => [["search", { "base_query" => "test" }, true]],
}

# Update with different JSON
ai_persona.update!(
name: "New Name",
tools: [["search", { "base_query" => "updated" }, true], ["categories", {}, false]]
tools: [["search", { "base_query" => "updated" }, true], ["categories", {}, false]],
)

field_config = {
name: {},
json_fields: %w[tools]
}
field_config = { name: {}, json_fields: %w[tools] }

# It should log that tools was updated without showing the exact diff
expect(staff_action_logger).to receive(:log_custom).with(
# Create entity details
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }

subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)

# Verify with have_received
expect(staff_action_logger).to have_received(:log_custom).with(
"update_ai_persona",
hash_including(
"persona_id" => ai_persona.id,
"persona_name" => ai_persona.name,
"name" => "Old Name → New Name",
"tools" => "updated"
"tools" => "updated",
),
)
)

# Create entity details
entity_details = {
persona_id: ai_persona.id,
persona_name: ai_persona.name
}

subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
end
end

describe "#log_deletion" do
it "logs deletion with the correct entity type" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

details = {
model_id: llm_model.id,
display_name: llm_model.display_name,
name: llm_model.name
name: llm_model.name,
}

expect(staff_action_logger).to receive(:log_custom).with(
subject.log_deletion("llm_model", details)

# Verify with have_received
expect(staff_action_logger).to have_received(:log_custom).with(
"delete_ai_llm_model",
hash_including(
"model_id" => details[:model_id],
"display_name" => details[:display_name],
"name" => details[:name]
"name" => details[:name],
),
)
)

subject.log_deletion("llm_model", details)
end
end

describe "#log_custom" do
it "delegates to StaffActionLogger#log_custom" do
staff_action_logger = instance_double(StaffActionLogger)
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
allow(staff_action_logger).to receive(:log_custom)

details = { key: "value" }

expect(staff_action_logger).to receive(:log_custom).with(
"custom_action_type",
hash_including("key" => details[:key])
)

subject.log_custom("custom_action_type", details)

# Verify with have_received
expect(staff_action_logger).to have_received(:log_custom).with(
"custom_action_type",
hash_including("key" => details[:key]),
)
end
end

@@ -368,12 +346,10 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
end

it "handles quota changes in log_llm_model_creation" do
expect_any_instance_of(StaffActionLogger).to receive(:log_custom) do |_, action, details|
expect(action).to eq("create_ai_llm_model")
expect(details).to include("model_id", "model_name", "display_name")
expect(details["quotas"]).to include("Group #{group.id}")
expect(details["quotas"]).to include("1000 tokens")
end
# Setup
staff_logger = instance_double(StaffActionLogger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
allow(staff_logger).to receive(:log_custom)

# Call the method directly as it would be called from the controller
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
@@ -383,18 +359,35 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

log_details = entity_details.dup
log_details.merge!(logger.send(:extract_entity_attributes, llm_model, field_config))

# Add quota information as a special case
log_details[:quotas] = llm_model.llm_quotas.map do |quota|
log_details[:quotas] = llm_model
.llm_quotas
.map do |quota|
"Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
end.join("; ")
end
.join("; ")

logger.log_custom("create_ai_llm_model", log_details)

# Verify with have_received
expect(staff_logger).to have_received(:log_custom).with(
"create_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"model_name" => llm_model.name,
"display_name" => llm_model.display_name,
),
)
expect(staff_logger).to have_received(:log_custom).with(
"create_ai_llm_model",
hash_including("quotas" => a_string_including("Group #{group.id}", "1000 tokens")),
)
end

it "handles quota changes in log_llm_model_update" do
@@ -404,12 +397,10 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
@quota.update!(max_tokens: 2000)
current_quotas = llm_model.llm_quotas.reload.map(&:attributes)

expect_any_instance_of(StaffActionLogger).to receive(:log_custom) do |_, action, details|
expect(action).to eq("update_ai_llm_model")
expect(details).to include("model_id", "quotas")
expect(details["quotas"]).to include("1000 tokens")
expect(details["quotas"]).to include("2000 tokens")
end
# Setup
staff_logger = instance_double(StaffActionLogger)
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
allow(staff_logger).to receive(:log_custom)

# Simulate the special quota handling in the controller
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
@@ -417,8 +408,14 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do

# Track quota changes separately as they're a special case
if initial_quotas != current_quotas
initial_quota_summary = initial_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
current_quota_summary = current_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
initial_quota_summary =
initial_quotas
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
.join("; ")
current_quota_summary =
current_quotas
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
.join("; ")
changes[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
end

@@ -426,11 +423,20 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
entity_details = {
model_id: llm_model.id,
model_name: llm_model.name,
display_name: llm_model.display_name
display_name: llm_model.display_name,
}

log_details = entity_details.dup.merge(changes)
logger.log_custom("update_ai_llm_model", log_details)

# Verify with have_received
expect(staff_logger).to have_received(:log_custom).with(
"update_ai_llm_model",
hash_including(
"model_id" => llm_model.id,
"quotas" => a_string_including("1000 tokens", "2000 tokens"),
),
)
end
end
end
@@ -125,69 +125,82 @@ RSpec.describe DiscourseAi::Admin::AiSpamController do
params: {
is_enabled: true,
llm_model_id: llm_model.id,
custom_instructions: "updated instructions"
custom_instructions: "updated instructions",
}

expect(response.status).to eq(200)

# Verify the log was created with the right subject
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").last
history =
UserHistory.where(
action: UserHistory.actions[:custom_staff],
custom_type: "update_ai_spam_settings",
).last
expect(history).to be_present
expect(history.subject).to eq("AI Spam Detection")
expect(history.details).to include("custom_instructions_changed")
end

it "logs staff action when llm_model_id changes" do
# Create another model to change to
new_llm_model = Fabricate(:llm_model, name: "New Test Model", display_name: "New Test Model")
new_llm_model =
Fabricate(:llm_model, name: "New Test Model", display_name: "New Test Model")

put "/admin/plugins/discourse-ai/ai-spam.json",
params: {
llm_model_id: new_llm_model.id
}
put "/admin/plugins/discourse-ai/ai-spam.json", params: { llm_model_id: new_llm_model.id }

expect(response.status).to eq(200)

# Verify the log was created with the right subject
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").last
history =
UserHistory.where(
action: UserHistory.actions[:custom_staff],
custom_type: "update_ai_spam_settings",
).last
expect(history).to be_present
expect(history.subject).to eq("AI Spam Detection")
expect(history.details).to include("llm_model_id")
end

it "does not log staff action when only is_enabled changes" do
# Check initial count of logs
initial_count = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").count
initial_count =
UserHistory.where(
action: UserHistory.actions[:custom_staff],
custom_type: "update_ai_spam_settings",
).count

# Update only the is_enabled setting
put "/admin/plugins/discourse-ai/ai-spam.json",
params: {
is_enabled: false
}
put "/admin/plugins/discourse-ai/ai-spam.json", params: { is_enabled: false }

expect(response.status).to eq(200)

# Verify no new log was created
current_count = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").count
current_count =
UserHistory.where(
action: UserHistory.actions[:custom_staff],
custom_type: "update_ai_spam_settings",
).count
expect(current_count).to eq(initial_count)
end

it "logs both custom_instructions and llm_model_id changes in one entry" do
# Create another model to change to
new_llm_model = Fabricate(:llm_model, name: "Another Test Model", display_name: "Another Test Model")
new_llm_model =
Fabricate(:llm_model, name: "Another Test Model", display_name: "Another Test Model")

put "/admin/plugins/discourse-ai/ai-spam.json",
params: {
llm_model_id: new_llm_model.id,
custom_instructions: "new instructions for both changes"
custom_instructions: "new instructions for both changes",
}

expect(response.status).to eq(200)

# Verify the log was created with all changes
history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").last
history =
UserHistory.where(
action: UserHistory.actions[:custom_staff],
custom_type: "update_ai_spam_settings",
).last
expect(history).to be_present
expect(history.subject).to eq("AI Spam Detection")
expect(history.details).to include("llm_model_id")
expect(history.details).to include("custom_instructions_changed")
end