DEV: updates...
parent c3cd5d65da
commit 3009292d1b
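The hunks below replace hand-rolled StaffActionLogger.log_custom calls in each admin controller with a shared DiscourseAi::Utils::AiStaffActionLogger helper driven by a per-entity field configuration. As orientation only, this is a minimal sketch of the call pattern visible in the new code; the `model` variable and the particular field names are illustrative, not part of the diff:

# Sketch only - mirrors the calls added in this commit; names are illustrative.
logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)

# Field configuration: plain fields are diffed as "old → new", :sensitive fields are
# filtered, :large_text fields are truncated, and json_fields are logged as "updated".
fields = {
  display_name: {},
  api_key: { type: :sensitive },
  system_prompt: { type: :large_text },
  json_fields: [:provider_params],
}

entity_details = { model_id: model.id, subject: model.display_name }

logger.log_creation("llm_model", model, fields, entity_details)
logger.log_update("llm_model", model, initial_attributes, fields, entity_details)
logger.log_deletion("llm_model", entity_details)
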
@@ -56,9 +56,8 @@ module DiscourseAi
 )
 end
 
-# Capture initial state for logging
 initial_attributes = embedding_def.attributes.dup
 
 if embedding_def.update(ai_embeddings_params.except(:dimensions))
 log_ai_embedding_update(embedding_def, initial_attributes)
 render json: AiEmbeddingDefinitionSerializer.new(embedding_def)

@@ -80,15 +79,14 @@ module DiscourseAi
 return render_json_error(I18n.t("discourse_ai.embeddings.delete_failed"), status: 409)
 end
 
-# Capture embedding details for logging before destruction
 embedding_details = {
 embedding_id: embedding_def.id,
 display_name: embedding_def.display_name,
 provider: embedding_def.provider,
 dimensions: embedding_def.dimensions,
-subject: embedding_def.display_name # Use display_name as the subject for EmbeddingDefinition
+subject: embedding_def.display_name,
 }
 
 if embedding_def.destroy
 log_ai_embedding_deletion(embedding_details)
 head :no_content

@@ -143,88 +141,56 @@ module DiscourseAi
 
 permitted
 end
 
+def ai_embeddings_logger_fields
+{
+display_name: {
+},
+provider: {
+},
+url: {
+},
+tokenizer_class: {
+},
+max_sequence_length: {
+},
+embed_prompt: {
+type: :large_text,
+},
+search_prompt: {
+type: :large_text,
+},
+matryoshka_dimensions: {
+},
+api_key: {
+type: :sensitive,
+},
+# JSON fields should be tracked as simple changes
+json_fields: [:provider_params],
+}
+end
 
 def log_ai_embedding_creation(embedding_def)
-# Create log details
-log_details = {
-embedding_id: embedding_def.id,
-display_name: embedding_def.display_name,
-provider: embedding_def.provider,
-dimensions: embedding_def.dimensions,
-subject: embedding_def.display_name # Use display_name as the subject for EmbeddingDefinition
-}
-
-# Only include tokenizer if present
-if embedding_def.tokenizer_class.present?
-log_details[:tokenizer] = embedding_def.tokenizer_class
-end
-
-# For sensitive fields, don't include the actual content
-if embedding_def.api_key.present?
-log_details[:api_key_set] = true
-end
-
-# Get subject for the log
-subject = log_details[:subject]
-
-# Log the action
-StaffActionLogger.new(current_user).log_custom("create_ai_embedding", log_details)
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { embedding_id: embedding_def.id, subject: embedding_def.display_name }
+logger.log_creation("embedding", embedding_def, ai_embeddings_logger_fields, entity_details)
 end
 
 def log_ai_embedding_update(embedding_def, initial_attributes)
-# Create log details
-log_details = {
-embedding_id: embedding_def.id,
-display_name: embedding_def.display_name,
-subject: embedding_def.display_name # Use display_name as the subject for EmbeddingDefinition
-}
-
-# Track changes in fields
-changed_fields = []
-
-# Fields to check for changes
-%w[display_name provider url tokenizer_class max_sequence_length embed_prompt search_prompt matryoshka_dimensions].each do |field|
-if initial_attributes[field] != embedding_def.attributes[field]
-changed_fields << field
-log_details["#{field}_changed"] = true
-end
-end
-
-# Special handling for API key (sensitive)
-if initial_attributes['api_key'].present? != embedding_def.api_key.present?
-changed_fields << 'api_key'
-
-if embedding_def.api_key.present?
-log_details[:api_key_set] = true
-else
-log_details[:api_key_removed] = true
-end
-end
-
-# Special handling for provider_params (JSON)
-if initial_attributes['provider_params'].to_s != embedding_def.provider_params.to_s
-changed_fields << 'provider_params'
-log_details[:provider_params_changed] = true
-end
-
-# Only log if there are actual changes
-if changed_fields.any?
-log_details[:changed_fields] = changed_fields
-
-# Get subject for the log
-subject = log_details[:subject]
-
-# Log the action
-StaffActionLogger.new(current_user).log_custom("update_ai_embedding", log_details)
-end
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { embedding_id: embedding_def.id, subject: embedding_def.display_name }
+logger.log_update(
+"embedding",
+embedding_def,
+initial_attributes,
+ai_embeddings_logger_fields,
+entity_details,
+)
 end
 
 def log_ai_embedding_deletion(embedding_details)
-# Get subject for the log (but keep it in the details hash)
-subject = embedding_details[:subject]
-
-# Log the action
-StaffActionLogger.new(current_user).log_custom("delete_ai_embedding", embedding_details)
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+logger.log_deletion("embedding", embedding_details)
 end
 end
 end
 end

@@ -120,7 +120,7 @@ module DiscourseAi
 model_id: llm_model.id,
 display_name: llm_model.display_name,
 name: llm_model.name,
-provider: llm_model.provider
+provider: llm_model.provider,
 }
 
 # Clean up companion users

@@ -206,97 +206,87 @@ module DiscourseAi
 permitted
 end
 
-def log_llm_model_creation(llm_model)
-# Create basic entity details with important attributes
-log_details = {
-model_id: llm_model.id,
-model_name: llm_model.name,
-display_name: llm_model.display_name,
-provider: llm_model.provider,
-tokenizer: llm_model.tokenizer
+def ai_llm_logger_fields
+{
+display_name: {
+},
+name: {
+},
+provider: {
+},
+tokenizer: {
+},
+url: {
+},
+max_prompt_tokens: {
+},
+max_output_tokens: {
+},
+enabled_chat_bot: {
+},
+vision_enabled: {
+},
+api_key: {
+type: :sensitive,
+},
+input_cost: {
+},
+output_cost: {
+},
+# JSON fields should be tracked as simple changes
+json_fields: [:provider_params],
 }
+end
 
-# Add API key indication if present (without showing the key)
-if llm_model.api_key.present?
-log_details[:api_key_set] = true
-end
-
-# Add token limits if set
-if llm_model.max_prompt_tokens.present?
-log_details[:max_prompt_tokens] = llm_model.max_prompt_tokens
-end
-
-if llm_model.max_output_tokens.present?
-log_details[:max_output_tokens] = llm_model.max_output_tokens
-end
+def log_llm_model_creation(llm_model)
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { model_id: llm_model.id, subject: llm_model.display_name }
 
 # Add quota information as a special case
 if llm_model.llm_quotas.any?
-log_details[:quotas] = llm_model.llm_quotas.map do |quota|
+entity_details[:quotas] = llm_model
+.llm_quotas
+.map do |quota|
 "Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
-end.join("; ")
+end
+.join("; ")
 end
 
-# Use StaffActionLogger directly with the proper subject
-StaffActionLogger.new(current_user).log_custom("create_ai_llm_model", log_details.merge(subject: llm_model.display_name))
+logger.log_creation("llm_model", llm_model, ai_llm_logger_fields, entity_details)
 end
 
 def log_llm_model_update(llm_model, initial_attributes, initial_quotas)
-# Create basic entity details
-log_details = {
-model_id: llm_model.id,
-model_name: llm_model.name,
-display_name: llm_model.display_name
-}
-
-# Track simple changes
-%w[display_name name provider tokenizer url max_prompt_tokens max_output_tokens enabled_chat_bot vision_enabled].each do |field|
-if initial_attributes[field] != llm_model.attributes[field]
-log_details["#{field}_changed"] = true
-
-# For simple fields, show the before/after values
-unless field == "api_key" # Skip sensitive fields
-initial_value = initial_attributes[field]
-current_value = llm_model.attributes[field]
-log_details[field] = "#{initial_value} → #{current_value}"
-end
-end
-end
-
-# Handle API key changes specially
-if initial_attributes["api_key"].present? != llm_model.api_key.present?
-log_details[:api_key_changed] = true
-if llm_model.api_key.present?
-log_details[:api_key_status] = "set"
-else
-log_details[:api_key_status] = "removed"
-end
-end
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { model_id: llm_model.id, subject: llm_model.display_name }
 
 # Track quota changes separately as they're a special case
 current_quotas = llm_model.llm_quotas.reload.map(&:attributes)
 if initial_quotas != current_quotas
-initial_quota_summary = initial_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
-current_quota_summary = current_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
-log_details[:quotas_changed] = true
-log_details[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
-end
-
-# Check JSON fields like provider_params
-if initial_attributes["provider_params"].to_s != llm_model.attributes["provider_params"].to_s
-log_details[:provider_params_changed] = true
-end
-
-# Only log if there are actual changes
-if log_details.keys.any? { |k| k.to_s.end_with?("_changed") }
-# Use StaffActionLogger directly with the proper subject
-StaffActionLogger.new(current_user).log_custom("update_ai_llm_model", log_details.merge(subject: llm_model.display_name))
+initial_quota_summary =
+initial_quotas
+.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
+.join("; ")
+current_quota_summary =
+current_quotas
+.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
+.join("; ")
+entity_details[:quotas_changed] = true
+entity_details[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
 end
 
+logger.log_update(
+"llm_model",
+llm_model,
+initial_attributes,
+ai_llm_logger_fields,
+entity_details,
+)
 end
 
 def log_llm_model_deletion(model_details)
-# Use StaffActionLogger directly with the proper subject
-StaffActionLogger.new(current_user).log_custom("delete_ai_llm_model", model_details.merge(subject: model_details[:display_name]))
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+model_details[:subject] = model_details[:display_name]
+
+logger.log_deletion("llm_model", model_details)
 end
 end
 end

@@ -75,7 +75,6 @@ module DiscourseAi
 end
 
 def update
-# Capture initial state for logging
 initial_attributes = @ai_persona.attributes.dup
 
 if @ai_persona.update(ai_persona_params.except(:rag_uploads))

@@ -89,11 +88,10 @@ module DiscourseAi
 end
 
 def destroy
-# Capture persona details for logging before destruction
 persona_details = {
 persona_id: @ai_persona.id,
 name: @ai_persona.name,
-description: @ai_persona.description
+description: @ai_persona.description,
 }
 
 if @ai_persona.destroy

@@ -275,84 +273,90 @@ module DiscourseAi
 examples.map { |example_arr| example_arr.take(2).map(&:to_s) }
 end
 
-def log_ai_persona_creation(ai_persona)
-# Create basic entity details with important attributes
-log_details = {
-persona_id: ai_persona.id,
-persona_name: ai_persona.name,
-description: ai_persona.description
+def ai_persona_logger_fields
+{
+name: {
+},
+description: {
+},
+enabled: {
+},
+priority: {
+},
+system_prompt: {
+type: :large_text,
+},
+default_llm_id: {
+},
+temperature: {
+},
+top_p: {
+},
+user_id: {
+},
+max_context_posts: {
+},
+vision_enabled: {
+},
+vision_max_pixels: {
+},
+rag_chunk_tokens: {
+},
+rag_chunk_overlap_tokens: {
+},
+rag_conversation_chunks: {
+},
+rag_llm_model_id: {
+},
+question_consolidator_llm_id: {
+},
+allow_chat_channel_mentions: {
+},
+allow_chat_direct_messages: {
+},
+allow_topic_mentions: {
+},
+allow_personal_messages: {
+},
+tool_details: {
+type: :large_text,
+},
+forced_tool_count: {
+},
+force_default_llm: {
+},
+# JSON fields
+json_fields: %i[tools response_format examples allowed_group_ids],
 }
+end
 
-# Include basic configuration fields if present
-%w[enabled priority temperature top_p default_llm_id].each do |field|
-value = ai_persona.send(field) if ai_persona.respond_to?(field)
-log_details[field] = value if value.present?
-end
-
-# Handle system prompt specially (truncate if too long)
-if ai_persona.system_prompt.present?
-if ai_persona.system_prompt.length > 100
-log_details[:system_prompt] = ai_persona.system_prompt.truncate(100)
-else
-log_details[:system_prompt] = ai_persona.system_prompt
-end
-end
-
-# Add tools count separately as it's not a direct attribute
-log_details[:tools_count] = (ai_persona.tools || []).size
-
-# Add allowed_group_ids
-log_details[:allowed_group_ids] = ai_persona.allowed_group_ids if ai_persona.allowed_group_ids.present?
-
-# Use StaffActionLogger directly with the proper subject
-StaffActionLogger.new(current_user).log_custom("create_ai_persona", log_details.merge(subject: ai_persona.name))
+def log_ai_persona_creation(ai_persona)
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { persona_id: ai_persona.id, subject: ai_persona.name }
+entity_details[:tools_count] = (ai_persona.tools || []).size
+
+logger.log_creation("persona", ai_persona, ai_persona_logger_fields, entity_details)
 end
 
 def log_ai_persona_update(ai_persona, initial_attributes)
-# Create basic entity details
-log_details = {
-persona_id: ai_persona.id,
-persona_name: ai_persona.name
-}
-
-# Track simple changes
-%w[name description enabled priority system_prompt default_llm_id temperature top_p
-vision_enabled vision_max_pixels max_context_posts rag_chunk_tokens rag_chunk_overlap_tokens
-rag_conversation_chunks rag_llm_model_id question_consolidator_llm_id forced_tool_count
-allow_chat_channel_mentions allow_chat_direct_messages allow_topic_mentions allow_personal_messages].each do |field|
-if initial_attributes[field] != ai_persona.attributes[field]
-log_details["#{field}_changed"] = true
-
-# For large text fields, don't include the values
-if field == "system_prompt"
-log_details["#{field}_updated"] = true
-else
-# For simple fields, show the before/after values
-initial_value = initial_attributes[field]
-current_value = ai_persona.attributes[field]
-log_details[field] = "#{initial_value} → #{current_value}"
-end
-end
-end
-
-# Check JSON fields
-%w[allowed_group_ids tools response_format examples].each do |field|
-if initial_attributes[field].to_s != ai_persona.attributes[field].to_s
-log_details["#{field}_changed"] = true
-end
-end
-
-# Only log if there are actual changes
-if log_details.keys.any? { |k| k.to_s.end_with?("_changed") }
-# Use StaffActionLogger directly with the proper subject
-StaffActionLogger.new(current_user).log_custom("update_ai_persona", log_details.merge(subject: ai_persona.name))
-end
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { persona_id: ai_persona.id, subject: ai_persona.name }
+entity_details[:tools_count] = ai_persona.tools.size if ai_persona.tools.present?
+
+logger.log_update(
+"persona",
+ai_persona,
+initial_attributes,
+ai_persona_logger_fields,
+entity_details,
+)
 end
 
 def log_ai_persona_deletion(persona_details)
-# Use StaffActionLogger directly with the proper subject
-StaffActionLogger.new(current_user).log_custom("delete_ai_persona", persona_details.merge(subject: persona_details[:name]))
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+persona_details[:subject] = persona_details[:name]
+
+logger.log_deletion("persona", persona_details)
 end
 end
 end
 end

@@ -10,11 +10,10 @@ module DiscourseAi
 end
 
 def update
-# Get the initial settings for logging changes
 initial_settings = AiModerationSetting.spam
 initial_custom_instructions = initial_settings&.data&.dig("custom_instructions")
 initial_llm_model_id = initial_settings&.llm_model_id
 
 updated_params = {}
 if allowed_params.key?(:llm_model_id)
 llm_model_id = updated_params[:llm_model_id] = allowed_params[:llm_model_id]

@@ -41,8 +40,7 @@ module DiscourseAi
 else
 AiModerationSetting.create!(updated_params.merge(setting_type: :spam))
 end
 
-# Log any changes to custom_instructions or llm_model_id
 log_ai_spam_update(initial_llm_model_id, initial_custom_instructions, allowed_params)
 end
 
@@ -120,32 +118,26 @@ module DiscourseAi
 end
 
 private
 
 def log_ai_spam_update(initial_llm_model_id, initial_custom_instructions, params)
-# Track changes for logging
 changes_to_log = {}
 
-# Only track llm_model_id changes when it's in the params AND has actually changed
 if params.key?(:llm_model_id) && initial_llm_model_id.to_s != params[:llm_model_id].to_s
-# Get model names for better logging
-old_model_name = LlmModel.find_by(id: initial_llm_model_id)&.display_name || initial_llm_model_id
-new_model_name = LlmModel.find_by(id: params[:llm_model_id])&.display_name || params[:llm_model_id]
+old_model_name =
+LlmModel.find_by(id: initial_llm_model_id)&.display_name || initial_llm_model_id
+new_model_name =
+LlmModel.find_by(id: params[:llm_model_id])&.display_name || params[:llm_model_id]
 
 changes_to_log[:llm_model_id] = "#{old_model_name} → #{new_model_name}"
 end
 
-# Only track custom_instructions changes when it's in the params AND has actually changed
-if params.key?(:custom_instructions) && initial_custom_instructions != params[:custom_instructions]
+if params.key?(:custom_instructions) &&
+initial_custom_instructions != params[:custom_instructions]
 changes_to_log[:custom_instructions] = params[:custom_instructions]
 end
 
-# Log the changes if any were made to llm_model_id or custom_instructions
 if changes_to_log.present?
-# Log the changes using StaffActionLogger (without a subject as requested)
-StaffActionLogger.new(current_user).log_custom(
-"update_ai_spam_settings",
-changes_to_log
-)
+StaffActionLogger.new(current_user).log_custom("update_ai_spam_settings", changes_to_log)
 end
 end
 
@@ -170,4 +162,4 @@ module DiscourseAi
 end
 end
 end
 end

@@ -33,9 +33,8 @@ module DiscourseAi
 end
 
 def update
-# Capture initial state for logging
 initial_attributes = @ai_tool.attributes.dup
 
 if @ai_tool.update(ai_tool_params)
 RagDocumentFragment.update_target_uploads(@ai_tool, attached_upload_ids)
 log_ai_tool_update(@ai_tool, initial_attributes)

@@ -46,16 +45,15 @@ module DiscourseAi
 end
 
 def destroy
-# Capture tool details for logging before destruction
-tool_details = {
+tool_logger_details = {
 tool_id: @ai_tool.id,
 name: @ai_tool.name,
 tool_name: @ai_tool.tool_name,
-subject: @ai_tool.name # Use name as the subject for AiTools
+subject: @ai_tool.name,
 }
 
 if @ai_tool.destroy
-log_ai_tool_deletion(tool_details)
+log_ai_tool_deletion(tool_logger_details)
 head :no_content
 else
 render_json_error @ai_tool

@@ -110,87 +108,59 @@ module DiscourseAi
 )
 .except(:rag_uploads)
 end
 
+def ai_tool_logger_fields
+{
+name: {
+},
+tool_name: {
+},
+description: {
+},
+summary: {
+},
+enabled: {
+},
+rag_chunk_tokens: {
+},
+rag_chunk_overlap_tokens: {
+},
+rag_llm_model_id: {
+},
+script: {
+type: :large_text,
+},
+parameters: {
+type: :large_text,
+},
+}
+end
 
 def log_ai_tool_creation(ai_tool)
-# Create log details
-log_details = {
-tool_id: ai_tool.id,
-name: ai_tool.name,
-tool_name: ai_tool.tool_name,
-description: ai_tool.description,
-subject: ai_tool.name # Use name as the subject for AiTools
-}
-
-# Add parameter count if available
-if ai_tool.parameters.present?
-log_details[:parameter_count] = ai_tool.parameters.size
-end
-
-# For sensitive/large fields, don't include the full content
-if ai_tool.script.present?
-log_details[:script_size] = ai_tool.script.size
-end
-
-# Get subject for the log
-subject = log_details[:subject]
-
-# Add subject to the details if present
-log_details[:subject] = subject if subject.present?
-
-# Log the action
-StaffActionLogger.new(current_user).log_custom("create_ai_tool", log_details)
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { tool_id: ai_tool.id, subject: ai_tool.name }
+entity_details[:parameter_count] = ai_tool.parameters.size if ai_tool.parameters.present?
+
+logger.log_creation("tool", ai_tool, ai_tool_logger_fields, entity_details)
 end
 
 def log_ai_tool_update(ai_tool, initial_attributes)
-# Create log details
-log_details = {
-tool_id: ai_tool.id,
-name: ai_tool.name,
-tool_name: ai_tool.tool_name,
-subject: ai_tool.name # Use name as the subject for AiTools
-}
-
-# Track changes in fields
-changed_fields = []
-
-# Check for changes in basic fields
-%w[name tool_name description summary enabled].each do |field|
-if initial_attributes[field] != ai_tool.attributes[field]
-changed_fields << field
-log_details["#{field}_changed"] = true
-end
-end
-
-# Special handling for script (sensitive/large)
-if initial_attributes['script'] != ai_tool.script
-changed_fields << 'script'
-log_details[:script_changed] = true
-end
-
-# Special handling for parameters (JSON)
-if initial_attributes['parameters'].to_s != ai_tool.parameters.to_s
-changed_fields << 'parameters'
-log_details[:parameters_changed] = true
-end
-
-# Only log if there are actual changes
-if changed_fields.any?
-log_details[:changed_fields] = changed_fields
-
-# Get subject for the log
-subject = log_details[:subject]
-
-# Log the action
-StaffActionLogger.new(current_user).log_custom("update_ai_tool", log_details)
-end
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+entity_details = { tool_id: ai_tool.id, subject: ai_tool.name }
+
+logger.log_update(
+"tool",
+ai_tool,
+initial_attributes,
+ai_tool_logger_fields,
+entity_details,
+)
 end
 
 def log_ai_tool_deletion(tool_details)
-# Get subject for the log
-subject = tool_details[:subject]
-
-# Log the action
-StaffActionLogger.new(current_user).log_custom("delete_ai_tool", tool_details)
+logger = DiscourseAi::Utils::AiStaffActionLogger.new(current_user)
+logger.log_deletion("tool", tool_details)
 end
 end
 end
 end

@@ -4,434 +4,440 @@ RSpec.describe DiscourseAi::Utils::AiStaffActionLogger do
 fab!(:admin)
 fab!(:llm_model)
 fab!(:ai_persona)
-fab!(:group) { Fabricate(:group) }
+fab!(:group)
 
 subject { described_class.new(admin) }
 
 describe "#log_creation" do
 it "logs creation of an entity with field configuration" do
 staff_action_logger = instance_double(StaffActionLogger)
-expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
+allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
+allow(staff_action_logger).to receive(:log_custom)
 
 # Create field configuration
-field_config = {
-name: {},
-provider: {},
-url: {},
-api_key: { type: :sensitive }
-}
+field_config = { name: {}, provider: {}, url: {}, api_key: { type: :sensitive } }
 
 # Create entity details
 entity_details = {
 model_id: llm_model.id,
 model_name: llm_model.name,
-display_name: llm_model.display_name
+display_name: llm_model.display_name,
 }
 
 # Setup model with sensitive data
 llm_model.update!(api_key: "secret_key")
 
-expect(staff_action_logger).to receive(:log_custom).with(
+subject.log_creation("llm_model", llm_model, field_config, entity_details)
+
+expect(staff_action_logger).to have_received(:log_custom).with(
 "create_ai_llm_model",
 hash_including(
 "model_id" => llm_model.id,
 "name" => llm_model.name,
 "provider" => llm_model.provider,
 "url" => llm_model.url,
-"api_key" => "[FILTERED]"
-)
+"api_key" => "[FILTERED]",
+),
 )
-
-subject.log_creation("llm_model", llm_model, field_config, entity_details)
 end
 
 it "handles large text fields with type declaration" do
 staff_action_logger = instance_double(StaffActionLogger)
-expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
+allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
+allow(staff_action_logger).to receive(:log_custom)
 
 # Create a persona with a large system prompt
 large_prompt = "a" * 200
 ai_persona.update!(system_prompt: large_prompt)
 
 # Create entity details
-entity_details = {
-persona_id: ai_persona.id,
-persona_name: ai_persona.name
-}
+entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }
 
-field_config = {
-name: {},
-description: {},
-system_prompt: { type: :large_text }
-}
+field_config = { name: {}, description: {}, system_prompt: { type: :large_text } }
 
-# The log_details should include the truncated system_prompt
-expect(staff_action_logger).to receive(:log_custom).with(
+subject.log_creation("persona", ai_persona, field_config, entity_details)
+
+# Verify with have_received
+expect(staff_action_logger).to have_received(:log_custom).with(
 "create_ai_persona",
 hash_including(
 "persona_id" => ai_persona.id,
 "name" => ai_persona.name,
-"system_prompt" => an_instance_of(String)
-)
+"system_prompt" => an_instance_of(String),
+),
 ) do |action, details|
 # Check that system_prompt was truncated
 expect(details["system_prompt"].length).to be < 200
 end
-
-subject.log_creation("persona", ai_persona, field_config, entity_details)
 end
 
 it "allows excluding fields from extraction" do
 staff_action_logger = instance_double(StaffActionLogger)
-expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
+allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
+allow(staff_action_logger).to receive(:log_custom)
 
 field_config = {
-name: {},
-display_name: {},
-provider: { extract: false }, # Should be excluded
-url: {}
+name: {
+},
+display_name: {
+},
+provider: {
+extract: false,
+}, # Should be excluded
+url: {
+},
 }
 
 # Create entity details
 entity_details = {
 model_id: llm_model.id,
 model_name: llm_model.name,
-display_name: llm_model.display_name
+display_name: llm_model.display_name,
 }
 
-expect(staff_action_logger).to receive(:log_custom).with(
+subject.log_creation("llm_model", llm_model, field_config, entity_details)
+
+expect(staff_action_logger).to have_received(:log_custom).with(
 "create_ai_llm_model",
 hash_including(
 "model_id" => llm_model.id,
 "name" => llm_model.name,
 "display_name" => llm_model.display_name,
-"url" => llm_model.url
-)
+"url" => llm_model.url,
+),
 ) do |action, details|
 # Provider should not be present
 expect(details).not_to have_key("provider")
 end
-
-subject.log_creation("llm_model", llm_model, field_config, entity_details)
 end
 end
 
describe "#log_update" do
|
describe "#log_update" do
|
||||||
it "handles empty arrays and complex JSON properly" do
|
it "handles empty arrays and complex JSON properly" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
# Setup initial attributes with empty JSON arrays
|
# Setup initial attributes with empty JSON arrays
|
||||||
initial_attributes = {
|
initial_attributes = { "name" => "Old Name", "allowed_group_ids" => [] }
|
||||||
"name" => "Old Name",
|
|
||||||
"allowed_group_ids" => []
|
|
||||||
}
|
|
||||||
|
|
||||||
# Update with complex JSON
|
# Update with complex JSON
|
||||||
ai_persona.update!(
|
ai_persona.update!(name: "New Name", allowed_group_ids: [group.id, 999])
|
||||||
name: "New Name",
|
|
||||||
allowed_group_ids: [group.id, 999]
|
field_config = { name: {}, json_fields: %w[allowed_group_ids] }
|
||||||
)
|
|
||||||
|
# Create entity details
|
||||||
field_config = {
|
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }
|
||||||
name: {},
|
|
||||||
json_fields: %w[allowed_group_ids]
|
subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
|
||||||
}
|
|
||||||
|
# Verify with have_received
|
||||||
# It should log that allowed_group_ids was updated without showing the exact diff
|
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||||
expect(staff_action_logger).to receive(:log_custom).with(
|
|
||||||
"update_ai_persona",
|
"update_ai_persona",
|
||||||
hash_including(
|
hash_including(
|
||||||
"persona_id" => ai_persona.id,
|
"persona_id" => ai_persona.id,
|
||||||
"persona_name" => ai_persona.name,
|
"persona_name" => ai_persona.name,
|
||||||
"name" => "Old Name → New Name",
|
"name" => "Old Name → New Name",
|
||||||
"allowed_group_ids" => "updated"
|
"allowed_group_ids" => "updated",
|
||||||
)
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create entity details
|
|
||||||
entity_details = {
|
|
||||||
persona_id: ai_persona.id,
|
|
||||||
persona_name: ai_persona.name
|
|
||||||
}
|
|
||||||
|
|
||||||
subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
it "logs changes to attributes based on field configuration" do
|
it "logs changes to attributes based on field configuration" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
initial_attributes = {
|
initial_attributes = {
|
||||||
"name" => "Old Name",
|
"name" => "Old Name",
|
||||||
"display_name" => "Old Display Name",
|
"display_name" => "Old Display Name",
|
||||||
"provider" => "open_ai",
|
"provider" => "open_ai",
|
||||||
"api_key" => "old_secret"
|
"api_key" => "old_secret",
|
||||||
}
|
}
|
||||||
|
|
||||||
llm_model.update!(
|
llm_model.update!(
|
||||||
name: "New Name",
|
name: "New Name",
|
||||||
display_name: "New Display Name",
|
display_name: "New Display Name",
|
||||||
provider: "anthropic",
|
provider: "anthropic",
|
||||||
api_key: "new_secret"
|
api_key: "new_secret",
|
||||||
)
|
)
|
||||||
|
|
||||||
field_config = {
|
field_config = { name: {}, display_name: {}, provider: {}, api_key: { type: :sensitive } }
|
||||||
name: {},
|
|
||||||
display_name: {},
|
|
||||||
provider: {},
|
|
||||||
api_key: { type: :sensitive }
|
|
||||||
}
|
|
||||||
|
|
||||||
# Create entity details
|
# Create entity details
|
||||||
entity_details = {
|
entity_details = {
|
||||||
model_id: llm_model.id,
|
model_id: llm_model.id,
|
||||||
model_name: llm_model.name,
|
model_name: llm_model.name,
|
||||||
display_name: llm_model.display_name
|
display_name: llm_model.display_name,
|
||||||
}
|
}
|
||||||
|
|
||||||
# It should include changes with appropriate handling for sensitive data
|
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
||||||
expect(staff_action_logger).to receive(:log_custom).with(
|
|
||||||
|
# Verify with have_received
|
||||||
|
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||||
"update_ai_llm_model",
|
"update_ai_llm_model",
|
||||||
hash_including(
|
hash_including(
|
||||||
"model_id" => llm_model.id,
|
"model_id" => llm_model.id,
|
||||||
"name" => "Old Name → New Name",
|
"name" => "Old Name → New Name",
|
||||||
"display_name" => "Old Display Name → New Display Name",
|
"display_name" => "Old Display Name → New Display Name",
|
||||||
"provider" => "open_ai → anthropic",
|
"provider" => "open_ai → anthropic",
|
||||||
"api_key" => "updated" # Not showing actual values
|
"api_key" => "updated", # Not showing actual values
|
||||||
)
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
it "doesn't log when there are no changes" do
|
it "doesn't log when there are no changes" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
initial_attributes = {
|
initial_attributes = {
|
||||||
"name" => llm_model.name,
|
"name" => llm_model.name,
|
||||||
"display_name" => llm_model.display_name,
|
"display_name" => llm_model.display_name,
|
||||||
"provider" => llm_model.provider
|
"provider" => llm_model.provider,
|
||||||
}
|
}
|
||||||
|
|
||||||
field_config = {
|
field_config = { name: {}, display_name: {}, provider: {} }
|
||||||
name: {},
|
|
||||||
display_name: {},
|
|
||||||
provider: {}
|
|
||||||
}
|
|
||||||
|
|
||||||
# It should not call log_custom since there are no changes
|
|
||||||
expect(staff_action_logger).not_to receive(:log_custom)
|
|
||||||
|
|
||||||
# Create entity details
|
# Create entity details
|
||||||
entity_details = {
|
entity_details = {
|
||||||
model_id: llm_model.id,
|
model_id: llm_model.id,
|
||||||
model_name: llm_model.name,
|
model_name: llm_model.name,
|
||||||
display_name: llm_model.display_name
|
display_name: llm_model.display_name,
|
||||||
}
|
}
|
||||||
|
|
||||||
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
||||||
|
|
||||||
|
# Verify log_custom was not called
|
||||||
|
expect(staff_action_logger).not_to have_received(:log_custom)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "handles fields marked as not to be tracked" do
|
it "handles fields marked as not to be tracked" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
initial_attributes = {
|
initial_attributes = {
|
||||||
"name" => "Old Name",
|
"name" => "Old Name",
|
||||||
"display_name" => "Old Display Name",
|
"display_name" => "Old Display Name",
|
||||||
"provider" => "open_ai"
|
"provider" => "open_ai",
|
||||||
}
|
}
|
||||||
|
|
||||||
llm_model.update!(
|
llm_model.update!(name: "New Name", display_name: "New Display Name", provider: "anthropic")
|
||||||
name: "New Name",
|
|
||||||
display_name: "New Display Name",
|
|
||||||
provider: "anthropic"
|
|
||||||
)
|
|
||||||
|
|
||||||
field_config = {
|
field_config = {
|
||||||
name: {},
|
name: {
|
||||||
display_name: {},
|
},
|
||||||
provider: { track: false } # Should not be tracked even though it changed
|
display_name: {
|
||||||
|
},
|
||||||
|
provider: {
|
||||||
|
track: false,
|
||||||
|
}, # Should not be tracked even though it changed
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Create entity details
|
||||||
|
entity_details = {
|
||||||
|
model_id: llm_model.id,
|
||||||
|
model_name: llm_model.name,
|
||||||
|
display_name: llm_model.display_name,
|
||||||
|
}
|
||||||
|
|
||||||
|
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
||||||
|
|
||||||
# Provider should not appear in the logged changes
|
# Provider should not appear in the logged changes
|
||||||
expect(staff_action_logger).to receive(:log_custom).with(
|
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||||
"update_ai_llm_model",
|
"update_ai_llm_model",
|
||||||
hash_including(
|
hash_including(
|
||||||
"model_id" => llm_model.id,
|
"model_id" => llm_model.id,
|
||||||
"name" => "Old Name → New Name",
|
"name" => "Old Name → New Name",
|
||||||
"display_name" => "Old Display Name → New Display Name"
|
"display_name" => "Old Display Name → New Display Name",
|
||||||
)
|
),
|
||||||
) do |action, details|
|
) do |action, details|
|
||||||
expect(details).not_to have_key("provider")
|
expect(details).not_to have_key("provider")
|
||||||
end
|
end
|
||||||
|
|
||||||
# Create entity details
|
|
||||||
entity_details = {
|
|
||||||
model_id: llm_model.id,
|
|
||||||
model_name: llm_model.name,
|
|
||||||
display_name: llm_model.display_name
|
|
||||||
}
|
|
||||||
|
|
||||||
subject.log_update("llm_model", llm_model, initial_attributes, field_config, entity_details)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
it "handles json fields properly" do
|
it "handles json fields properly" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
# Setup initial attributes with JSON fields
|
# Setup initial attributes with JSON fields
|
||||||
initial_attributes = {
|
initial_attributes = {
|
||||||
"name" => "Old Name",
|
"name" => "Old Name",
|
||||||
"tools" => [["search", { "base_query" => "test" }, true]]
|
"tools" => [["search", { "base_query" => "test" }, true]],
|
||||||
}
|
}
|
||||||
|
|
||||||
# Update with different JSON
|
# Update with different JSON
|
||||||
ai_persona.update!(
|
ai_persona.update!(
|
||||||
name: "New Name",
|
name: "New Name",
|
||||||
tools: [["search", { "base_query" => "updated" }, true], ["categories", {}, false]]
|
tools: [["search", { "base_query" => "updated" }, true], ["categories", {}, false]],
|
||||||
)
|
)
|
||||||
|
|
||||||
field_config = {
|
field_config = { name: {}, json_fields: %w[tools] }
|
||||||
name: {},
|
|
||||||
json_fields: %w[tools]
|
# Create entity details
|
||||||
}
|
entity_details = { persona_id: ai_persona.id, persona_name: ai_persona.name }
|
||||||
|
|
||||||
# It should log that tools was updated without showing the exact diff
|
subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
|
||||||
expect(staff_action_logger).to receive(:log_custom).with(
|
|
||||||
|
# Verify with have_received
|
||||||
|
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||||
"update_ai_persona",
|
"update_ai_persona",
|
||||||
hash_including(
|
hash_including(
|
||||||
"persona_id" => ai_persona.id,
|
"persona_id" => ai_persona.id,
|
||||||
"persona_name" => ai_persona.name,
|
"persona_name" => ai_persona.name,
|
||||||
"name" => "Old Name → New Name",
|
"name" => "Old Name → New Name",
|
||||||
"tools" => "updated"
|
"tools" => "updated",
|
||||||
)
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create entity details
|
|
||||||
entity_details = {
|
|
||||||
persona_id: ai_persona.id,
|
|
||||||
persona_name: ai_persona.name
|
|
||||||
}
|
|
||||||
|
|
||||||
subject.log_update("persona", ai_persona, initial_attributes, field_config, entity_details)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "#log_deletion" do
|
describe "#log_deletion" do
|
||||||
it "logs deletion with the correct entity type" do
|
it "logs deletion with the correct entity type" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
details = {
|
details = {
|
||||||
model_id: llm_model.id,
|
model_id: llm_model.id,
|
||||||
display_name: llm_model.display_name,
|
display_name: llm_model.display_name,
|
||||||
name: llm_model.name
|
name: llm_model.name,
|
||||||
}
|
}
|
||||||
|
|
||||||
expect(staff_action_logger).to receive(:log_custom).with(
|
subject.log_deletion("llm_model", details)
|
||||||
|
|
||||||
|
# Verify with have_received
|
||||||
|
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||||
"delete_ai_llm_model",
|
"delete_ai_llm_model",
|
||||||
hash_including(
|
hash_including(
|
||||||
"model_id" => details[:model_id],
|
"model_id" => details[:model_id],
|
||||||
"display_name" => details[:display_name],
|
"display_name" => details[:display_name],
|
||||||
"name" => details[:name]
|
"name" => details[:name],
|
||||||
)
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
subject.log_deletion("llm_model", details)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "#log_custom" do
|
describe "#log_custom" do
|
||||||
it "delegates to StaffActionLogger#log_custom" do
|
it "delegates to StaffActionLogger#log_custom" do
|
||||||
staff_action_logger = instance_double(StaffActionLogger)
|
staff_action_logger = instance_double(StaffActionLogger)
|
||||||
expect(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_action_logger)
|
||||||
|
allow(staff_action_logger).to receive(:log_custom)
|
||||||
|
|
||||||
details = { key: "value" }
|
details = { key: "value" }
|
||||||
|
|
||||||
expect(staff_action_logger).to receive(:log_custom).with(
|
|
||||||
"custom_action_type",
|
|
||||||
hash_including("key" => details[:key])
|
|
||||||
)
|
|
||||||
|
|
||||||
subject.log_custom("custom_action_type", details)
|
subject.log_custom("custom_action_type", details)
|
||||||
|
|
||||||
|
# Verify with have_received
|
||||||
|
expect(staff_action_logger).to have_received(:log_custom).with(
|
||||||
|
"custom_action_type",
|
||||||
|
hash_including("key" => details[:key]),
|
||||||
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "Special cases from controllers" do
|
describe "Special cases from controllers" do
|
||||||
context "with LlmModel quotas" do
|
context "with LlmModel quotas" do
|
||||||
before do
|
before do
|
||||||
# Create a quota for the model
|
# Create a quota for the model
|
||||||
@quota = Fabricate(:llm_quota, llm_model: llm_model, group: group, max_tokens: 1000)
|
@quota = Fabricate(:llm_quota, llm_model: llm_model, group: group, max_tokens: 1000)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "handles quota changes in log_llm_model_creation" do
|
it "handles quota changes in log_llm_model_creation" do
|
||||||
expect_any_instance_of(StaffActionLogger).to receive(:log_custom) do |_, action, details|
|
# Setup
|
||||||
expect(action).to eq("create_ai_llm_model")
|
staff_logger = instance_double(StaffActionLogger)
|
||||||
expect(details).to include("model_id", "model_name", "display_name")
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
|
||||||
expect(details["quotas"]).to include("Group #{group.id}")
|
allow(staff_logger).to receive(:log_custom)
|
||||||
expect(details["quotas"]).to include("1000 tokens")
|
|
||||||
end
|
|
||||||
|
|
||||||
# Call the method directly as it would be called from the controller
|
# Call the method directly as it would be called from the controller
|
||||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
||||||
field_config = { display_name: {}, name: {} }
|
field_config = { display_name: {}, name: {} }
|
||||||
|
|
||||||
# Create entity details
|
# Create entity details
|
||||||
entity_details = {
|
entity_details = {
|
||||||
model_id: llm_model.id,
|
model_id: llm_model.id,
|
||||||
model_name: llm_model.name,
|
model_name: llm_model.name,
|
||||||
display_name: llm_model.display_name
|
display_name: llm_model.display_name,
|
||||||
}
|
}
|
||||||
|
|
||||||
log_details = entity_details.dup
|
log_details = entity_details.dup
|
||||||
log_details.merge!(logger.send(:extract_entity_attributes, llm_model, field_config))
|
log_details.merge!(logger.send(:extract_entity_attributes, llm_model, field_config))
|
||||||
|
|
||||||
# Add quota information as a special case
|
# Add quota information as a special case
|
||||||
log_details[:quotas] = llm_model.llm_quotas.map do |quota|
|
log_details[:quotas] = llm_model
|
||||||
"Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
|
.llm_quotas
|
||||||
end.join("; ")
|
.map do |quota|
|
||||||
|
"Group #{quota.group_id}: #{quota.max_tokens} tokens, #{quota.max_usages} usages, #{quota.duration_seconds}s"
|
||||||
|
end
|
||||||
|
.join("; ")
|
||||||
|
|
||||||
logger.log_custom("create_ai_llm_model", log_details)
|
logger.log_custom("create_ai_llm_model", log_details)
|
||||||
|
|
||||||
|
# Verify with have_received
|
||||||
|
expect(staff_logger).to have_received(:log_custom).with(
|
||||||
|
"create_ai_llm_model",
|
||||||
|
hash_including(
|
||||||
|
"model_id" => llm_model.id,
|
||||||
|
"model_name" => llm_model.name,
|
||||||
|
"display_name" => llm_model.display_name,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
expect(staff_logger).to have_received(:log_custom).with(
|
||||||
|
"create_ai_llm_model",
|
||||||
|
hash_including("quotas" => a_string_including("Group #{group.id}", "1000 tokens")),
|
||||||
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "handles quota changes in log_llm_model_update" do
|
it "handles quota changes in log_llm_model_update" do
|
||||||
initial_quotas = llm_model.llm_quotas.map(&:attributes)
|
initial_quotas = llm_model.llm_quotas.map(&:attributes)
|
||||||
|
|
||||||
# Update the quota
|
# Update the quota
|
||||||
@quota.update!(max_tokens: 2000)
|
@quota.update!(max_tokens: 2000)
|
||||||
current_quotas = llm_model.llm_quotas.reload.map(&:attributes)
|
current_quotas = llm_model.llm_quotas.reload.map(&:attributes)
|
||||||
|
|
||||||
expect_any_instance_of(StaffActionLogger).to receive(:log_custom) do |_, action, details|
|
# Setup
|
||||||
expect(action).to eq("update_ai_llm_model")
|
staff_logger = instance_double(StaffActionLogger)
|
||||||
expect(details).to include("model_id", "quotas")
|
allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
|
||||||
expect(details["quotas"]).to include("1000 tokens")
|
allow(staff_logger).to receive(:log_custom)
|
||||||
expect(details["quotas"]).to include("2000 tokens")
|
|
||||||
end
|
|
||||||
|
|
||||||
# Simulate the special quota handling in the controller
|
# Simulate the special quota handling in the controller
|
||||||
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
logger = DiscourseAi::Utils::AiStaffActionLogger.new(admin)
|
||||||
changes = {}
|
changes = {}
|
||||||
|
|
||||||
# Track quota changes separately as they're a special case
|
# Track quota changes separately as they're a special case
|
||||||
if initial_quotas != current_quotas
|
if initial_quotas != current_quotas
|
||||||
initial_quota_summary = initial_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
|
initial_quota_summary =
|
||||||
current_quota_summary = current_quotas.map { |q| "Group #{q['group_id']}: #{q['max_tokens']} tokens" }.join("; ")
|
initial_quotas
|
||||||
|
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
|
||||||
|
.join("; ")
|
||||||
|
current_quota_summary =
|
||||||
|
current_quotas
|
||||||
|
.map { |q| "Group #{q["group_id"]}: #{q["max_tokens"]} tokens" }
|
||||||
|
.join("; ")
|
||||||
changes[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
|
changes[:quotas] = "#{initial_quota_summary} → #{current_quota_summary}"
|
||||||
end
|
end
|
||||||
|
|
||||||
# Create entity details
|
# Create entity details
|
||||||
entity_details = {
|
entity_details = {
|
||||||
model_id: llm_model.id,
|
model_id: llm_model.id,
|
||||||
model_name: llm_model.name,
|
model_name: llm_model.name,
|
||||||
display_name: llm_model.display_name
|
display_name: llm_model.display_name,
|
||||||
}
|
}
|
||||||
|
|
||||||
log_details = entity_details.dup.merge(changes)
|
log_details = entity_details.dup.merge(changes)
|
||||||
logger.log_custom("update_ai_llm_model", log_details)
|
logger.log_custom("update_ai_llm_model", log_details)
|
||||||
|
|
||||||
|
# Verify with have_received
|
||||||
|
expect(staff_logger).to have_received(:log_custom).with(
|
||||||
|
"update_ai_llm_model",
|
||||||
|
hash_including(
|
||||||
|
"model_id" => llm_model.id,
|
||||||
|
"quotas" => a_string_including("1000 tokens", "2000 tokens"),
|
||||||
|
),
|
||||||
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
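The rewritten examples above drop expect_any_instance_of in favour of an injected instance_double that is checked afterwards with have_received. A minimal sketch of that spy style follows; the example name, action string, and detail hash are hypothetical, and it assumes only a standard Discourse spec environment with a fabricated admin user:

# frozen_string_literal: true

RSpec.describe "verifying StaffActionLogger calls with a spy" do
  fab!(:admin)

  it "asserts on the interaction after the code under test has run" do
    # Replace the collaborator with a verifying double and inject it through .new
    staff_logger = instance_double(StaffActionLogger)
    allow(StaffActionLogger).to receive(:new).with(admin).and_return(staff_logger)
    allow(staff_logger).to receive(:log_custom)

    # Exercise: any code building a StaffActionLogger for `admin` now receives the double
    StaffActionLogger.new(admin).log_custom("example_action", "detail" => "value")

    # Verify: the call is checked after the fact, not inside a receive block
    expect(staff_logger).to have_received(:log_custom).with(
      "example_action",
      hash_including("detail" => "value"),
    )
  end
end

Keeping the exercise and the verification in separate steps is why the diff moves the expectations below logger.log_custom instead of wrapping them in a receive block.
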
@@ -125,69 +125,82 @@ RSpec.describe DiscourseAi::Admin::AiSpamController do
             params: {
               is_enabled: true,
               llm_model_id: llm_model.id,
-              custom_instructions: "updated instructions"
+              custom_instructions: "updated instructions",
             }

         expect(response.status).to eq(200)

         # Verify the log was created with the right subject
-        history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").last
+        history =
+          UserHistory.where(
+            action: UserHistory.actions[:custom_staff],
+            custom_type: "update_ai_spam_settings",
+          ).last
         expect(history).to be_present
-        expect(history.subject).to eq("AI Spam Detection")
         expect(history.details).to include("custom_instructions_changed")
       end

       it "logs staff action when llm_model_id changes" do
         # Create another model to change to
-        new_llm_model = Fabricate(:llm_model, name: "New Test Model", display_name: "New Test Model")
-        put "/admin/plugins/discourse-ai/ai-spam.json",
-            params: {
-              llm_model_id: new_llm_model.id
-            }
+        new_llm_model =
+          Fabricate(:llm_model, name: "New Test Model", display_name: "New Test Model")
+        put "/admin/plugins/discourse-ai/ai-spam.json", params: { llm_model_id: new_llm_model.id }

         expect(response.status).to eq(200)

         # Verify the log was created with the right subject
-        history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").last
+        history =
+          UserHistory.where(
+            action: UserHistory.actions[:custom_staff],
+            custom_type: "update_ai_spam_settings",
+          ).last
         expect(history).to be_present
-        expect(history.subject).to eq("AI Spam Detection")
         expect(history.details).to include("llm_model_id")
       end

       it "does not log staff action when only is_enabled changes" do
         # Check initial count of logs
-        initial_count = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").count
+        initial_count =
+          UserHistory.where(
+            action: UserHistory.actions[:custom_staff],
+            custom_type: "update_ai_spam_settings",
+          ).count

         # Update only the is_enabled setting
-        put "/admin/plugins/discourse-ai/ai-spam.json",
-            params: {
-              is_enabled: false
-            }
+        put "/admin/plugins/discourse-ai/ai-spam.json", params: { is_enabled: false }

         expect(response.status).to eq(200)

         # Verify no new log was created
-        current_count = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").count
+        current_count =
+          UserHistory.where(
+            action: UserHistory.actions[:custom_staff],
+            custom_type: "update_ai_spam_settings",
+          ).count
         expect(current_count).to eq(initial_count)
       end

       it "logs both custom_instructions and llm_model_id changes in one entry" do
         # Create another model to change to
-        new_llm_model = Fabricate(:llm_model, name: "Another Test Model", display_name: "Another Test Model")
+        new_llm_model =
+          Fabricate(:llm_model, name: "Another Test Model", display_name: "Another Test Model")

         put "/admin/plugins/discourse-ai/ai-spam.json",
             params: {
               llm_model_id: new_llm_model.id,
-              custom_instructions: "new instructions for both changes"
+              custom_instructions: "new instructions for both changes",
             }

         expect(response.status).to eq(200)

         # Verify the log was created with all changes
-        history = UserHistory.where(action: UserHistory.actions[:custom_staff], custom_type: "update_ai_spam_settings").last
+        history =
+          UserHistory.where(
+            action: UserHistory.actions[:custom_staff],
+            custom_type: "update_ai_spam_settings",
+          ).last
         expect(history).to be_present
-        expect(history.subject).to eq("AI Spam Detection")
         expect(history.details).to include("llm_model_id")
         expect(history.details).to include("custom_instructions_changed")
       end

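Each of these examples locates the audit row with the same UserHistory query; a small helper along these lines (hypothetical, not part of the commit, name assumed) would name that lookup once:

# Hypothetical spec helper mirroring the lookup used repeatedly above:
# the most recent custom staff action recorded for AI spam settings updates.
def last_spam_settings_log
  UserHistory.where(
    action: UserHistory.actions[:custom_staff],
    custom_type: "update_ai_spam_settings",
  ).last
end

# Usage in any of the examples above:
#   expect(last_spam_settings_log.details).to include("llm_model_id")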