diff --git a/app/models/account.rb b/app/models/account.rb index 419ccc471..a35affb3e 100644 --- a/app/models/account.rb +++ b/app/models/account.rb @@ -2,18 +2,20 @@ # # Table name: accounts # -# id :integer not null, primary key -# auto_resolve_duration :integer -# custom_attributes :jsonb -# domain :string(100) -# feature_flags :bigint default(0), not null -# limits :jsonb -# locale :integer default("en") -# name :string not null -# status :integer default("active") -# support_email :string(100) -# created_at :datetime not null -# updated_at :datetime not null +# id :integer not null, primary key +# auto_resolve_duration :integer +# contactable_contacts_count :integer default(0) +# custom_attributes :jsonb +# domain :string(100) +# feature_flags :bigint default(0), not null +# internal_attributes :jsonb not null +# limits :jsonb +# locale :integer default("en") +# name :string not null +# status :integer default("active") +# support_email :string(100) +# created_at :datetime not null +# updated_at :datetime not null # # Indexes # diff --git a/config/installation_config.yml b/config/installation_config.yml index 57a5a5faf..70c07ce60 100644 --- a/config/installation_config.yml +++ b/config/installation_config.yml @@ -212,6 +212,10 @@ - name: CHATWOOT_SUPPORT_IDENTIFIER_HASH value: description: 'The Chatwoot identifier hash, to validate the contact in the live chat window.' 
+- name: ACCOUNT_SECURITY_NOTIFICATION_WEBHOOK_URL + display_title: Webhook URL to post security analysis + value: + description: Used to notify Chatwoot about account abuses, potential threats (Should be a Discord Webhook URL) # ------- End of Chatwoot Internal Config for Self Hosted ----# ## ------ Configs added for enterprise clients ------ ## diff --git a/db/migrate/20250228185548_add_internal_attributes_to_accounts.rb b/db/migrate/20250228185548_add_internal_attributes_to_accounts.rb new file mode 100644 index 000000000..6912c0825 --- /dev/null +++ b/db/migrate/20250228185548_add_internal_attributes_to_accounts.rb @@ -0,0 +1,5 @@ +class AddInternalAttributesToAccounts < ActiveRecord::Migration[7.0] + def change + add_column :accounts, :internal_attributes, :jsonb, null: false, default: {} + end +end diff --git a/db/schema.rb b/db/schema.rb index 6bea44c8b..a68593c68 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. 
-ActiveRecord::Schema[7.0].define(version: 2025_02_07_040150) do +ActiveRecord::Schema[7.0].define(version: 2025_02_28_185548) do # These extensions should be enabled to support this database enable_extension "pg_stat_statements" enable_extension "pg_trgm" @@ -57,6 +57,7 @@ ActiveRecord::Schema[7.0].define(version: 2025_02_07_040150) do t.jsonb "limits", default: {} t.jsonb "custom_attributes", default: {} t.integer "status", default: 0 + t.jsonb "internal_attributes", default: {}, null: false t.index ["status"], name: "index_accounts_on_status" end diff --git a/enterprise/app/controllers/enterprise/super_admin/app_configs_controller.rb b/enterprise/app/controllers/enterprise/super_admin/app_configs_controller.rb index 21617aa83..1cbf2c6ca 100644 --- a/enterprise/app/controllers/enterprise/super_admin/app_configs_controller.rb +++ b/enterprise/app/controllers/enterprise/super_admin/app_configs_controller.rb @@ -33,6 +33,6 @@ module Enterprise::SuperAdmin::AppConfigsController def internal_config_options %w[CHATWOOT_INBOX_TOKEN CHATWOOT_INBOX_HMAC_KEY ANALYTICS_TOKEN CLEARBIT_API_KEY DASHBOARD_SCRIPTS BLOCKED_EMAIL_DOMAINS - CAPTAIN_CLOUD_PLAN_LIMITS] + CAPTAIN_CLOUD_PLAN_LIMITS ACCOUNT_SECURITY_NOTIFICATION_WEBHOOK_URL] end end diff --git a/enterprise/app/jobs/internal/account_analysis_job.rb b/enterprise/app/jobs/internal/account_analysis_job.rb new file mode 100644 index 000000000..49ddaa2b5 --- /dev/null +++ b/enterprise/app/jobs/internal/account_analysis_job.rb @@ -0,0 +1,9 @@ +class Internal::AccountAnalysisJob < ApplicationJob + queue_as :low + + def perform(account) + return if GlobalConfig.get_value('DEPLOYMENT_ENV') != 'cloud' + + Internal::AccountAnalysis::ThreatAnalyserService.new(account).perform + end +end diff --git a/enterprise/app/services/captain/copilot/chat_service.rb b/enterprise/app/services/captain/copilot/chat_service.rb index 019136540..3d53d7b5c 100644 --- a/enterprise/app/services/captain/copilot/chat_service.rb +++ 
b/enterprise/app/services/captain/copilot/chat_service.rb @@ -1,6 +1,6 @@ require 'openai' -class Captain::Copilot::ChatService < Captain::Llm::BaseOpenAiService +class Captain::Copilot::ChatService < Llm::BaseOpenAiService include Captain::ChatHelper def initialize(assistant, config) diff --git a/enterprise/app/services/captain/llm/assistant_chat_service.rb b/enterprise/app/services/captain/llm/assistant_chat_service.rb index ef5ae2c0f..3d2b0da44 100644 --- a/enterprise/app/services/captain/llm/assistant_chat_service.rb +++ b/enterprise/app/services/captain/llm/assistant_chat_service.rb @@ -1,6 +1,6 @@ require 'openai' -class Captain::Llm::AssistantChatService < Captain::Llm::BaseOpenAiService +class Captain::Llm::AssistantChatService < Llm::BaseOpenAiService include Captain::ChatHelper def initialize(assistant: nil) diff --git a/enterprise/app/services/captain/llm/contact_attributes_service.rb b/enterprise/app/services/captain/llm/contact_attributes_service.rb index e942a2a56..e7976bf27 100644 --- a/enterprise/app/services/captain/llm/contact_attributes_service.rb +++ b/enterprise/app/services/captain/llm/contact_attributes_service.rb @@ -1,4 +1,4 @@ -class Captain::Llm::ContactAttributesService < Captain::Llm::BaseOpenAiService +class Captain::Llm::ContactAttributesService < Llm::BaseOpenAiService def initialize(assistant, conversation) super() @assistant = assistant diff --git a/enterprise/app/services/captain/llm/contact_notes_service.rb b/enterprise/app/services/captain/llm/contact_notes_service.rb index 245e0d96a..75aee412f 100644 --- a/enterprise/app/services/captain/llm/contact_notes_service.rb +++ b/enterprise/app/services/captain/llm/contact_notes_service.rb @@ -1,4 +1,4 @@ -class Captain::Llm::ContactNotesService < Captain::Llm::BaseOpenAiService +class Captain::Llm::ContactNotesService < Llm::BaseOpenAiService def initialize(assistant, conversation) super() @assistant = assistant diff --git 
a/enterprise/app/services/captain/llm/conversation_faq_service.rb b/enterprise/app/services/captain/llm/conversation_faq_service.rb index ea33493f2..682818f73 100644 --- a/enterprise/app/services/captain/llm/conversation_faq_service.rb +++ b/enterprise/app/services/captain/llm/conversation_faq_service.rb @@ -1,4 +1,4 @@ -class Captain::Llm::ConversationFaqService < Captain::Llm::BaseOpenAiService +class Captain::Llm::ConversationFaqService < Llm::BaseOpenAiService DISTANCE_THRESHOLD = 0.3 def initialize(assistant, conversation) diff --git a/enterprise/app/services/captain/llm/embedding_service.rb b/enterprise/app/services/captain/llm/embedding_service.rb index ca2ebdc1b..74f3079bf 100644 --- a/enterprise/app/services/captain/llm/embedding_service.rb +++ b/enterprise/app/services/captain/llm/embedding_service.rb @@ -1,6 +1,6 @@ require 'openai' -class Captain::Llm::EmbeddingService < Captain::Llm::BaseOpenAiService +class Captain::Llm::EmbeddingService < Llm::BaseOpenAiService class EmbeddingsError < StandardError; end DEFAULT_MODEL = 'text-embedding-3-small'.freeze diff --git a/enterprise/app/services/captain/llm/faq_generator_service.rb b/enterprise/app/services/captain/llm/faq_generator_service.rb index 6462f5c47..c26ffa2a3 100644 --- a/enterprise/app/services/captain/llm/faq_generator_service.rb +++ b/enterprise/app/services/captain/llm/faq_generator_service.rb @@ -1,4 +1,4 @@ -class Captain::Llm::FaqGeneratorService < Captain::Llm::BaseOpenAiService +class Captain::Llm::FaqGeneratorService < Llm::BaseOpenAiService def initialize(content) super() @content = content diff --git a/enterprise/app/services/internal/account_analysis/account_updater_service.rb b/enterprise/app/services/internal/account_analysis/account_updater_service.rb new file mode 100644 index 000000000..feb54881b --- /dev/null +++ b/enterprise/app/services/internal/account_analysis/account_updater_service.rb @@ -0,0 +1,54 @@ +class Internal::AccountAnalysis::AccountUpdaterService + def 
initialize(account) + @account = account + end + + def update_with_analysis(analysis, error_message = nil) + if error_message + save_error(error_message) + notify_on_discord + return + end + + save_analysis_results(analysis) + flag_account_if_needed(analysis) + end + + private + + def save_error(error_message) + @account.internal_attributes['security_flagged'] = true + @account.internal_attributes['security_flag_reason'] = "Error: #{error_message}" + @account.save + end + + def save_analysis_results(analysis) + @account.internal_attributes['last_threat_scan_at'] = Time.current + @account.internal_attributes['last_threat_scan_level'] = analysis['threat_level'] + @account.internal_attributes['last_threat_scan_summary'] = analysis['threat_summary'] + @account.internal_attributes['last_threat_scan_recommendation'] = analysis['recommendation'] + @account.save! + end + + def flag_account_if_needed(analysis) + return if analysis['threat_level'] == 'none' + + if %w[high medium].include?(analysis['threat_level']) || + analysis['illegal_activities_detected'] == true || + analysis['recommendation'] == 'block' + + @account.internal_attributes['security_flagged'] = true + @account.internal_attributes['security_flag_reason'] = "Threat detected: #{analysis['threat_summary']}" + @account.save! 
+ + Rails.logger.info("Flagging account #{@account.id} due to threat level: #{analysis['threat_level']}") + end + + notify_on_discord + end + + def notify_on_discord + Rails.logger.info("Account #{@account.id} has been flagged for security review") + Internal::AccountAnalysis::DiscordNotifierService.new.notify_flagged_account(@account) + end +end diff --git a/enterprise/app/services/internal/account_analysis/content_evaluator_service.rb b/enterprise/app/services/internal/account_analysis/content_evaluator_service.rb new file mode 100644 index 000000000..e88c46091 --- /dev/null +++ b/enterprise/app/services/internal/account_analysis/content_evaluator_service.rb @@ -0,0 +1,73 @@ +class Internal::AccountAnalysis::ContentEvaluatorService < Llm::BaseOpenAiService + def initialize + super() + + @model = 'gpt-4o-mini'.freeze + end + + def evaluate(content) + return default_evaluation if content.blank? + + begin + response = send_to_llm(content) + evaluation = handle_response(response) + log_evaluation_results(evaluation) + evaluation + rescue StandardError => e + handle_evaluation_error(e) + end + end + + private + + def send_to_llm(content) + Rails.logger.info('Sending content to LLM for security evaluation') + @client.chat( + parameters: { + model: @model, + messages: llm_messages(content), + response_format: { type: 'json_object' } + } + ) + end + + def handle_response(response) + return default_evaluation if response.nil? 
+ + parsed = JSON.parse(response.dig('choices', 0, 'message', 'content').strip) + + { + 'threat_level' => parsed['threat_level'] || 'unknown', + 'threat_summary' => parsed['threat_summary'] || 'No threat summary provided', + 'detected_threats' => parsed['detected_threats'] || [], + 'illegal_activities_detected' => parsed['illegal_activities_detected'] || false, + 'recommendation' => parsed['recommendation'] || 'review' + } + end + + def default_evaluation(error_type = nil) + { + 'threat_level' => 'unknown', + 'threat_summary' => 'Failed to complete content evaluation', + 'detected_threats' => error_type ? [error_type] : [], + 'illegal_activities_detected' => false, + 'recommendation' => 'review' + } + end + + def log_evaluation_results(evaluation) + Rails.logger.info("LLM evaluation - Level: #{evaluation['threat_level']}, Illegal activities: #{evaluation['illegal_activities_detected']}") + end + + def handle_evaluation_error(error) + Rails.logger.error("Error evaluating content: #{error.message}") + default_evaluation('evaluation_failure') + end + + def llm_messages(content) + [ + { role: 'system', content: 'You are a security analysis system that evaluates content for potential threats and scams.' }, + { role: 'user', content: Internal::AccountAnalysis::PromptsService.threat_analyser(content.to_s[0...10_000]) } + ] + end +end diff --git a/enterprise/app/services/internal/account_analysis/discord_notifier_service.rb b/enterprise/app/services/internal/account_analysis/discord_notifier_service.rb new file mode 100644 index 000000000..01ad09b36 --- /dev/null +++ b/enterprise/app/services/internal/account_analysis/discord_notifier_service.rb @@ -0,0 +1,47 @@ +class Internal::AccountAnalysis::DiscordNotifierService + def notify_flagged_account(account) + if webhook_url.blank? 
+ Rails.logger.error('Cannot send Discord notification: No webhook URL configured') + return + end + + HTTParty.post( + webhook_url, + body: build_message(account).to_json, + headers: { 'Content-Type' => 'application/json' } + ) + + Rails.logger.info("Discord notification sent for flagged account #{account.id}") + rescue StandardError => e + Rails.logger.error("Error sending Discord notification: #{e.message}") + end + + private + + def build_message(account) + analysis = account.internal_attributes + user = account.users.order(id: :asc).first + + content = <<~MESSAGE + --- + An account has been flagged in our security system with the following details: + + 🆔 **Account Details:** + Account ID: #{account.id} + User Email: #{user&.email || 'N/A'} + Threat Level: #{analysis['last_threat_scan_level']} + + 🔎 **System Recommendation:** #{analysis['last_threat_scan_recommendation']} + #{analysis['illegal_activities_detected'] ? '⚠️ Potential illegal activities detected' : 'No illegal activities detected'} + + 📝 **Findings:** + #{analysis['last_threat_scan_summary']} + MESSAGE + + { content: content } + end + + def webhook_url + @webhook_url ||= InstallationConfig.find_by(name: 'ACCOUNT_SECURITY_NOTIFICATION_WEBHOOK_URL')&.value + end +end diff --git a/enterprise/app/services/internal/account_analysis/prompts_service.rb b/enterprise/app/services/internal/account_analysis/prompts_service.rb new file mode 100644 index 000000000..3bd49f334 --- /dev/null +++ b/enterprise/app/services/internal/account_analysis/prompts_service.rb @@ -0,0 +1,31 @@ +class Internal::AccountAnalysis::PromptsService + class << self + def threat_analyser(content) + <<~PROMPT + Analyze the following website content for potential security threats, scams, or illegal activities. + + Focus on identifying: + 1. Phishing attempts + 2. Fraudulent business practices + 3. Malware distribution + 4. Illegal product/service offerings + 5. Money laundering indicators + 6. 
Identity theft schemes + + Always classify websites under construction or without content to be a medium. + + Website content: + #{content} + + Provide your analysis in the following JSON format: + { + "threat_level": "none|low|medium|high", + "threat_summary": "Brief summary of findings", + "detected_threats": ["threat1", "threat2"], + "illegal_activities_detected": true|false, + "recommendation": "approve|review|block" + } + PROMPT + end + end +end diff --git a/enterprise/app/services/internal/account_analysis/threat_analyser_service.rb b/enterprise/app/services/internal/account_analysis/threat_analyser_service.rb new file mode 100644 index 000000000..2ec4845c4 --- /dev/null +++ b/enterprise/app/services/internal/account_analysis/threat_analyser_service.rb @@ -0,0 +1,43 @@ +class Internal::AccountAnalysis::ThreatAnalyserService + def initialize(account) + @account = account + @user = account.users.order(id: :asc).first + @domain = extract_domain_from_email(@user&.email) + end + + def perform + if @domain.blank? + Rails.logger.info("Skipping threat analysis for account #{@account.id}: No domain found") + return + end + + website_content = Internal::AccountAnalysis::WebsiteScraperService.new(@domain).perform + if website_content.blank? 
+ Rails.logger.info("Skipping threat analysis for account #{@account.id}: No website content found") + Internal::AccountAnalysis::AccountUpdaterService.new(@account).update_with_analysis(nil, 'Scraping error: No content found') + return + end + + content = <<~MESSAGE + Domain: #{@domain} + Content: #{website_content} + MESSAGE + threat_analysis = Internal::AccountAnalysis::ContentEvaluatorService.new.evaluate(content) + Rails.logger.info("Completed threat analysis: level=#{threat_analysis['threat_level']} for account-id: #{@account.id}") + + Internal::AccountAnalysis::AccountUpdaterService.new(@account).update_with_analysis(threat_analysis) + + threat_analysis + end + + private + + def extract_domain_from_email(email) + return nil if email.blank? + + email.split('@').last + rescue StandardError => e + Rails.logger.error("Error extracting domain from email #{email}: #{e.message}") + nil + end +end diff --git a/enterprise/app/services/internal/account_analysis/website_scraper_service.rb b/enterprise/app/services/internal/account_analysis/website_scraper_service.rb new file mode 100644 index 000000000..d03ef8049 --- /dev/null +++ b/enterprise/app/services/internal/account_analysis/website_scraper_service.rb @@ -0,0 +1,32 @@ +class Internal::AccountAnalysis::WebsiteScraperService + def initialize(domain) + @domain = domain + end + + def perform + return nil if @domain.blank? 
+ + Rails.logger.info("Scraping website: #{external_link}") + + begin + response = HTTParty.get(external_link, follow_redirects: true) + response.to_s + rescue StandardError => e + Rails.logger.error("Error scraping website for domain #{@domain}: #{e.message}") + nil + end + end + + private + + def external_link + sanitize_url(@domain) + end + + def sanitize_url(domain) + url = domain + url = "https://#{domain}" unless domain.start_with?('http://', 'https://') + Rails.logger.info("Sanitized URL: #{url}") + url + end +end diff --git a/enterprise/app/services/captain/llm/base_open_ai_service.rb b/enterprise/app/services/llm/base_open_ai_service.rb similarity index 93% rename from enterprise/app/services/captain/llm/base_open_ai_service.rb rename to enterprise/app/services/llm/base_open_ai_service.rb index 7b542c0f2..1f229f182 100644 --- a/enterprise/app/services/captain/llm/base_open_ai_service.rb +++ b/enterprise/app/services/llm/base_open_ai_service.rb @@ -1,4 +1,4 @@ -class Captain::Llm::BaseOpenAiService +class Llm::BaseOpenAiService DEFAULT_MODEL = 'gpt-4o-mini'.freeze def initialize diff --git a/spec/enterprise/services/internal/account_analysis/account_updater_service_spec.rb b/spec/enterprise/services/internal/account_analysis/account_updater_service_spec.rb new file mode 100644 index 000000000..37da7d070 --- /dev/null +++ b/spec/enterprise/services/internal/account_analysis/account_updater_service_spec.rb @@ -0,0 +1,130 @@ +require 'rails_helper' + +RSpec.describe Internal::AccountAnalysis::AccountUpdaterService do + let(:account) { create(:account) } + let(:service) { described_class.new(account) } + let(:discord_notifier) { instance_double(Internal::AccountAnalysis::DiscordNotifierService, notify_flagged_account: true) } + + before do + allow(Internal::AccountAnalysis::DiscordNotifierService).to receive(:new).and_return(discord_notifier) + allow(Rails.logger).to receive(:info) + end + + describe '#update_with_analysis' do + context 'when error_message is 
provided' do + it 'saves the error and notifies Discord' do + service.update_with_analysis({}, 'Analysis failed') + + expect(account.internal_attributes['security_flagged']).to be true + expect(account.internal_attributes['security_flag_reason']).to eq('Error: Analysis failed') + expect(discord_notifier).to have_received(:notify_flagged_account).with(account) + end + end + + context 'when analysis is successful' do + let(:analysis) do + { + 'threat_level' => 'none', + 'threat_summary' => 'No threats detected', + 'recommendation' => 'allow' + } + end + + it 'saves the analysis results' do + allow(Time).to receive(:current).and_return('2023-01-01 12:00:00') + + service.update_with_analysis(analysis) + + expect(account.internal_attributes['last_threat_scan_at']).to eq('2023-01-01 12:00:00') + expect(account.internal_attributes['last_threat_scan_level']).to eq('none') + expect(account.internal_attributes['last_threat_scan_summary']).to eq('No threats detected') + expect(account.internal_attributes['last_threat_scan_recommendation']).to eq('allow') + end + + it 'does not flag the account when threat level is none' do + service.update_with_analysis(analysis) + + expect(account.internal_attributes).not_to include('security_flagged') + expect(discord_notifier).not_to have_received(:notify_flagged_account) + end + end + + context 'when analysis detects high threat level' do + let(:analysis) do + { + 'threat_level' => 'high', + 'threat_summary' => 'Suspicious activity detected', + 'recommendation' => 'review', + 'illegal_activities_detected' => false + } + end + + it 'flags the account and notifies Discord' do + service.update_with_analysis(analysis) + + expect(account.internal_attributes['security_flagged']).to be true + expect(account.internal_attributes['security_flag_reason']).to eq('Threat detected: Suspicious activity detected') + expect(discord_notifier).to have_received(:notify_flagged_account).with(account) + expect(Rails.logger).to 
have_received(:info).with("Flagging account #{account.id} due to threat level: high") + expect(Rails.logger).to have_received(:info).with("Account #{account.id} has been flagged for security review") + end + end + + context 'when analysis detects medium threat level' do + let(:analysis) do + { + 'threat_level' => 'medium', + 'threat_summary' => 'Potential issues found', + 'recommendation' => 'review', + 'illegal_activities_detected' => false + } + end + + it 'flags the account and notifies Discord' do + service.update_with_analysis(analysis) + + expect(account.internal_attributes['security_flagged']).to be true + expect(account.internal_attributes['security_flag_reason']).to eq('Threat detected: Potential issues found') + expect(discord_notifier).to have_received(:notify_flagged_account).with(account) + end + end + + context 'when analysis detects illegal activities' do + let(:analysis) do + { + 'threat_level' => 'low', + 'threat_summary' => 'Minor issues found', + 'recommendation' => 'review', + 'illegal_activities_detected' => true + } + end + + it 'flags the account and notifies Discord' do + service.update_with_analysis(analysis) + + expect(account.internal_attributes['security_flagged']).to be true + expect(account.internal_attributes['security_flag_reason']).to eq('Threat detected: Minor issues found') + expect(discord_notifier).to have_received(:notify_flagged_account).with(account) + end + end + + context 'when analysis recommends blocking' do + let(:analysis) do + { + 'threat_level' => 'low', + 'threat_summary' => 'Minor issues found', + 'recommendation' => 'block', + 'illegal_activities_detected' => false + } + end + + it 'flags the account and notifies Discord' do + service.update_with_analysis(analysis) + + expect(account.internal_attributes['security_flagged']).to be true + expect(account.internal_attributes['security_flag_reason']).to eq('Threat detected: Minor issues found') + expect(discord_notifier).to 
have_received(:notify_flagged_account).with(account) + end + end + end +end diff --git a/spec/enterprise/services/internal/account_analysis/content_evaluator_service_spec.rb b/spec/enterprise/services/internal/account_analysis/content_evaluator_service_spec.rb new file mode 100644 index 000000000..d7bf26c35 --- /dev/null +++ b/spec/enterprise/services/internal/account_analysis/content_evaluator_service_spec.rb @@ -0,0 +1,111 @@ +require 'rails_helper' + +RSpec.describe Internal::AccountAnalysis::ContentEvaluatorService do + let(:service) { described_class.new } + let(:content) { 'This is some test content' } + + before do + create(:installation_config, name: 'CAPTAIN_OPEN_AI_API_KEY', value: 'test-key') + end + + describe '#evaluate' do + context 'when content is present' do + let(:llm_response) do + { + 'choices' => [ + { + 'message' => { + 'content' => { + 'threat_level' => 'low', + 'threat_summary' => 'No significant threats detected', + 'detected_threats' => ['minor_concern'], + 'illegal_activities_detected' => false, + 'recommendation' => 'approve' + }.to_json + } + } + ] + } + end + + before do + allow(service).to receive(:send_to_llm).and_return(llm_response) + allow(Rails.logger).to receive(:info) + end + + it 'returns the evaluation results' do + result = service.evaluate(content) + + expect(result).to include( + 'threat_level' => 'low', + 'threat_summary' => 'No significant threats detected', + 'detected_threats' => ['minor_concern'], + 'illegal_activities_detected' => false, + 'recommendation' => 'approve' + ) + end + + it 'logs the evaluation results' do + service.evaluate(content) + + expect(Rails.logger).to have_received(:info).with('LLM evaluation - Level: low, Illegal activities: false') + end + end + + context 'when content is blank' do + let(:blank_content) { '' } + + it 'returns the default evaluation without calling the LLM' do + expect(service).not_to receive(:send_to_llm) + + result = service.evaluate(blank_content) + + expect(result).to 
include( + 'threat_level' => 'unknown', + 'threat_summary' => 'Failed to complete content evaluation', + 'detected_threats' => [], + 'illegal_activities_detected' => false, + 'recommendation' => 'review' + ) + end + end + + context 'when LLM response is nil' do + before do + allow(service).to receive(:send_to_llm).and_return(nil) + end + + it 'returns the default evaluation' do + result = service.evaluate(content) + + expect(result).to include( + 'threat_level' => 'unknown', + 'threat_summary' => 'Failed to complete content evaluation', + 'detected_threats' => [], + 'illegal_activities_detected' => false, + 'recommendation' => 'review' + ) + end + end + + context 'when error occurs during evaluation' do + before do + allow(service).to receive(:send_to_llm).and_raise(StandardError.new('Test error')) + allow(Rails.logger).to receive(:error) + end + + it 'logs the error and returns default evaluation with error type' do + result = service.evaluate(content) + + expect(Rails.logger).to have_received(:error).with('Error evaluating content: Test error') + expect(result).to include( + 'threat_level' => 'unknown', + 'threat_summary' => 'Failed to complete content evaluation', + 'detected_threats' => ['evaluation_failure'], + 'illegal_activities_detected' => false, + 'recommendation' => 'review' + ) + end + end + end +end diff --git a/spec/enterprise/services/internal/account_analysis/discord_notifier_service_spec.rb b/spec/enterprise/services/internal/account_analysis/discord_notifier_service_spec.rb new file mode 100644 index 000000000..6591185d9 --- /dev/null +++ b/spec/enterprise/services/internal/account_analysis/discord_notifier_service_spec.rb @@ -0,0 +1,73 @@ +require 'rails_helper' + +RSpec.describe Internal::AccountAnalysis::DiscordNotifierService do + let(:service) { described_class.new } + let(:webhook_url) { 'https://discord.com/api/webhooks/123456789/some-token' } + let(:account) do + create( + :account, + internal_attributes: { + 'last_threat_scan_level' => 
'high', + 'last_threat_scan_recommendation' => 'review', + 'illegal_activities_detected' => true, + 'last_threat_scan_summary' => 'Suspicious activity detected' + } + ) + end + let!(:user) { create(:user, account: account) } + + before do + allow(Rails.logger).to receive(:info) + allow(Rails.logger).to receive(:error) + end + + describe '#notify_flagged_account' do + context 'when webhook URL is configured' do + before do + create(:installation_config, name: 'ACCOUNT_SECURITY_NOTIFICATION_WEBHOOK_URL', value: webhook_url) + stub_request(:post, webhook_url).to_return(status: 200) + end + + it 'sends notification to Discord webhook' do + service.notify_flagged_account(account) + expect(WebMock).to have_requested(:post, webhook_url) + .with( + body: hash_including( + content: include( + "Account ID: #{account.id}", + "User Email: #{user.email}", + 'Threat Level: high', + '**System Recommendation:** review', + '⚠️ Potential illegal activities detected', + 'Suspicious activity detected' + ) + ) + ) + end + end + + context 'when webhook URL is not configured' do + it 'logs error and does not make HTTP request' do + service.notify_flagged_account(account) + + expect(Rails.logger).to have_received(:error) + .with('Cannot send Discord notification: No webhook URL configured') + expect(WebMock).not_to have_requested(:post, webhook_url) + end + end + + context 'when HTTP request fails' do + before do + create(:installation_config, name: 'ACCOUNT_SECURITY_NOTIFICATION_WEBHOOK_URL', value: webhook_url) + stub_request(:post, webhook_url).to_raise(StandardError.new('Connection failed')) + end + + it 'catches exception and logs error' do + service.notify_flagged_account(account) + + expect(Rails.logger).to have_received(:error) + .with('Error sending Discord notification: Connection failed') + end + end + end +end diff --git a/spec/enterprise/services/internal/account_analysis/threat_analyser_service_spec.rb 
b/spec/enterprise/services/internal/account_analysis/threat_analyser_service_spec.rb new file mode 100644 index 000000000..65d106215 --- /dev/null +++ b/spec/enterprise/services/internal/account_analysis/threat_analyser_service_spec.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Internal::AccountAnalysis::ThreatAnalyserService do + subject { described_class.new(account) } + + let(:account) { create(:account) } + let(:user) { create(:user, email: 'test@example.com', account: account) } + let(:website_scraper) { instance_double(Internal::AccountAnalysis::WebsiteScraperService) } + let(:content_evaluator) { instance_double(Internal::AccountAnalysis::ContentEvaluatorService) } + let(:account_updater) { instance_double(Internal::AccountAnalysis::AccountUpdaterService) } + let(:website_content) { 'This is the website content' } + let(:threat_analysis) { { 'threat_level' => 'medium' } } + + before do + user + + allow(Internal::AccountAnalysis::WebsiteScraperService).to receive(:new).with('example.com').and_return(website_scraper) + allow(Internal::AccountAnalysis::ContentEvaluatorService).to receive(:new).and_return(content_evaluator) + allow(Internal::AccountAnalysis::AccountUpdaterService).to receive(:new).with(account).and_return(account_updater) + end + + describe '#perform' do + before do + allow(website_scraper).to receive(:perform).and_return(website_content) + allow(content_evaluator).to receive(:evaluate).and_return(threat_analysis) + allow(account_updater).to receive(:update_with_analysis) + allow(Rails.logger).to receive(:info) + end + + it 'performs threat analysis and updates the account' do + expected_content = <<~MESSAGE + Domain: example.com + Content: This is the website content + MESSAGE + + expect(website_scraper).to receive(:perform) + expect(content_evaluator).to receive(:evaluate).with(expected_content) + expect(account_updater).to receive(:update_with_analysis).with(threat_analysis) + 
expect(Rails.logger).to receive(:info).with("Completed threat analysis: level=medium for account-id: #{account.id}") + + result = subject.perform + expect(result).to eq(threat_analysis) + end + + context 'when website content is blank' do + before do + allow(website_scraper).to receive(:perform).and_return(nil) + end + + it 'logs info and updates account with error' do + expect(Rails.logger).to receive(:info).with("Skipping threat analysis for account #{account.id}: No website content found") + expect(account_updater).to receive(:update_with_analysis).with(nil, 'Scraping error: No content found') + expect(content_evaluator).not_to receive(:evaluate) + + result = subject.perform + expect(result).to be_nil + end + end + end +end diff --git a/spec/enterprise/services/internal/account_analysis/website_scraper_service_spec.rb b/spec/enterprise/services/internal/account_analysis/website_scraper_service_spec.rb new file mode 100644 index 000000000..6d218e44d --- /dev/null +++ b/spec/enterprise/services/internal/account_analysis/website_scraper_service_spec.rb @@ -0,0 +1,45 @@ +require 'rails_helper' + +RSpec.describe Internal::AccountAnalysis::WebsiteScraperService do + describe '#perform' do + let(:service) { described_class.new(domain) } + let(:html_content) { 'This is sample website content' } + + before do + allow(Rails.logger).to receive(:info) + allow(Rails.logger).to receive(:error) + end + + context 'when domain is nil' do + let(:domain) { nil } + + it 'returns nil' do + expect(service.perform).to be_nil + end + end + + context 'when domain is present' do + let(:domain) { 'example.com' } + + before do + allow(HTTParty).to receive(:get).and_return(html_content) + end + + it 'returns the stripped and normalized content' do + expect(service.perform).to eq(html_content) + end + end + + context 'when an error occurs' do + let(:domain) { 'example.com' } + + before do + allow(HTTParty).to receive(:get).and_raise(StandardError.new('Error')) + end + + it 'returns nil' do + 
expect(service.perform).to be_nil + end + end + end +end