Merge branch 'develop' into feat/add-more-ai-reply-options
@@ -0,0 +1,81 @@
require 'rails_helper'

RSpec.describe Integrations::Openai::ProcessorService do
  subject { described_class.new(hook: hook, event: event) }

  let(:account) { create(:account) }
  let(:hook) { create(:integrations_hook, :openai, account: account) }
  let(:expected_headers) { { 'Authorization' => "Bearer #{hook.settings['api_key']}" } }
  let(:openai_response) do
    {
      'choices' => [
        {
          'message' => {
            'content' => 'This is a reply from openai.'
          }
        }
      ]
    }.to_json
  end
  let!(:conversation) { create(:conversation, account: account) }
  let!(:customer_message) { create(:message, account: account, conversation: conversation, message_type: :incoming, content: 'hello agent') }
  let!(:agent_message) { create(:message, account: account, conversation: conversation, message_type: :outgoing, content: 'hello customer') }

  describe '#perform' do
    context 'when event name is label_suggestion with labels' do
      let(:event) { { 'name' => 'label_suggestion', 'data' => { 'conversation_display_id' => conversation.display_id } } }
      let(:label1) { create(:label, account: account) }
      let(:label2) { create(:label, account: account) }
      let(:label_suggestion_payload) do
        labels = "#{label1.title}, #{label2.title}"
        messages =
          "Customer #{customer_message.sender.name} : #{customer_message.content}\nAgent #{agent_message.sender.name} : #{agent_message.content}"

        "Messages:\n#{messages}\n\nLabels:\n#{labels}"
      end

      it 'returns the label suggestions' do
        request_body = {
          'model' => 'gpt-3.5-turbo',
          'messages' => [
            {
              role: 'system',
              content: 'Your role is as an assistant to a customer support agent. You will be provided with ' \
                       'a transcript of a conversation between a customer and the support agent, along with a list of potential labels. ' \
                       'Your task is to analyze the conversation and select the two labels from the given list that most accurately ' \
                       'represent the themes or issues discussed. Ensure you preserve the exact casing of the labels as they are provided ' \
                       'in the list. Do not create new labels; only choose from those provided. Once you have made your selections, ' \
                       'please provide your response as a comma-separated list of the provided labels. Remember, your response should only contain ' \
                       'the labels you\'ve selected, in their original casing, and nothing else. '
            },
            { role: 'user', content: label_suggestion_payload }
          ]
        }.to_json

        stub_request(:post, 'https://api.openai.com/v1/chat/completions')
          .with(body: request_body, headers: expected_headers)
          .to_return(status: 200, body: openai_response, headers: {})

        result = subject.perform
        expect(result).to eq('This is a reply from openai.')
      end
    end

    context 'when event name is label_suggestion with no labels' do
      let(:event) { { 'name' => 'label_suggestion', 'data' => { 'conversation_display_id' => conversation.display_id } } }

      it 'returns nil' do
        result = subject.perform
        expect(result).to be_nil
      end
    end

    context 'when event name is not one that can be processed' do
      let(:event) { { 'name' => 'unknown', 'data' => {} } }

      it 'returns nil' do
        expect(subject.perform).to be_nil
      end
    end
  end
end
@@ -1,4 +1,4 @@
class Integrations::Openai::ProcessorService < Integrations::OpenaiProcessorService
class Integrations::Openai::ProcessorService < Integrations::OpenaiBaseService
  def reply_suggestion_message
    make_api_call(reply_suggestion_body)
  end

@@ -11,68 +11,16 @@ class Integrations::Openai::ProcessorService < Integrations::OpenaiProcessorServ
    make_api_call(rephrase_body)
  end

  def fix_spelling_grammar_message
    make_api_call(fix_spelling_grammar_body)
  end

  def shorten_message
    make_api_call(shorten_body)
  end

  def expand_message
    make_api_call(expand_body)
  end

  def make_friendly_message
    make_api_call(make_friendly_body)
  end

  def make_formal_message
    make_api_call(make_formal_body)
  end

  def simplify_message
    make_api_call(simplify_body)
  end

  private

  def rephrase_body
    build_api_call_body("You are a helpful support agent. Please rephrase the following response to a more #{event['data']['tone']} tone. " \
                        'Reply in the user\'s language.')
  end

  def fix_spelling_grammar_body
    build_api_call_body('You are a helpful support agent. Please fix the spelling and grammar of the following response. ' \
                        'Reply in the user\'s language.')
  end

  def shorten_body
    build_api_call_body("You are a helpful support agent. Please shorten the following response. Reply in the user's language.")
  end

  def expand_body
    build_api_call_body("You are a helpful support agent. Please expand the following response. Reply in the user's language.")
  end

  def make_friendly_body
    build_api_call_body("You are a helpful support agent. Please make the following response more friendly. Reply in the user's language.")
  end

  def make_formal_body
    build_api_call_body("You are a helpful support agent. Please make the following response more formal. Reply in the user's language.")
  end

  def simplify_body
    build_api_call_body("You are a helpful support agent. Please simplify the following response. Reply in the user's language.")
  end

  def build_api_call_body(system_content, user_content = event['data']['content'])
    {
      model: GPT_MODEL,
      messages: [
        { role: 'system', content: system_content },
        { role: 'user', content: user_content }
        { role: 'system',
          content: "You are a helpful support agent. Please rephrase the following response to a more #{event['data']['tone']} tone. " \
                   "Reply in the user's language." },
        { role: 'user', content: event['data']['content'] }
      ]
    }.to_json
  end
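For context on the refactor above: each *_message helper now just wraps make_api_call around a payload assembled by build_api_call_body from one system instruction plus the agent's draft reply. Below is a minimal, self-contained sketch of the request body a call such as shorten_body would produce; the standalone variables and the draft text are illustrative stand-ins for the hook/event objects and are not part of the commit.

require 'json'

# Hypothetical stand-ins for values normally supplied by the event payload.
gpt_model = 'gpt-3.5-turbo'
event = { 'data' => { 'content' => 'Hi, your refund was processed yesterday; please allow 3 to 5 days.' } }

# Mirrors build_api_call_body: one system instruction plus the draft as the user message.
request_body = {
  model: gpt_model,
  messages: [
    { role: 'system', content: "You are a helpful support agent. Please shorten the following response. Reply in the user's language." },
    { role: 'user', content: event['data']['content'] }
  ]
}.to_json

puts request_body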
80  lib/integrations/openai_base_service.rb  Normal file
@@ -0,0 +1,80 @@
class Integrations::OpenaiBaseService
  # GPT-3.5 supports 4,096 tokens
  # 1 token is approx. 4 characters
  # 4,096 * 4 = 16,384 characters; sticking to 15,000 to be safe
  TOKEN_LIMIT = 15_000
  API_URL = 'https://api.openai.com/v1/chat/completions'.freeze
  GPT_MODEL = 'gpt-3.5-turbo'.freeze

  ALLOWED_EVENT_NAMES = %w[rephrase summarize reply_suggestion].freeze
  CACHEABLE_EVENTS = %w[].freeze

  pattr_initialize [:hook!, :event!]

  def perform
    return nil unless valid_event_name?

    return value_from_cache if value_from_cache.present?

    response = send("#{event_name}_message")
    save_to_cache(response) if response.present?

    response
  end

  private

  def event_name
    event['name']
  end

  def cache_key
    return nil unless event_is_cacheable?

    conversation = find_conversation
    return nil unless conversation

    # since the cached value depends on the conversation's last_activity_at, it will always be fresh
    format(::Redis::Alfred::OPENAI_CONVERSATION_KEY, event_name: event_name, conversation_id: conversation.id,
                                                     updated_at: conversation.last_activity_at.to_i)
  end

  def value_from_cache
    return nil unless event_is_cacheable?
    return nil if cache_key.blank?

    Redis::Alfred.get(cache_key)
  end

  def save_to_cache(response)
    return nil unless event_is_cacheable?

    Redis::Alfred.setex(cache_key, response)
  end

  def find_conversation
    hook.account.conversations.find_by(display_id: event['data']['conversation_display_id'])
  end

  def valid_event_name?
    # self.class::ALLOWED_EVENT_NAMES is a way to access the ALLOWED_EVENT_NAMES defined in the class hierarchy of the current object.
    # This ensures that if ALLOWED_EVENT_NAMES is updated elsewhere in its ancestors, we access the latest value.
    self.class::ALLOWED_EVENT_NAMES.include?(event_name)
  end

  def event_is_cacheable?
    # self.class::CACHEABLE_EVENTS is a way to access the CACHEABLE_EVENTS defined in the class hierarchy of the current object.
    # This ensures that if CACHEABLE_EVENTS is updated elsewhere in its ancestors, we access the latest value.
    self.class::CACHEABLE_EVENTS.include?(event_name)
  end

  def make_api_call(body)
    headers = {
      'Content-Type' => 'application/json',
      'Authorization' => "Bearer #{hook.settings['api_key']}"
    }

    response = HTTParty.post(API_URL, headers: headers, body: body)
    JSON.parse(response.body)['choices'].first['message']['content']
  end
end
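The lookup comments in valid_event_name? and event_is_cacheable? describe the extension point of this base class: a subclass opts events in simply by redefining ALLOWED_EVENT_NAMES and CACHEABLE_EVENTS, and the base class picks up the subclass's values through self.class::. Below is a minimal hypothetical sketch of that pattern; ExampleOpenaiService and its event list are illustrative only (not part of this commit), and it assumes the code is loaded inside the app where the base class exists.

class ExampleOpenaiService < Integrations::OpenaiBaseService
  # Redefining the constants is enough: perform, valid_event_name? and
  # event_is_cacheable? read them via self.class::, so these values take
  # precedence over the ones defined on the base class.
  ALLOWED_EVENT_NAMES = %w[rephrase summarize reply_suggestion label_suggestion].freeze
  CACHEABLE_EVENTS = %w[label_suggestion].freeze

  def label_suggestion_message
    # perform dispatches here via send("#{event_name}_message") when the event
    # name is 'label_suggestion'; the response is cached because the event is
    # listed in CACHEABLE_EVENTS.
    make_api_call({ model: GPT_MODEL, messages: [] }.to_json)
  end
end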