fix: OpenAI label suggestion response payload (#10008)

Muhsin Keloth
2024-08-22 15:32:33 +05:30
committed by GitHub
parent d19a9c38d7
commit eb6de74269
3 changed files with 6 additions and 4 deletions


@@ -27,11 +27,13 @@ module Enterprise::Integrations::OpenaiProcessorService
response = make_api_call(label_suggestion_body)
return response if response[:error].present?
# LLMs are not deterministic, so this is a band-aid solution.
# Remove what, you ask? Sometimes the response includes
# "Labels:" in its reply in some format. This is a hacky way to remove it.
# TODO: Fix with a better prompt
-response[:message] ? response[:message].gsub(/^(label|labels):/i, '') : ''
+{ message: response[:message] ? response[:message].gsub(/^(label|labels):/i, '') : '' }
end
private
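A minimal sketch (illustrative only, not part of the diff), assuming response[:message] carries the raw completion text: the service strips any leading "Label:"/"Labels:" prefix and now returns a hash instead of a bare string.

    raw = 'Labels: billing, refund'                        # hypothetical OpenAI completion
    message = raw ? raw.gsub(/^(label|labels):/i, '') : ''
    payload = { message: message }
    # => { message: " billing, refund" }  (the regex removes only the prefix, not the space after it)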


@@ -33,7 +33,7 @@ module Redis::RedisKeys
LATEST_CHATWOOT_VERSION = 'LATEST_CHATWOOT_VERSION'.freeze
# Check whether a message create with the same source-id is in progress
MESSAGE_SOURCE_KEY = 'MESSAGE_SOURCE_KEY::%<id>s'.freeze
-OPENAI_CONVERSATION_KEY = 'OPEN_AI_CONVERSATION_KEY::%<event_name>s::%<conversation_id>d::%<updated_at>d'.freeze
+OPENAI_CONVERSATION_KEY = 'OPEN_AI_CONVERSATION_KEY::v1::%<event_name>s::%<conversation_id>d::%<updated_at>d'.freeze
## Semaphores / Locks
# We don't want to process messages from the same sender concurrently, to prevent creating double conversations
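A minimal sketch of how the versioned key is composed (the argument values below are made up). Bumping the key to v1 presumably ensures responses cached under the old payload shape are not served after this change.

    key = format(
      Redis::RedisKeys::OPENAI_CONVERSATION_KEY,
      event_name: 'label_suggestion',  # example event name
      conversation_id: 42,             # example conversation id
      updated_at: 1_724_316_000        # example epoch timestamp
    )
    # => "OPEN_AI_CONVERSATION_KEY::v1::label_suggestion::42::1724316000"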


@@ -54,7 +54,7 @@ RSpec.describe Integrations::Openai::ProcessorService do
.to_return(status: 200, body: openai_response, headers: {})
result = subject.perform
-expect(result).to eq('This is a reply from openai.')
+expect(result).to eq({ message: 'This is a reply from openai.' })
end
it 'returns empty string if openai response is blank' do
@@ -63,7 +63,7 @@ RSpec.describe Integrations::Openai::ProcessorService do
.to_return(status: 200, body: '{}', headers: {})
result = subject.perform
-expect(result).to eq('')
+expect(result).to eq({ message: '' })
end
end
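A hedged caller-side sketch (processor, handle_error, and the surrounding flow are illustrative, not from this diff): callers now read the text out of the returned hash rather than treating the return value as a string.

    result = processor.perform          # processor: an Integrations::Openai::ProcessorService instance
    if result[:error].present?
      handle_error(result[:error])      # hypothetical helper; error payloads pass through unchanged
    else
      suggested_labels = result[:message].to_s  # '' when OpenAI returned nothing
    end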