fix: inconsistent OpenAI cache interface (#10009)

Signed-off-by: Shivam Mishra <scm.mymail@gmail.com>
Co-authored-by: Muhsin Keloth <muhsinkeramam@gmail.com>
This commit is contained in:
Shivam Mishra
2024-08-22 23:04:14 +05:30
committed by GitHub
parent a48f98de9d
commit abc511d00f
4 changed files with 23 additions and 5 deletions

View File

@@ -12,7 +12,12 @@ class Api::V1::Accounts::Integrations::HooksController < Api::V1::Accounts::Base
def process_event
response = @hook.process_event(params[:event])
if response[:error]
# For cases like an invalid event, or when the conversation does not have
# enough messages for a label suggestion, the response is nil.
if response.nil?
render json: { message: nil }
elsif response[:error]
render json: { error: response[:error] }, status: :unprocessable_entity
else
render json: { message: response[:message] }

View File

@@ -172,7 +172,7 @@ export default {
delay: { show: 600, hide: 0 },
hideOnClick: true,
}"
class="label-suggestion--option"
class="label-suggestion--option !px-0"
@click="pushOrAddLabel(label.title)"
>
<woot-label

View File

@@ -42,13 +42,26 @@ class Integrations::OpenaiBaseService
return nil unless event_is_cacheable?
return nil if cache_key.blank?
Redis::Alfred.get(cache_key)
deserialize_cached_value(Redis::Alfred.get(cache_key))
end
def deserialize_cached_value(value)
  # Cached entries are stored as JSON strings (see save_to_cache); convert
  # them back into a symbol-keyed structure so callers can read keys like
  # response[:message] directly.
  return nil if value.blank?

  begin
    JSON.parse(value, symbolize_names: true)
  rescue JSON::ParserError
    # An unparsable payload is useless downstream — keys are accessed as
    # symbols, so a raw string would fail anyway. Treat it as a cache miss.
    nil
  end
end
def save_to_cache(response)
  # Only responses for cacheable events are persisted.
  return nil unless event_is_cacheable?

  # Serialize to JSON before writing so hash responses round-trip cleanly
  # through Redis and can be parsed back symmetrically on read.
  serialized = response.to_json
  Redis::Alfred.setex(cache_key, serialized)
end
def conversation

View File

@@ -33,7 +33,7 @@ module Redis::RedisKeys
LATEST_CHATWOOT_VERSION = 'LATEST_CHATWOOT_VERSION'.freeze
# Check whether a message create with the same source-id is in progress
MESSAGE_SOURCE_KEY = 'MESSAGE_SOURCE_KEY::%<id>s'.freeze
OPENAI_CONVERSATION_KEY = 'OPEN_AI_CONVERSATION_KEY::v1::%<event_name>s::%<conversation_id>d::%<updated_at>d'.freeze
OPENAI_CONVERSATION_KEY = 'OPEN_AI_CONVERSATION_KEY::V1::%<event_name>s::%<conversation_id>d::%<updated_at>d'.freeze
## Sempahores / Locks
# We don't want to process messages from the same sender concurrently to prevent creating double conversations