diff --git a/ee/lib/gitlab/llm/cache.rb b/ee/lib/gitlab/llm/cache.rb
index e5f10dd990c9584c1879b6480d43080b987ba73a..7565a8242474ad6b6ffe4bcbf65aacfacbbdf3ea 100644
--- a/ee/lib/gitlab/llm/cache.rb
+++ b/ee/lib/gitlab/llm/cache.rb
@@ -33,7 +33,7 @@ def add(payload)
           role: payload[:role]
         }
         data[:content] = payload[:content][0, MAX_TEXT_LIMIT] if payload[:content]
-        data[:error] = payload[:errors].join(". ") if payload[:errors]
+        data[:error] = payload[:errors].join(". ") if payload[:errors].present?

         cache_data(data)
       end
diff --git a/ee/lib/gitlab/llm/open_ai/completions/tanuki_bot.rb b/ee/lib/gitlab/llm/open_ai/completions/tanuki_bot.rb
index 8a2cbb54ed1a74f2c8a96443e43d0766a2e9f559..078e734b6108b08b86b3dad71364827516bf179f 100644
--- a/ee/lib/gitlab/llm/open_ai/completions/tanuki_bot.rb
+++ b/ee/lib/gitlab/llm/open_ai/completions/tanuki_bot.rb
@@ -13,7 +13,7 @@ def execute(user, resource, options)
             response = ::Gitlab::Llm::TanukiBot.execute(current_user: user, question: question)
             response_modifier = Gitlab::Llm::OpenAi::ResponseModifiers::TanukiBot.new(response)

-            response_options = { request_id: options[:request_id] }
+            response_options = { request_id: params[:request_id] }

             ::Gitlab::Llm::GraphqlSubscriptionResponseService.new(
               user, resource, response_modifier, options: response_options
diff --git a/ee/spec/lib/gitlab/llm/cache_spec.rb b/ee/spec/lib/gitlab/llm/cache_spec.rb
index 8f781533419930fb1920065a5c4b32be8760c8da..a1e624681dd33ae08feaeb904207049909c29e08 100644
--- a/ee/spec/lib/gitlab/llm/cache_spec.rb
+++ b/ee/spec/lib/gitlab/llm/cache_spec.rb
@@ -43,6 +43,15 @@
       expect(last.timestamp).not_to be_nil
     end

+    it 'does not set error when errors are empty' do
+      payload[:errors] = []
+
+      subject.add(payload)
+
+      last = subject.find_all.last
+      expect(last.errors).to eq([])
+    end
+
     it 'raises an exception when role is missing' do
       payload[:role] = nil

diff --git a/ee/spec/lib/gitlab/llm/open_ai/completions/tanuki_bot_spec.rb b/ee/spec/lib/gitlab/llm/open_ai/completions/tanuki_bot_spec.rb
index 53b2ab6da9336ee79023b4c09704c9431791eeba..07b5994c85b87a59457251b7d90fef65479e43a5 100644
--- a/ee/spec/lib/gitlab/llm/open_ai/completions/tanuki_bot_spec.rb
+++ b/ee/spec/lib/gitlab/llm/open_ai/completions/tanuki_bot_spec.rb
@@ -6,7 +6,8 @@
   let_it_be(:user) { create(:user) }

   let(:question) { 'A question' }
-  let(:options) { { question: question, request_id: 'uuid' } }
+  let(:options) { { question: question } }
+  let(:params) { { request_id: 'uuid' } }
   let(:template_class) { ::Gitlab::Llm::OpenAi::Templates::TanukiBot }
   let(:ai_response) do
     {
@@ -19,7 +20,7 @@
     }.to_json
   end

-  subject(:tanuki_bot) { described_class.new(template_class).execute(user, user, options) }
+  subject(:tanuki_bot) { described_class.new(template_class, params).execute(user, user, options) }

   describe '#execute' do
     it 'makes a call to ::Gitlab::Llm::TanukiBot' do
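
Not part of the diff: a minimal Ruby sketch of why the guard in Cache#add changes to #present?. It assumes ActiveSupport (which provides #present?) is loaded, as it is in GitLab, and uses a stand-in payload hash rather than the real cache payload. An empty errors array is truthy in Ruby, so the old condition still stored an empty string as the error; #present? treats [] as blank and skips the key.

# Illustration only; `payload` is a hypothetical stand-in hash.
require 'active_support/core_ext/object/blank'

payload = { errors: [] }

# Old guard: [] is truthy, so an empty string was written as the error.
old_error = payload[:errors].join(". ") if payload[:errors]

# New guard: [].present? is false, so no error is recorded.
new_error = payload[:errors].join(". ") if payload[:errors].present?

puts old_error.inspect # => ""
puts new_error.inspect # => nil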