diff --git a/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic.rb b/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic.rb
index ec38cd94d8bf570550ac0268823dfb41ad09386a..9cba40ad2d2f1636b5288ad57776d35c3f5dda25 100644
--- a/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic.rb
+++ b/ee/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic.rb
@@ -12,8 +12,9 @@ class Anthropic < Base
               def self.prompt(options)
                 history = truncated_conversation_list(options[:conversation])
+                base = base_prompt(options)
 
-                text = history + base_prompt(options)
+                text = deduplicate_roles(history + base)
 
                 Requests::Anthropic.prompt(text)
               end
@@ -35,6 +36,27 @@ def self.truncated_conversation_list(conversation)
                   { role: message.role.to_sym, content: message.content }
                 end
               end
+
+              def self.deduplicate_roles(messages)
+                result = []
+                previous_role = nil
+
+                messages.each do |message|
+                  current_role = message[:role]
+                  current_content = message[:content]
+
+                  if current_role == previous_role
+                    # If the current role is the same as the previous one, update the content
+                    result.last[:content] = current_content
+                  else
+                    # If the role is different, add a new entry
+                    result << { role: current_role, content: current_content }
+                    previous_role = current_role
+                  end
+                end
+
+                result
+              end
             end
           end
         end
diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb
index 0451e57a100292129ee043cc57479ae52fcd007b..57d1bc22ff387b0c1fe08619a0e338533f430378 100644
--- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb
@@ -88,6 +88,40 @@
         expect(assistant_prompts[1][:content]).to eq("response 2")
       end
     end
+
+    context 'when role is duplicated in history' do
+      let(:options) do
+        {
+          tools_definitions: "tool definitions",
+          tool_names: "tool names",
+          user_input: user_input,
+          agent_scratchpad: "some observation",
+          conversation: [
+            build(:ai_message, request_id: 'uuid1', role: 'user', content: 'question 1'),
+            build(:ai_message, request_id: 'uuid1', role: 'assistant', content: 'response 1'),
+            build(:ai_message, request_id: 'uuid1', role: 'user', content: 'question 2'),
+            build(:ai_message, request_id: 'uuid1', role: 'assistant', content: 'duplicated response 1'),
+            build(:ai_message, request_id: 'uuid1', role: 'assistant', content: 'duplicated response 2')
+          ],
+          prompt_version: prompt_version,
+          current_code: "",
+          current_resource: "",
+          resources: "",
+          current_user: user,
+          zero_shot_prompt: zero_shot_prompt,
+          system_prompt: system_prompt,
+          source_template: "source template"
+        }
+      end
+
+      it 'returns last message with role' do
+        prompt = subject
+
+        expect(prompt).to be_instance_of(Array)
+        expect(prompt).not_to include(hash_including(role: :assistant, content: 'duplicated response 1'))
+        expect(prompt).to include(hash_including(role: :assistant, content: 'duplicated response 2'))
+      end
+    end
   end
 
   it_behaves_like 'zero shot prompt'
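
For reference, below is a minimal standalone sketch (plain Ruby, outside the GitLab codebase) of the behaviour the new deduplicate_roles helper and its spec describe: consecutive messages that share a role are collapsed into a single entry that keeps only the most recent content, presumably so the prompt alternates user/assistant turns as the Anthropic prompt format expects. The sample history below is hypothetical and only illustrates the shape of the truncated conversation list.

# Sketch of the deduplication behaviour: collapse consecutive messages
# with the same role, keeping only the most recent content.
def deduplicate_roles(messages)
  result = []
  previous_role = nil

  messages.each do |message|
    if message[:role] == previous_role
      # Same role as the previous entry: overwrite its content.
      result.last[:content] = message[:content]
    else
      # Role changed: append a new entry and remember the role.
      result << { role: message[:role], content: message[:content] }
      previous_role = message[:role]
    end
  end

  result
end

# Hypothetical history mirroring the new spec's conversation fixture.
history = [
  { role: :user,      content: 'question 2' },
  { role: :assistant, content: 'duplicated response 1' },
  { role: :assistant, content: 'duplicated response 2' }
]

pp deduplicate_roles(history)
# Keeps only 'question 2' and 'duplicated response 2', matching the
# expectations in the new 'when role is duplicated in history' spec context.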