diff --git a/ee/lib/gitlab/llm/chain/answers/streamed_json.rb b/ee/lib/gitlab/llm/chain/answers/streamed_json.rb
deleted file mode 100644
index e9f184f0cae9adc17e4034d27af8260733af9d58..0000000000000000000000000000000000000000
--- a/ee/lib/gitlab/llm/chain/answers/streamed_json.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Llm
-    module Chain
-      module Answers
-        class StreamedJson < StreamedAnswer
-          def next_chunk(content)
-            return if content.empty?
-
-            parser = Parsers::SingleActionParser.new(output: content)
-            parser.parse
-
-            return unless parser.final_answer
-
-            payload(parser.final_answer)
-          end
-        end
-      end
-    end
-  end
-end
diff --git a/ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb b/ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb
deleted file mode 100644
index d89e3900488cfca08b8f3f57ac934fd7488c9644..0000000000000000000000000000000000000000
--- a/ee/lib/gitlab/llm/chain/parsers/single_action_parser.rb
+++ /dev/null
@@ -1,60 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Llm
-    module Chain
-      module Parsers
-        class SingleActionParser < OutputParser
-          attr_reader :action, :action_input, :thought, :final_answer
-
-          def parse
-            return unless @output
-
-            @parsed_thoughts = parse_json_objects
-
-            return unless @parsed_thoughts.present?
-
-            parse_final_answer
-            parse_action
-          end
-
-          private
-
-          def final_answer?
-            @parsed_thoughts.first[:type] == 'final_answer_delta'
-          end
-
-          def parse_final_answer
-            return unless final_answer?
-
-            @final_answer = ''
-
-            @parsed_thoughts.each do |t|
-              @final_answer += t[:data][:text]
-            end
-
-            @final_answer
-          end
-
-          def parse_action
-            response = @parsed_thoughts.first
-
-            return unless response[:type] == 'action'
-
-            @thought = response[:data][:thought]
-            @action = response[:data][:tool].camelcase
-            @action_input = response[:data][:tool_input]
-          end
-
-          def parse_json_objects
-            json_strings = @output.split("\n")
-
-            json_strings.map do |str|
-              Gitlab::Json.parse(str).with_indifferent_access
-            end
-          end
-        end
-      end
-    end
-  end
-end
diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
index 9925d3f887e87840f3eb117bdc999c0c6c9bf9dd..7ab1bba391a1642fdb574e34f800a7f50ad8a9d7 100644
--- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
+++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
@@ -16,7 +16,6 @@ class AiGateway < Base
           ENDPOINT = '/v1/chat/agent'
           BASE_ENDPOINT = '/v1/chat'
           BASE_PROMPTS_CHAT_ENDPOINT = '/v1/prompts/chat'
-          CHAT_V2_ENDPOINT = '/v2/chat/agent'
           DEFAULT_TYPE = 'prompt'
           DEFAULT_SOURCE = 'GitLab EE'
           TEMPERATURE = 0.1
@@ -34,11 +33,9 @@ def request(prompt, unit_primitive: nil)
             options = default_options.merge(prompt.fetch(:options, {}))
             return unless model_provider_valid?(options)
 
-            v2_chat_schema = options.delete(:single_action_agent)
-
             response = ai_client.stream(
-              url: endpoint(unit_primitive, v2_chat_schema, options[:use_ai_gateway_agent_prompt]),
-              body: body(v2_chat_schema, prompt, options, unit_primitive: unit_primitive)
+              url: endpoint(unit_primitive, options[:use_ai_gateway_agent_prompt]),
+              body: body(prompt, options, unit_primitive: unit_primitive)
             ) do |data|
               yield data if block_given?
             end
@@ -85,14 +82,12 @@ def model_provider_valid?(options)
            provider(options)
          end
 
-          def endpoint(unit_primitive, v2_chat_schema, use_ai_gateway_agent_prompt)
+          def endpoint(unit_primitive, use_ai_gateway_agent_prompt)
            path = if use_ai_gateway_agent_prompt
                     "#{BASE_PROMPTS_CHAT_ENDPOINT}/#{unit_primitive}"
                   elsif unit_primitive.present?
                     "#{BASE_ENDPOINT}/#{unit_primitive}"
-                  elsif v2_chat_schema
-                    CHAT_V2_ENDPOINT
                   else
                     ENDPOINT
                   end
 
@@ -103,10 +98,8 @@ def endpoint(unit_primitive, v2_chat_schema, use_ai_gateway_agent_prompt)
            "#{base_url}#{path}"
          end
 
-          def body(v2_chat_schema, prompt, options, unit_primitive: nil)
-            if v2_chat_schema
-              request_body_chat_2(prompt: prompt[:prompt], options: options)
-            elsif options[:use_ai_gateway_agent_prompt]
+          def body(prompt, options, unit_primitive: nil)
+            if options[:use_ai_gateway_agent_prompt]
              request_body_agent(inputs: options[:inputs], unit_primitive: unit_primitive)
            else
              request_body(prompt: prompt[:prompt], options: options)
@@ -171,28 +164,6 @@ def model_params(options)
            end
          end
 
-          def request_body_chat_2(prompt:, options: {})
-            option_params = {
-              chat_history: options[:conversation],
-              agent_scratchpad: {
-                agent_type: "react",
-                steps: options[:agent_scratchpad]
-              },
-              context: options[:current_resource_params],
-              current_file: options[:current_file_params],
-              additional_context: options[:additional_context]
-            }.compact
-
-            response = {
-              prompt: prompt,
-              options: option_params,
-              model_metadata: options[:model_metadata],
-              unavailable_resources: unavailable_resources
-            }
-
-            response.compact
-          end
-
          def payload_params(options)
            allowed_params = ALLOWED_PARAMS.fetch(provider(options))
            params = options.slice(*allowed_params)
diff --git a/ee/spec/lib/gitlab/llm/chain/answer_spec.rb b/ee/spec/lib/gitlab/llm/chain/answer_spec.rb
index b0fa29508e110847e1acbad2eb9873e175645d13..bd4ce9d612ab98050db7096eebf914702f5f35ca 100644
--- a/ee/spec/lib/gitlab/llm/chain/answer_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/answer_spec.rb
@@ -100,24 +100,6 @@
        expect(answer.content).to eq(input)
      end
    end
-
-    context 'with different parser' do
-      subject(:answer) do
-        described_class.from_response(
-          response_body: input,
-          tools: tools,
-          context: context,
-          parser_klass: Gitlab::Llm::Chain::Parsers::SingleActionParser
-        )
-      end
-
-      let(:input) { create(:action_chunk, tool: "issue_reader") }
-
-      it 'returns intermediate answer with parsed values and a tool' do
-        expect(answer.is_final?).to eq(false)
-        expect(answer.tool::NAME).to eq('IssueReader')
-      end
-    end
  end
 
  describe '.final_answer' do
diff --git a/ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb b/ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb
deleted file mode 100644
index 481c9593d4504050d93adc7b54952692ba14c37b..0000000000000000000000000000000000000000
--- a/ee/spec/lib/gitlab/llm/chain/answers/streamed_json_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Llm::Chain::Answers::StreamedJson, feature_category: :duo_chat do
-  describe "#next_chunk" do
-    subject { described_class.new.next_chunk(chunk) }
-
-    context "when stream is empty" do
-      let(:chunk) { "" }
-
-      it 'returns nil' do
-        is_expected.to be_nil
-      end
-    end
-
-    context "when stream does not contain the final answer" do
-      let(:chunk) { create(:action_chunk) }
-
-      it 'returns nil' do
-        is_expected.to be_nil
-      end
-    end
-
-    context "when streaming beginning of the answer" do
-      let(:chunk) { create(:final_answer_chunk, chunk: "I") }
-
-      it 'returns stream payload' do
-        is_expected.to eq({ id: 1, content: "I" })
-      end
-    end
-
-    context "when streaming multiple chunks of final answer" do
-      let(:chunk) { create(:final_answer_multi_chunk, chunks: ["Hello", " there"]) }
-
-      it 'returns stream payload' do
-        is_expected.to eq({ id: 1, content: "Hello there" })
-      end
-    end
-  end
-end
diff --git a/ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb b/ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb
deleted file mode 100644
index a1aa9d76a75cd2144f64af6ee6a672d893ba9452..0000000000000000000000000000000000000000
--- a/ee/spec/lib/gitlab/llm/chain/parsers/single_action_parser_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Llm::Chain::Parsers::SingleActionParser, feature_category: :duo_chat do
-  describe "#parse" do
-    let(:parser) { described_class.new(output: output) }
-
-    before do
-      parser.parse
-    end
-
-    context "with final answer" do
-      let(:output) { create(:final_answer_multi_chunk, chunks: ["To", " perform", " a", " Git", " re", "base"]) }
-
-      it "returns only the final answer" do
-        expect(parser.action).to be_nil
-        expect(parser.action_input).to be_nil
-        expect(parser.thought).to be_nil
-        expect(parser.final_answer).to eq("To perform a Git rebase")
-      end
-    end
-
-    context "with chosen action" do
-      let(:output) { create(:action_chunk, thought: "thought", tool: "issue_reader", tool_input: "input") }
-
-      it "returns the action" do
-        expect(parser.action).to eq("IssueReader")
-        expect(parser.action_input).to eq("input")
-        expect(parser.thought).to eq("thought")
-        expect(parser.final_answer).to be_nil
-      end
-    end
-
-    context "with no output" do
-      let(:output) { nil }
-
-      it "returns nil" do
-        expect(parser.action).to be_nil
-        expect(parser.final_answer).to be_nil
-      end
-    end
-
-    context "with empty output" do
-      let(:output) { "" }
-
-      it "returns nil" do
-        expect(parser.action).to be_nil
-        expect(parser.final_answer).to be_nil
-      end
-    end
-  end
-end
diff --git a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb
index 476ccf65e7154ebd8dc512c481937a364741b606..bfc767d979aa9fb50cb21378dbf220f9c2548a14 100644
--- a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb
@@ -197,60 +197,6 @@
      it_behaves_like 'performing request to the AI Gateway'
    end
 
-    context 'when request is sent for a new ReAct Duo Chat prompt' do
-      let(:endpoint) { described_class::CHAT_V2_ENDPOINT }
-      let(:prompt) { { prompt: user_prompt, options: options } }
-      let(:unavailable_resources) { %w[Pipelines Vulnerabilities] }
-
-      let(:model_metadata) do
-        { api_key: "test_token", endpoint: "http://localhost:11434/v1", name: "mistral", provider: :openai, identifier: 'provider/some-cool-model' }
-      end
-
-      let(:options) do
-        {
-          agent_scratchpad: [],
-          single_action_agent: true,
-          conversation: "{:role=>:user, :content=>\"previous question\"}",
-          current_resource_params: {
-            type: "issue",
-            content: "string"
-          },
-          current_file_params: {
-            file_path: "never.rb",
-            data: "puts 'gonna give you up'",
you up'", - selected_code: true - } - }, - model_metadata: model_metadata, - unavailable_resources: unavailable_resources - } - end - - it_behaves_like 'performing request to the AI Gateway' - end - context 'when request is sent to chat tools implemented via agents' do let_it_be(:feature_setting) { create(:ai_feature_setting, feature: :duo_chat, provider: :self_hosted) } @@ -328,36 +274,5 @@ end end end - - context 'when request is sent for a new ReAct Duo Chat prompt without optional params' do - let(:endpoint) { described_class::CHAT_V2_ENDPOINT } - - let(:prompt) { { prompt: user_prompt, options: options } } - let(:unavailable_resources) { %w[Pipelines Vulnerabilities] } - - let(:options) do - { - agent_scratchpad: [], - single_action_agent: true, - conversation: "" - } - end - - let(:body) do - { - prompt: user_prompt, - options: { - chat_history: "", - agent_scratchpad: { - agent_type: "react", - steps: [] - } - }, - unavailable_resources: unavailable_resources - } - end - - it_behaves_like 'performing request to the AI Gateway' - end end end