Unverified commit 938d039a, authored by Kassio Borges, committed by GitLab

Merge branch 'cleanup-duo-chat-unused-code' into 'master'

Cleanup unused code after move to React Executor

See merge request https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169951



Merged-by: Kassio Borges <kborges@gitlab.com>
Approved-by: Manoj M J <mmj@gitlab.com>
Approved-by: Kassio Borges <kborges@gitlab.com>
Co-authored-by: Tetiana Chupryna <tchupryna@gitlab.com>
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Answers
        class StreamedJson < StreamedAnswer
          def next_chunk(content)
            return if content.empty?

            parser = Parsers::SingleActionParser.new(output: content)
            parser.parse

            return unless parser.final_answer

            payload(parser.final_answer)
          end
        end
      end
    end
  end
end
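As context for reviewers, a minimal usage sketch. It is not part of this MR: the NDJSON chunk shape is inferred from SingleActionParser below and from the streamed_json_spec at the end of this diff, and the payload shape ({ id:, content: }) comes from the StreamedAnswer parent class.

# Hypothetical illustration. Each streamed chunk is a run of newline-delimited
# JSON objects; only final_answer_delta objects produce a payload.
answer = Gitlab::Llm::Chain::Answers::StreamedJson.new

chunk = <<~NDJSON
  {"type":"final_answer_delta","data":{"text":"Hello"}}
  {"type":"final_answer_delta","data":{"text":" there"}}
NDJSON

answer.next_chunk(chunk)
# => { id: 1, content: "Hello there" }, per the spec expectations below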
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Parsers
        class SingleActionParser < OutputParser
          attr_reader :action, :action_input, :thought, :final_answer

          def parse
            return unless @output

            @parsed_thoughts = parse_json_objects

            return unless @parsed_thoughts.present?

            parse_final_answer
            parse_action
          end

          private

          def final_answer?
            @parsed_thoughts.first[:type] == 'final_answer_delta'
          end

          def parse_final_answer
            return unless final_answer?

            @final_answer = ''

            @parsed_thoughts.each do |t|
              @final_answer += t[:data][:text]
            end

            @final_answer
          end

          def parse_action
            response = @parsed_thoughts.first

            return unless response[:type] == 'action'

            @thought = response[:data][:thought]
            @action = response[:data][:tool].camelcase
            @action_input = response[:data][:tool_input]
          end

          def parse_json_objects
            json_strings = @output.split("\n")

            json_strings.map do |str|
              Gitlab::Json.parse(str).with_indifferent_access
            end
          end
        end
      end
    end
  end
end
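A usage sketch for the action event type, the second of the two NDJSON event types the parser distinguishes (the JSON values here are made up for illustration):

# Hypothetical action event, one JSON object per line (NDJSON).
output = '{"type":"action","data":{"thought":"look up the issue","tool":"issue_reader","tool_input":"#42"}}'

parser = Gitlab::Llm::Chain::Parsers::SingleActionParser.new(output: output)
parser.parse

parser.action       # => "IssueReader" (tool name camelcased)
parser.action_input # => "#42"
parser.thought      # => "look up the issue"
parser.final_answer # => nil, because the first object is not a final_answer_delta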
@@ -16,7 +16,6 @@ class AiGateway < Base
       ENDPOINT = '/v1/chat/agent'
       BASE_ENDPOINT = '/v1/chat'
       BASE_PROMPTS_CHAT_ENDPOINT = '/v1/prompts/chat'
-      CHAT_V2_ENDPOINT = '/v2/chat/agent'
       DEFAULT_TYPE = 'prompt'
       DEFAULT_SOURCE = 'GitLab EE'
       TEMPERATURE = 0.1
@@ -34,11 +33,9 @@ def request(prompt, unit_primitive: nil)
         options = default_options.merge(prompt.fetch(:options, {}))
         return unless model_provider_valid?(options)

-        v2_chat_schema = options.delete(:single_action_agent)
-
         response = ai_client.stream(
-          url: endpoint(unit_primitive, v2_chat_schema, options[:use_ai_gateway_agent_prompt]),
-          body: body(v2_chat_schema, prompt, options, unit_primitive: unit_primitive)
+          url: endpoint(unit_primitive, options[:use_ai_gateway_agent_prompt]),
+          body: body(prompt, options, unit_primitive: unit_primitive)
         ) do |data|
           yield data if block_given?
         end
@@ -85,14 +82,12 @@ def model_provider_valid?(options)
         provider(options)
       end

-      def endpoint(unit_primitive, v2_chat_schema, use_ai_gateway_agent_prompt)
+      def endpoint(unit_primitive, use_ai_gateway_agent_prompt)
         path =
           if use_ai_gateway_agent_prompt
             "#{BASE_PROMPTS_CHAT_ENDPOINT}/#{unit_primitive}"
           elsif unit_primitive.present?
             "#{BASE_ENDPOINT}/#{unit_primitive}"
-          elsif v2_chat_schema
-            CHAT_V2_ENDPOINT
           else
             ENDPOINT
           end
@@ -103,10 +98,8 @@ def endpoint(unit_primitive, v2_chat_schema, use_ai_gateway_agent_prompt)
         "#{base_url}#{path}"
       end

-      def body(v2_chat_schema, prompt, options, unit_primitive: nil)
-        if v2_chat_schema
-          request_body_chat_2(prompt: prompt[:prompt], options: options)
-        elsif options[:use_ai_gateway_agent_prompt]
+      def body(prompt, options, unit_primitive: nil)
+        if options[:use_ai_gateway_agent_prompt]
           request_body_agent(inputs: options[:inputs], unit_primitive: unit_primitive)
         else
           request_body(prompt: prompt[:prompt], options: options)
@@ -171,28 +164,6 @@ def model_params(options)
         end
       end

-      def request_body_chat_2(prompt:, options: {})
-        option_params = {
-          chat_history: options[:conversation],
-          agent_scratchpad: {
-            agent_type: "react",
-            steps: options[:agent_scratchpad]
-          },
-          context: options[:current_resource_params],
-          current_file: options[:current_file_params],
-          additional_context: options[:additional_context]
-        }.compact
-
-        response = {
-          prompt: prompt,
-          options: option_params,
-          model_metadata: options[:model_metadata],
-          unavailable_resources: unavailable_resources
-        }
-
-        response.compact
-      end
-
       def payload_params(options)
         allowed_params = ALLOWED_PARAMS.fetch(provider(options))
         params = options.slice(*allowed_params)
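After this cleanup, endpoint selection no longer branches on single_action_agent. A standalone sketch of the resolution that remains; constants and branch order are copied from the hunks above, but this is not the real class, and 'explain_code' is a made-up unit primitive:

# Sketch only; the real method also handles base_url prefixing.
BASE_ENDPOINT = '/v1/chat'
BASE_PROMPTS_CHAT_ENDPOINT = '/v1/prompts/chat'
ENDPOINT = '/v1/chat/agent'

def endpoint_path(unit_primitive, use_ai_gateway_agent_prompt)
  if use_ai_gateway_agent_prompt
    "#{BASE_PROMPTS_CHAT_ENDPOINT}/#{unit_primitive}"
  elsif unit_primitive # the real code uses unit_primitive.present?
    "#{BASE_ENDPOINT}/#{unit_primitive}"
  else
    ENDPOINT
  end
end

puts endpoint_path('explain_code', true)  # /v1/prompts/chat/explain_code
puts endpoint_path('explain_code', false) # /v1/chat/explain_code
puts endpoint_path(nil, false)            # /v1/chat/agent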
@@ -100,24 +100,6 @@
         expect(answer.content).to eq(input)
       end
     end

-    context 'with different parser' do
-      subject(:answer) do
-        described_class.from_response(
-          response_body: input,
-          tools: tools,
-          context: context,
-          parser_klass: Gitlab::Llm::Chain::Parsers::SingleActionParser
-        )
-      end
-
-      let(:input) { create(:action_chunk, tool: "issue_reader") }
-
-      it 'returns intermediate answer with parsed values and a tool' do
-        expect(answer.is_final?).to eq(false)
-        expect(answer.tool::NAME).to eq('IssueReader')
-      end
-    end
-
   end

   describe '.final_answer' do
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Answers::StreamedJson, feature_category: :duo_chat do
  describe "#next_chunk" do
    subject { described_class.new.next_chunk(chunk) }

    context "when stream is empty" do
      let(:chunk) { "" }

      it 'returns nil' do
        is_expected.to be_nil
      end
    end

    context "when stream does not contain the final answer" do
      let(:chunk) { create(:action_chunk) }

      it 'returns nil' do
        is_expected.to be_nil
      end
    end

    context "when streaming beginning of the answer" do
      let(:chunk) { create(:final_answer_chunk, chunk: "I") }

      it 'returns stream payload' do
        is_expected.to eq({ id: 1, content: "I" })
      end
    end

    context "when streaming multiple chunks of final answer" do
      let(:chunk) { create(:final_answer_multi_chunk, chunks: ["Hello", " there"]) }

      it 'returns stream payload' do
        is_expected.to eq({ id: 1, content: "Hello there" })
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Parsers::SingleActionParser, feature_category: :duo_chat do
  describe "#parse" do
    let(:parser) { described_class.new(output: output) }

    before do
      parser.parse
    end

    context "with final answer" do
      let(:output) { create(:final_answer_multi_chunk, chunks: ["To", " perform", " a", " Git", " re", "base"]) }

      it "returns only the final answer" do
        expect(parser.action).to be_nil
        expect(parser.action_input).to be_nil
        expect(parser.thought).to be_nil
        expect(parser.final_answer).to eq("To perform a Git rebase")
      end
    end

    context "with chosen action" do
      let(:output) { create(:action_chunk, thought: "thought", tool: "issue_reader", tool_input: "input") }

      it "returns the action" do
        expect(parser.action).to eq("IssueReader")
        expect(parser.action_input).to eq("input")
        expect(parser.thought).to eq("thought")
        expect(parser.final_answer).to be_nil
      end
    end

    context "with no output" do
      let(:output) { nil }

      it "returns nil" do
        expect(parser.action).to be_nil
        expect(parser.final_answer).to be_nil
      end
    end

    context "with empty output" do
      let(:output) { "" }

      it "returns nil" do
        expect(parser.action).to be_nil
        expect(parser.final_answer).to be_nil
      end
    end
  end
end
@@ -197,60 +197,6 @@
       it_behaves_like 'performing request to the AI Gateway'
     end

-    context 'when request is sent for a new ReAct Duo Chat prompt' do
-      let(:endpoint) { described_class::CHAT_V2_ENDPOINT }
-      let(:prompt) { { prompt: user_prompt, options: options } }
-      let(:unavailable_resources) { %w[Pipelines Vulnerabilities] }
-
-      let(:model_metadata) do
-        { api_key: "test_token", endpoint: "http://localhost:11434/v1", name: "mistral", provider: :openai, identifier: 'provider/some-cool-model' }
-      end
-
-      let(:options) do
-        {
-          agent_scratchpad: [],
-          single_action_agent: true,
-          conversation: "{:role=>:user, :content=>\"previous question\"}",
-          current_resource_params: {
-            type: "issue",
-            content: "string"
-          },
-          current_file_params: {
-            file_path: "never.rb",
-            data: "puts 'gonna give you up'",
-            selected_code: true
-          },
-          model_metadata: model_metadata
-        }
-      end
-
-      let(:body) do
-        {
-          prompt: user_prompt,
-          options: {
-            chat_history: "{:role=>:user, :content=>\"previous question\"}",
-            agent_scratchpad: {
-              agent_type: "react",
-              steps: []
-            },
-            context: {
-              type: "issue",
-              content: "string"
-            },
-            current_file: {
-              file_path: "never.rb",
-              data: "puts 'gonna give you up'",
-              selected_code: true
-            }
-          },
-          model_metadata: model_metadata,
-          unavailable_resources: unavailable_resources
-        }
-      end
-
-      it_behaves_like 'performing request to the AI Gateway'
-    end
-
     context 'when request is sent to chat tools implemented via agents' do
       let_it_be(:feature_setting) { create(:ai_feature_setting, feature: :duo_chat, provider: :self_hosted) }
@@ -328,36 +274,5 @@
         end
       end
     end
-
-    context 'when request is sent for a new ReAct Duo Chat prompt without optional params' do
-      let(:endpoint) { described_class::CHAT_V2_ENDPOINT }
-      let(:prompt) { { prompt: user_prompt, options: options } }
-      let(:unavailable_resources) { %w[Pipelines Vulnerabilities] }
-
-      let(:options) do
-        {
-          agent_scratchpad: [],
-          single_action_agent: true,
-          conversation: ""
-        }
-      end
-
-      let(:body) do
-        {
-          prompt: user_prompt,
-          options: {
-            chat_history: "",
-            agent_scratchpad: {
-              agent_type: "react",
-              steps: []
-            }
-          },
-          unavailable_resources: unavailable_resources
-        }
-      end
-
-      it_behaves_like 'performing request to the AI Gateway'
-    end
   end
 end