Unverified commit 801b4872, authored by Jessie Young, committed by GitLab

Merge branch 'jy-issue-epic-tool-conversion' into 'master'

@@ -8,7 +8,15 @@ module AiDependent
   def prompt
     return { prompt: base_prompt } unless provider_prompt_class
 
-    provider_prompt_class.prompt(prompt_options)
+    if claude_3_enabled? && provider_prompt_class.respond_to?(:claude_3_prompt)
+      provider_prompt_class.claude_3_prompt(prompt_options)
+    else
+      provider_prompt_class.prompt(prompt_options)
+    end
   end
 
+  def claude_3_enabled?
+    Feature.enabled?(:ai_claude_3_sonnet, context.current_user)
+  end
+
   def request(&block)
......
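Taken together, the `AiDependent` change routes every tool through one dispatch point. A minimal standalone sketch of that pattern, with `Feature` and the provider prompt class stubbed out (all names here other than `claude_3_prompt`, `prompt`, and the `:ai_claude_3_sonnet` flag are hypothetical):

```ruby
# Stand-ins for GitLab's Feature flag checker and a provider prompt class,
# used only to show the dispatch order of AiDependent#prompt above.
module Feature
  def self.enabled?(_flag, _actor)
    true # flip to false to exercise the legacy path
  end
end

class FakeProviderPrompt
  def self.claude_3_prompt(options)
    { prompt: "claude 3 prompt for #{options[:input]}" }
  end

  def self.prompt(options)
    { prompt: "legacy prompt for #{options[:input]}" }
  end
end

def build_prompt(provider_prompt_class, prompt_options, user)
  # Same precedence as the patch: prefer claude_3_prompt when the flag is on
  # AND the provider class implements it; otherwise fall back to #prompt.
  if Feature.enabled?(:ai_claude_3_sonnet, user) &&
      provider_prompt_class.respond_to?(:claude_3_prompt)
    provider_prompt_class.claude_3_prompt(prompt_options)
  else
    provider_prompt_class.prompt(prompt_options)
  end
end

puts build_prompt(FakeProviderPrompt, { input: 'foo?' }, :user)[:prompt]
# => "claude 3 prompt for foo?"
```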
@@ -31,10 +31,8 @@ class Executor < Identifier
         vertex_ai: ::Gitlab::Llm::Chain::Tools::EpicIdentifier::Prompts::VertexAi
       }.freeze
 
-      # our template
-      PROMPT_TEMPLATE = [
-        Utils::Prompt.as_system(
-          <<~PROMPT
+      SYSTEM_PROMPT = Utils::Prompt.as_system(
+        <<~PROMPT
           You can fetch information about a resource called: an epic.
           An epic can be referenced by url or numeric IDs preceded by symbol.
           An epic can also be referenced by a GitLab reference.
@@ -95,8 +93,11 @@ class Executor < Identifier
           ```
           Begin!
-          PROMPT
-        ),
+        PROMPT
+      )
+
+      PROMPT_TEMPLATE = [
+        SYSTEM_PROMPT,
         Utils::Prompt.as_assistant("%<suggestions>s"),
         Utils::Prompt.as_user("Question: %<input>s")
       ].freeze
......
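The refactor above extracts the system message into `SYSTEM_PROMPT` so the new Claude 3 path can reuse it. The specs at the end of this diff read `SYSTEM_PROMPT[1]` for the content, which suggests `Utils::Prompt.as_system` returns a `[role, content]` pair; a sketch under that assumption:

```ruby
# Assumed shape of the Utils::Prompt role helpers, inferred from the specs
# below that index SYSTEM_PROMPT[1] to reach the content string.
module Utils
  module Prompt
    def self.as_system(content) = [:system, content]
    def self.as_assistant(*content) = [:assistant, *content]
    def self.as_user(content) = [:user, content]
  end
end

SYSTEM_PROMPT = Utils::Prompt.as_system(
  'You can fetch information about a resource called: an epic.'
)

# The template keeps the legacy [system, assistant, user] ordering; only the
# system entry is now shared through the SYSTEM_PROMPT constant.
PROMPT_TEMPLATE = [
  SYSTEM_PROMPT,
  Utils::Prompt.as_assistant("%<suggestions>s"),
  Utils::Prompt.as_user("Question: %<input>s")
].freeze

p SYSTEM_PROMPT[0] # => :system
p SYSTEM_PROMPT[1] # => "You can fetch information about a resource called: an epic."
```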
@@ -9,9 +9,25 @@ module Prompts
        class Anthropic
          include Concerns::AnthropicPrompt
 
+          def self.claude_3_prompt(options)
+            conversation = Utils::Prompt.role_conversation([
+              ::Gitlab::Llm::Chain::Tools::EpicIdentifier::Executor::SYSTEM_PROMPT,
+              Utils::Prompt.as_user(options[:input]),
+              Utils::Prompt.as_assistant(options[:suggestions], "```json
+\{
+\"ResourceIdentifierType\": \"")
+            ])
+
+            {
+              prompt: conversation,
+              options: { model: ::Gitlab::Llm::AiGateway::Client::CLAUDE_3_HAIKU }
+            }
+          end
+
          def self.prompt(options)
            base_prompt = Utils::Prompt.no_role_text(
-              ::Gitlab::Llm::Chain::Tools::EpicIdentifier::Executor::PROMPT_TEMPLATE, options
+              ::Gitlab::Llm::Chain::Tools::EpicIdentifier::Executor::PROMPT_TEMPLATE,
+              options
            )
 
            {
......
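Note the assistant turn ends mid-JSON (`"ResourceIdentifierType": "`), prefilling the model's reply so Claude continues the JSON value instead of free-forming. The specs below assert that `role_conversation` yields three `{role:, content:}` hashes; a hedged sketch of that behavior (the newline join for multi-part content is an assumption):

```ruby
# Assumed behavior of Utils::Prompt.role_conversation, inferred from the spec
# assertions below: each [role, *content] entry becomes a {role:, content:}
# hash, with multi-part content concatenated (the "\n" join is a guess).
def role_conversation(entries)
  entries.map do |role, *content|
    { role: role, content: content.join("\n") }
  end
end

conversation = role_conversation([
  [:system, 'You can fetch information about a resource called: an epic.'],
  [:user, 'foo?'],
  [:assistant, 'some suggestions', "```json\n{\n\"ResourceIdentifierType\": \""]
])

conversation.each { |turn| p turn[:role] }
# => :system, :user, :assistant — the three-turn shape the specs assert
```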
@@ -35,10 +35,8 @@ class Executor < Identifier
         'reference' => Issue.reference_pattern
       }.freeze
 
-      # our template
-      PROMPT_TEMPLATE = [
-        Utils::Prompt.as_system(
-          <<~PROMPT
+      SYSTEM_PROMPT = Utils::Prompt.as_system(
+        <<~PROMPT
           You can fetch information about a resource called: an issue.
           An issue can be referenced by url or numeric IDs preceded by symbol.
           An issue can also be referenced by a GitLab reference. A GitLab reference ends with a number preceded by the delimiter # and contains one or more /.
@@ -97,8 +95,11 @@ class Executor < Identifier
           ```
           Begin!
-          PROMPT
-        ),
+        PROMPT
+      )
+
+      PROMPT_TEMPLATE = [
+        SYSTEM_PROMPT,
         Utils::Prompt.as_assistant("%<suggestions>s"),
         Utils::Prompt.as_user("Question: %<input>s")
       ].freeze
......
@@ -8,6 +8,22 @@ module IssueReader
      module Prompts
        class Anthropic
          include Concerns::AnthropicPrompt
 
+          def self.claude_3_prompt(options)
+            conversation = Utils::Prompt.role_conversation([
+              ::Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor::SYSTEM_PROMPT,
+              Utils::Prompt.as_user(options[:input]),
+              Utils::Prompt.as_assistant(options[:suggestions], "```json
+\{
+\"ResourceIdentifierType\": \"")
+            ])
+
+            {
+              prompt: conversation,
+              options: { model: ::Gitlab::Llm::AiGateway::Client::CLAUDE_3_HAIKU }
+            }
+          end
+
          def self.prompt(options)
            base_prompt = Utils::Prompt.no_role_text(
              ::Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor::PROMPT_TEMPLATE, options
......
@@ -5,6 +5,7 @@ module Llm
   module Concerns
     module AvailableModels
       CLAUDE_3_SONNET = 'claude-3-sonnet-20240229'
+      CLAUDE_3_HAIKU = 'claude-3-haiku-20240307'
       DEFAULT_MODEL = 'claude-2.1'
       DEFAULT_INSTANT_MODEL = 'claude-instant-1.2'
@@ -12,8 +13,7 @@ module AvailableModels
       VERTEX_MODEL_CODE = 'code-bison'
       VERTEX_MODEL_CODECHAT = 'codechat-bison'
       VERTEX_MODEL_TEXT = 'text-bison'
-      ANTHROPIC_MODELS = [DEFAULT_MODEL, CLAUDE_3_SONNET, DEFAULT_INSTANT_MODEL].freeze
+      ANTHROPIC_MODELS = [DEFAULT_MODEL, CLAUDE_3_SONNET, CLAUDE_3_HAIKU, DEFAULT_INSTANT_MODEL].freeze
       VERTEX_MODELS = [VERTEX_MODEL_CHAT, VERTEX_MODEL_CODECHAT, VERTEX_MODEL_CODE, VERTEX_MODEL_TEXT].freeze
       AVAILABLE_MODELS = {
......
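Registering `CLAUDE_3_HAIKU` in `ANTHROPIC_MODELS` makes it pass wherever the Anthropic allow-list is checked. A minimal sketch of such a check (`resolve_model` is hypothetical, not GitLab's API):

```ruby
# Model identifiers copied from the diff above.
CLAUDE_3_SONNET = 'claude-3-sonnet-20240229'
CLAUDE_3_HAIKU = 'claude-3-haiku-20240307'
DEFAULT_MODEL = 'claude-2.1'
DEFAULT_INSTANT_MODEL = 'claude-instant-1.2'

ANTHROPIC_MODELS = [DEFAULT_MODEL, CLAUDE_3_SONNET, CLAUDE_3_HAIKU, DEFAULT_INSTANT_MODEL].freeze

# Hypothetical guard: accept only allow-listed models, else fall back.
def resolve_model(requested)
  ANTHROPIC_MODELS.include?(requested) ? requested : DEFAULT_MODEL
end

p resolve_model(CLAUDE_3_HAIKU) # => "claude-3-haiku-20240307"
p resolve_model('not-a-model')  # => "claude-2.1"
```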
@@ -17,17 +17,27 @@
   let(:logger) { instance_double('Gitlab::Llm::Logger') }
 
   describe '#prompt' do
-    context 'when prompt is called' do
-      it 'returns provider specific prompt' do
-        tool = ::Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor.new(context: context, options: options)
+    context "when claude 3 FF is enabled" do
+      it "returns claude 3 prompt" do
+        tool = ::Gitlab::Llm::Chain::Tools::IssueReader::Executor.new(context: context, options: options)
 
         expect(tool).not_to receive(:base_prompt).and_call_original
+        expect(tool.class::PROVIDER_PROMPT_CLASSES[:anthropic]).to receive(:claude_3_prompt).and_call_original
 
-        prompt = tool.prompt[:prompt]
-
-        expect(prompt).to include("You can fetch information about a resource called: an issue.")
-        expect(prompt).to include("Human:")
-        expect(prompt).to include("Assistant:")
+        tool.prompt
       end
     end
+
+    context "when claude 3 FF is disabled" do
+      before do
+        stub_feature_flags(ai_claude_3_sonnet: false)
+      end
+
+      it "returns provider base prompt" do
+        tool = ::Gitlab::Llm::Chain::Tools::IssueReader::Executor.new(context: context, options: options)
+
+        expect(tool.class::PROVIDER_PROMPT_CLASSES[:anthropic]).to receive(:prompt).and_call_original
+
+        tool.prompt
+      end
+    end
......
@@ -3,12 +3,15 @@
 require 'spec_helper'
 
 RSpec.describe Gitlab::Llm::Chain::Tools::EpicReader::Prompts::Anthropic, feature_category: :duo_chat do
+  let(:options) do
+    {
+      input: 'foo?',
+      suggestions: "some suggestions"
+    }
+  end
+
   describe '.prompt' do
     it 'returns prompt' do
-      options = {
-        input: 'foo?',
-        suggestions: "some suggestions"
-      }
-
       prompt = described_class.prompt(options)[:prompt]
 
       expect(prompt).to include('Human:')
@@ -19,4 +22,32 @@
       expect(prompt).to include('You can fetch information about a resource called: an epic.')
     end
   end
+
+  describe '.claude_3_prompt' do
+    context "when calling claude 3 prompt" do
+      it "returns prompt" do
+        prompt = described_class.claude_3_prompt(options)[:prompt]
+
+        expect(prompt.length).to eq(3)
+        expect(prompt[0][:role]).to eq(:system)
+        expect(prompt[0][:content]).to eq(system_prompt)
+        expect(prompt[1][:role]).to eq(:user)
+        expect(prompt[1][:content]).to eq(options[:input])
+        expect(prompt[2][:role]).to eq(:assistant)
+        expect(prompt[2][:content]).to include(options[:suggestions], "\"ResourceIdentifierType\": \"")
+      end
+
+      it "calls with haiku model" do
+        model = described_class.claude_3_prompt(options)[:options][:model]
+
+        expect(model).to eq(::Gitlab::Llm::AiGateway::Client::CLAUDE_3_HAIKU)
+      end
+    end
+  end
+
+  def system_prompt
+    ::Gitlab::Llm::Chain::Tools::EpicIdentifier::Executor::SYSTEM_PROMPT[1]
+  end
 end
@@ -3,12 +3,15 @@
 require 'spec_helper'
 
 RSpec.describe Gitlab::Llm::Chain::Tools::IssueReader::Prompts::Anthropic, feature_category: :duo_chat do
+  let(:options) do
+    {
+      input: 'foo?',
+      suggestions: "some suggestions"
+    }
+  end
+
   describe '.prompt' do
     it 'returns prompt' do
-      options = {
-        input: 'foo?',
-        suggestions: "some suggestions"
-      }
-
      prompt = described_class.prompt(options)[:prompt]
 
      expect(prompt).to include('Human:')
@@ -19,4 +22,31 @@
       expect(prompt).to include('You can fetch information about a resource called: an issue.')
     end
   end
+
+  describe '.claude_3_prompt' do
+    it 'returns claude 3 prompt' do
+      prompt = described_class.claude_3_prompt(options)[:prompt]
+
+      expect(prompt.length).to eq(3)
+      expect(prompt[0][:role]).to eq(:system)
+      expect(prompt[0][:content]).to eq(system_prompt)
+      expect(prompt[1][:role]).to eq(:user)
+      expect(prompt[1][:content]).to eq(options[:input])
+      expect(prompt[2][:role]).to eq(:assistant)
+      expect(prompt[2][:content]).to include(options[:suggestions], "\"ResourceIdentifierType\": \"")
+    end
+
+    it "calls with haiku model" do
+      model = described_class.claude_3_prompt(options)[:options][:model]
+
+      expect(model).to eq(::Gitlab::Llm::AiGateway::Client::CLAUDE_3_HAIKU)
+    end
+  end
+
+  def system_prompt
+    ::Gitlab::Llm::Chain::Tools::IssueReader::Executor::SYSTEM_PROMPT[1]
+  end
 end