Unverified commit fd4af503, authored by Igor Drozdov, committed by GitLab

Merge branch 'id-cleanup-code-suggestions-prompts' into 'master'

Clean up migrated prompts for self-hosted code suggestions

See merge request https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167426



Merged-by: Igor Drozdov <idrozdov@gitlab.com>
Approved-by: Patrick Cyiza <jpcyiza@gitlab.com>
Showing 5 additions and 1113 deletions
---
name: ai_custom_models_prompts_migration
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/473156
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/160050
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/473358
milestone: '17.3'
group: group::custom models
type: development
default_enabled: true
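Above is the flag definition being deleted. It was a global development flag (checked without an actor) and enabled by default, so removing the flag together with its guards is behavior-preserving. Its two kinds of call sites, both deleted in this MR, looked like:

# App-code guard removed by this MR (see the dispatch hunks below):
::Feature.enabled?(:ai_custom_models_prompts_migration)

# Spec-side stub forcing the legacy path (see the deleted specs below):
stub_feature_flags(ai_custom_models_prompts_migration: false)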
# frozen_string_literal: true

module CodeSuggestions
  module Prompts
    module CodeCompletion
      class CodeGemmaMessages < AiGatewayCodeCompletionMessage
        private

        def prompt
          <<~PROMPT.strip
            <|fim_prefix|>#{pick_prefix}<|fim_suffix|>#{pick_suffix}<|fim_middle|>
          PROMPT
        end
      end
    end
  end
end
# frozen_string_literal: true

module CodeSuggestions
  module Prompts
    module CodeCompletion
      class CodellamaMessages < AiGatewayCodeCompletionMessage
        private

        def prompt
          <<~PROMPT.strip
            <PRE> #{pick_prefix} <SUF>#{pick_suffix} <MID>
          PROMPT
        end
      end
    end
  end
end
# frozen_string_literal: true

module CodeSuggestions
  module Prompts
    module CodeCompletion
      class CodestralMessages < AiGatewayCodeCompletionMessage
        private

        def prompt
          <<~PROMPT.strip
            <s>[SUFFIX]#{pick_suffix}[PREFIX]#{pick_prefix}
          PROMPT
        end
      end
    end
  end
end
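The three deleted completion classes differ only in their fill-in-the-middle delimiters. A minimal illustration (not the production call path, which goes through pick_prefix/pick_suffix truncation) using the same toy fixture as the deleted specs further down:

# Toy rendering of the three deleted FIM templates; the real classes build
# these via heredocs and call .strip on the result. Output strings match the
# expectations in the deleted specs below.
prefix = "def hello_world():\n"
suffix = "return"

code_gemma = "<|fim_prefix|>#{prefix}<|fim_suffix|>#{suffix}<|fim_middle|>".strip
codellama  = "<PRE> #{prefix} <SUF>#{suffix} <MID>".strip
codestral  = "<s>[SUFFIX]#{suffix}[PREFIX]#{prefix}".strip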
# frozen_string_literal: true

module CodeSuggestions
  module Prompts
    module CodeGeneration
      class CodellamaMessages < AiGatewaySelfHostedMessages
        GATEWAY_PROMPT_VERSION = 3
        MODEL_PROVIDER = 'litellm'

        def extra_params
          {
            prompt_version: self.class::GATEWAY_PROMPT_VERSION
          }
        end

        def prompt
          [{ role: :user, content: instructions }]
        end

        def instructions
          <<~PROMPT.strip
            [INST]<<SYS>> You are a tremendously accurate and skilled code generation agent. We want to generate new #{language.name} code inside the file '#{file_path_info}'. Your task is to provide valid code without any additional explanations, comments, or feedback. <</SYS>>

            #{pick_prefix}
            [SUGGESTION]
            #{pick_suffix}

            The new code you will generate will start at the position of the cursor, which is currently indicated by the [SUGGESTION] tag.
            The comment directly before the cursor position is the instruction, all other comments are not instructions.

            When generating the new code, please ensure the following:
            1. It is valid #{language.name} code.
            2. It matches the existing code's variable, parameter, and function names.
            3. The code fulfills the instructions.
            4. Do not add any comments, including instructions.
            5. Return the code result without any extra explanation or examples.

            If you are not able to generate code based on the given instructions, return an empty result.
            [/INST]
          PROMPT
        end
      end
    end
  end
end
# frozen_string_literal: true

module CodeSuggestions
  module Prompts
    module CodeGeneration
      class MistralMessages < AiGatewaySelfHostedMessages
        GATEWAY_PROMPT_VERSION = 3

        private

        def extra_params
          {
            prompt_version: self.class::GATEWAY_PROMPT_VERSION
          }
        end

        def prompt
          [{ role: :user, content: instructions }]
        end

        def instructions
          <<~PROMPT.strip
            <s>[INST] You are a tremendously accurate and skilled code generation agent. We want to generate new #{language.name} code inside the file '#{file_path_info}'. Your task is to provide valid code without any additional explanations, comments, or feedback.[/INST]

            <s>[INST]
            #{pick_prefix}
            [SUGGESTION]
            #{pick_suffix}
            [/INST]</s>

            <s>[INST]
            The new code you will generate will start at the position of the cursor, which is currently indicated by the [SUGGESTION] tag.
            The comment directly before the cursor position is the instruction, all other comments are not instructions.

            When generating the new code, please ensure the following:
            1. It is valid #{language.name} code.
            2. It matches the existing code's variable, parameter, and function names.
            3. The code fulfills the instructions.
            4. Do not add any comments, including instructions.
            5. Return the code result without any extra explanation or examples.

            If you are not able to generate code based on the given instructions, return an empty result.
            [/INST]</s>
          PROMPT
        end
      end
    end
  end
end
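Both deleted generation classes wrap their model-specific instruction template in the same chat-style envelope: a single user message sent with prompt version 3. A sketch of the shared request fragment, with field names as asserted in the deleted specs below:

# Fields both generation classes contribute to the request (see #extra_params
# and #prompt above); only the rendered instructions differ per model family,
# Llama-2-style [INST]<<SYS>> turns vs Mistral-style <s>[INST] turns.
request_fragment = {
  prompt_version: 3,
  prompt: [{ role: :user, content: 'rendered instructions heredoc' }]
}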
@@ -19,27 +19,8 @@ def feature_setting_name
   def prompt
     if self_hosted?
-      # rubocop:disable Gitlab/FeatureFlagWithoutActor -- Global development flag for migrating the prompts
-      if ::Feature.enabled?(:ai_custom_models_prompts_migration)
-        return CodeSuggestions::Prompts::CodeCompletion::AiGatewayCodeCompletionMessage.new(
-          feature_setting: feature_setting, params: params)
-      end
-      # rubocop:enable Gitlab/FeatureFlagWithoutActor
-      model_name = feature_setting&.self_hosted_model&.model&.to_sym
-      case model_name
-      when :codegemma_2b, :codegemma_7b
-        CodeSuggestions::Prompts::CodeCompletion::CodeGemmaMessages.new(
-          feature_setting: feature_setting, params: params)
-      when :codestral
-        CodeSuggestions::Prompts::CodeCompletion::CodestralMessages.new(
-          feature_setting: feature_setting, params: params)
-      when :codellama_13b_code
-        CodeSuggestions::Prompts::CodeCompletion::CodellamaMessages.new(
-          feature_setting: feature_setting, params: params)
-      else
-        raise "Unknown model: #{model_name}"
-      end
+      CodeSuggestions::Prompts::CodeCompletion::AiGatewayCodeCompletionMessage.new(
+        feature_setting: feature_setting, params: params)
     elsif Feature.enabled?(:use_codestral_for_code_completions, current_user, type: :beta)
       # codestral hosted on vertex
       CodeSuggestions::Prompts::CodeCompletion::VertexCodestral.new(params)
......
@@ -19,24 +19,8 @@ def feature_setting_name
   def prompt
     if self_hosted?
-      # rubocop:disable Gitlab/FeatureFlagWithoutActor -- Global development flag for migrating the prompts
-      if ::Feature.enabled?(:ai_custom_models_prompts_migration)
-        return CodeSuggestions::Prompts::CodeGeneration::AiGatewaySelfHostedMessages.new(
-          feature_setting: feature_setting, params: params)
-      end
-      # rubocop:enable Gitlab/FeatureFlagWithoutActor
-      model_name = feature_setting&.self_hosted_model&.model&.to_sym
-      case model_name
-      when :codellama
-        CodeSuggestions::Prompts::CodeGeneration::CodellamaMessages.new(
-          feature_setting: feature_setting, params: params)
-      when :mistral, :mixtral, :mixtral_8x22b, :codestral, :codegemma
-        CodeSuggestions::Prompts::CodeGeneration::MistralMessages.new(
-          feature_setting: feature_setting, params: params)
-      else
-        raise "Unknown model: #{model_name}"
-      end
+      CodeSuggestions::Prompts::CodeGeneration::AiGatewaySelfHostedMessages.new(
+        feature_setting: feature_setting, params: params)
     elsif ::Feature.enabled?(:anthropic_code_gen_aigw_migration, current_user)
       CodeSuggestions::Prompts::CodeGeneration::AiGatewayMessages.new(params)
     else
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CodeSuggestions::Prompts::CodeCompletion::CodeGemmaMessages, feature_category: :"self-hosted_models" do
  let_it_be(:ai_self_hosted_model) do
    create(
      :ai_self_hosted_model,
      model: :codegemma_7b,
      name: 'whatever',
      endpoint: 'http://localhost:11434'
    )
  end

  let_it_be(:ai_feature_setting) do
    create(
      :ai_feature_setting,
      feature: :code_completions,
      self_hosted_model: ai_self_hosted_model
    )
  end

  let(:prompt_version) { 2 }
  let(:language) { instance_double(CodeSuggestions::ProgrammingLanguage) }
  let(:language_name) { 'Python' }

  let(:prefix) do
    <<~PREFIX
      def hello_world():
    PREFIX
  end

  let(:suffix) { 'return' }
  let(:file_name) { 'hello.py' }
  let(:model_name) { 'codegemma_7b' }

  let(:unsafe_params) do
    {
      'current_file' => {
        'file_name' => file_name,
        'content_above_cursor' => prefix,
        'content_below_cursor' => suffix
      },
      'telemetry' => []
    }
  end

  let(:params) do
    {
      prefix: prefix,
      suffix: suffix,
      current_file: unsafe_params['current_file'].with_indifferent_access
    }
  end

  before do
    allow(CodeSuggestions::ProgrammingLanguage).to receive(:detect_from_filename)
      .with(file_name)
      .and_return(language)
    allow(language).to receive(:name).and_return(language_name)
  end

  subject(:codegemma_prompt) do
    described_class.new(feature_setting: ::Ai::FeatureSetting.find_by_feature(:code_completions), params: params)
  end

  describe '#request_params' do
    let(:request_params) do
      {
        model_provider: described_class::MODEL_PROVIDER,
        model_name: model_name,
        prompt_version: prompt_version,
        model_endpoint: 'http://localhost:11434',
        model_api_key: "token"
      }
    end

    let(:prompt) do
      <<~PROMPT.chomp
        <|fim_prefix|>def hello_world():\n<|fim_suffix|>return<|fim_middle|>
      PROMPT
    end

    let(:expected_prompt) do
      prompt
    end

    context 'when instruction is not present' do
      it 'returns expected request params' do
        expect(codegemma_prompt.request_params).to eq(request_params.merge(prompt: expected_prompt))
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CodeSuggestions::Prompts::CodeCompletion::CodellamaMessages, feature_category: :"self-hosted_models" do
  let_it_be(:ai_self_hosted_model) do
    create(
      :ai_self_hosted_model,
      model: :codellama_13b_code,
      name: 'whatever',
      endpoint: 'http://localhost:11434'
    )
  end

  let_it_be(:ai_feature_setting) do
    create(
      :ai_feature_setting,
      feature: :code_completions,
      self_hosted_model: ai_self_hosted_model
    )
  end

  let(:prompt_version) { 2 }
  let(:language) { instance_double(CodeSuggestions::ProgrammingLanguage) }
  let(:language_name) { 'Python' }

  let(:prefix) do
    <<~PREFIX
      def hello_world():
    PREFIX
  end

  let(:suffix) { 'return' }
  let(:file_name) { 'hello.py' }
  let(:model_name) { 'codellama_13b_code' }

  let(:unsafe_params) do
    {
      'current_file' => {
        'file_name' => file_name,
        'content_above_cursor' => prefix,
        'content_below_cursor' => suffix
      },
      'telemetry' => []
    }
  end

  let(:params) do
    {
      prefix: prefix,
      suffix: suffix,
      current_file: unsafe_params['current_file'].with_indifferent_access
    }
  end

  before do
    allow(CodeSuggestions::ProgrammingLanguage).to receive(:detect_from_filename)
      .with(file_name)
      .and_return(language)
    allow(language).to receive(:name).and_return(language_name)
  end

  subject(:codellama_prompt) do
    described_class.new(feature_setting: ::Ai::FeatureSetting.find_by_feature(:code_completions), params: params)
  end

  describe '#request_params' do
    let(:request_params) do
      {
        model_provider: described_class::MODEL_PROVIDER,
        model_name: model_name,
        prompt_version: prompt_version,
        model_endpoint: 'http://localhost:11434',
        model_api_key: "token"
      }
    end

    let(:prompt) do
      <<~PROMPT.chomp
        <PRE> def hello_world():\n <SUF>return <MID>
      PROMPT
    end

    let(:expected_prompt) do
      prompt
    end

    context 'when instruction is not present' do
      it 'returns expected request params with final prompt' do
        expect(codellama_prompt.request_params).to eq(request_params.merge(prompt: expected_prompt))
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CodeSuggestions::Prompts::CodeCompletion::CodestralMessages, feature_category: :"self-hosted_models" do
  let_it_be(:ai_self_hosted_model) do
    create(
      :ai_self_hosted_model,
      model: :codestral,
      name: 'whatever',
      endpoint: 'http://localhost:11434'
    )
  end

  let_it_be(:ai_feature_setting) do
    create(
      :ai_feature_setting,
      feature: :code_completions,
      self_hosted_model: ai_self_hosted_model
    )
  end

  let(:prompt_version) { 2 }
  let(:language) { instance_double(CodeSuggestions::ProgrammingLanguage) }
  let(:language_name) { 'Python' }

  let(:prefix) do
    <<~PREFIX
      def hello_world():
    PREFIX
  end

  let(:suffix) { 'return' }
  let(:file_name) { 'hello.py' }
  let(:model_name) { 'codestral' }

  let(:unsafe_params) do
    {
      'current_file' => {
        'file_name' => file_name,
        'content_above_cursor' => prefix,
        'content_below_cursor' => suffix
      },
      'telemetry' => []
    }
  end

  let(:params) do
    {
      prefix: prefix,
      suffix: suffix,
      current_file: unsafe_params['current_file'].with_indifferent_access
    }
  end

  before do
    allow(CodeSuggestions::ProgrammingLanguage).to receive(:detect_from_filename)
      .with(file_name)
      .and_return(language)
    allow(language).to receive(:name).and_return(language_name)
  end

  subject(:codestral_prompt) do
    described_class.new(feature_setting: ::Ai::FeatureSetting.find_by_feature(:code_completions), params: params)
  end

  describe '#request_params' do
    let(:request_params) do
      {
        model_provider: described_class::MODEL_PROVIDER,
        model_name: model_name,
        prompt_version: prompt_version,
        model_endpoint: 'http://localhost:11434',
        model_api_key: "token"
      }
    end

    let(:prompt) do
      <<~PROMPT.chomp
        <s>[SUFFIX]return[PREFIX]def hello_world():
      PROMPT
    end

    let(:expected_prompt) do
      prompt
    end

    context 'when instruction is not present' do
      it 'returns expected request params with final prompt' do
        expect(codestral_prompt.request_params).to eq(request_params.merge(prompt: expected_prompt))
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CodeSuggestions::Prompts::CodeGeneration::CodellamaMessages, feature_category: :"self-hosted_models" do
  let_it_be(:self_hosted_model) do
    create(:ai_self_hosted_model, model: :codellama, name: 'codellama-13b')
  end

  let_it_be(:feature_setting) do
    create(:ai_feature_setting, provider: :self_hosted, self_hosted_model: self_hosted_model)
  end

  let(:prompt_version) { 3 }
  let(:language) { instance_double(CodeSuggestions::ProgrammingLanguage) }
  let(:language_name) { 'Ruby' }

  let(:prefix) do
    <<~PREFIX
      Class BinarySearch
    PREFIX
  end

  let(:suffix) do
    <<~SUFFIX
      def use_binary_search
      end
    SUFFIX
  end

  let(:file_name) { 'hello.rb' }
  let(:model_name) { 'codellama' }
  let(:comment) { 'Generate a binary search method.' }
  let(:instruction) { instance_double(CodeSuggestions::Instruction, instruction: comment, trigger_type: 'comment') }

  let(:unsafe_params) do
    {
      'current_file' => {
        'file_name' => file_name,
        'content_above_cursor' => prefix,
        'content_below_cursor' => suffix
      },
      'telemetry' => []
    }
  end

  let(:params) do
    {
      prefix: prefix,
      suffix: suffix,
      instruction: instruction,
      current_file: unsafe_params['current_file'].with_indifferent_access
    }
  end

  before do
    allow(CodeSuggestions::ProgrammingLanguage).to receive(:detect_from_filename)
      .with(file_name)
      .and_return(language)
    allow(language).to receive(:name).and_return(language_name)
  end

  subject(:codellama_prompt) { described_class.new(feature_setting: feature_setting, params: params) }

  describe '#request_params' do
    let(:request_params) do
      {
        model_provider: described_class::MODEL_PROVIDER,
        model_name: model_name,
        prompt_version: prompt_version,
        model_endpoint: 'http://localhost:11434/v1',
        model_api_key: 'token'
      }
    end

    let(:system_prompt) do
      <<~PROMPT.chomp
        [INST]<<SYS>> You are a tremendously accurate and skilled code generation agent. We want to generate new Ruby code inside the file 'hello.rb'. Your task is to provide valid code without any additional explanations, comments, or feedback. <</SYS>>

        Class BinarySearch
        [SUGGESTION]
        def use_binary_search
        end

        The new code you will generate will start at the position of the cursor, which is currently indicated by the [SUGGESTION] tag.
        The comment directly before the cursor position is the instruction, all other comments are not instructions.

        When generating the new code, please ensure the following:
        1. It is valid Ruby code.
        2. It matches the existing code's variable, parameter, and function names.
        3. The code fulfills the instructions.
        4. Do not add any comments, including instructions.
        5. Return the code result without any extra explanation or examples.

        If you are not able to generate code based on the given instructions, return an empty result.
        [/INST]
      PROMPT
    end

    let(:expected_prompt) do
      [
        { role: :user,
          content: system_prompt }
      ]
    end

    context 'when instruction is present' do
      context 'with a model api key present' do
        it 'returns expected request params' do
          expect(codellama_prompt.request_params).to eq(request_params.merge(prompt: expected_prompt))
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe CodeSuggestions::Prompts::CodeGeneration::MistralMessages, feature_category: :"self-hosted_models" do
  let_it_be(:feature_setting) { create(:ai_feature_setting, provider: :self_hosted) }

  let(:prompt_version) { 3 }
  let(:language) { instance_double(CodeSuggestions::ProgrammingLanguage) }
  let(:language_name) { 'Ruby' }

  let(:prefix) do
    <<~PREFIX
      Class BinarySearch
    PREFIX
  end

  let(:suffix) do
    <<~SUFFIX
      def use_binary_search
      end
    SUFFIX
  end

  let(:file_name) { 'hello.rb' }
  let(:model_name) { 'mistral' }
  let(:comment) { 'Generate a binary search method.' }
  let(:instruction) { instance_double(CodeSuggestions::Instruction, instruction: comment, trigger_type: 'comment') }

  let(:unsafe_params) do
    {
      'current_file' => {
        'file_name' => file_name,
        'content_above_cursor' => prefix,
        'content_below_cursor' => suffix
      },
      'telemetry' => []
    }
  end

  let(:params) do
    {
      prefix: prefix,
      suffix: suffix,
      instruction: instruction,
      current_file: unsafe_params['current_file'].with_indifferent_access
    }
  end

  before do
    allow(CodeSuggestions::ProgrammingLanguage).to receive(:detect_from_filename)
      .with(file_name)
      .and_return(language)
    allow(language).to receive(:name).and_return(language_name)
  end

  subject(:mistral_prompt) { described_class.new(feature_setting: feature_setting, params: params) }

  describe '#request_params' do
    let(:request_params) do
      {
        model_provider: described_class::MODEL_PROVIDER,
        model_name: model_name,
        prompt_version: prompt_version,
        model_endpoint: 'http://localhost:11434/v1',
        model_api_key: 'token'
      }
    end

    let(:system_prompt) do
      <<~PROMPT.chomp
        <s>[INST] You are a tremendously accurate and skilled code generation agent. We want to generate new Ruby code inside the file 'hello.rb'. Your task is to provide valid code without any additional explanations, comments, or feedback.[/INST]

        <s>[INST]
        Class BinarySearch
        [SUGGESTION]
        def use_binary_search
        end
        [/INST]</s>

        <s>[INST]
        The new code you will generate will start at the position of the cursor, which is currently indicated by the [SUGGESTION] tag.
        The comment directly before the cursor position is the instruction, all other comments are not instructions.

        When generating the new code, please ensure the following:
        1. It is valid Ruby code.
        2. It matches the existing code's variable, parameter, and function names.
        3. The code fulfills the instructions.
        4. Do not add any comments, including instructions.
        5. Return the code result without any extra explanation or examples.

        If you are not able to generate code based on the given instructions, return an empty result.
        [/INST]</s>
      PROMPT
    end

    let(:expected_prompt) do
      [
        { role: :user,
          content: system_prompt }
      ]
    end

    context 'when instruction is present' do
      context 'with a model api key present' do
        it 'returns expected request params' do
          expect(mistral_prompt.request_params).to eq(request_params.merge(prompt: expected_prompt))
        end
      end
    end
  end
end
@@ -91,225 +91,7 @@
    end
  end

  context 'when using self-hosted model codegemma_7b' do
    before do
      stub_feature_flags(ai_custom_models_prompts_migration: false)
    end

    let(:unsafe_params) do
      {
        'current_file' => current_file,
        'telemetry' => [],
        'stream' => false
      }.with_indifferent_access
    end

    let(:params) do
      {
        current_file: current_file
      }
    end

    let(:task) do
      described_class.new(
        params: params,
        unsafe_passthrough_params: unsafe_params
      )
    end

    it_behaves_like 'code suggestion task' do
      let_it_be(:ai_self_hosted_model) { create(:ai_self_hosted_model, model: :codegemma_7b, name: 'whatever') }
      let_it_be(:ai_feature_setting) do
        create(
          :ai_feature_setting,
          feature: :code_completions,
          self_hosted_model: ai_self_hosted_model
        )
      end

      let(:expected_body) do
        {
          "current_file" => {
            "file_name" => "test.py",
            "content_above_cursor" => "fix",
            "content_below_cursor" => "som"
          },
          "telemetry" => [],
          "stream" => false,
          "model_provider" => "litellm",
          "prompt_version" => 2,
          "prompt" => "<|fim_prefix|>some prefix<|fim_suffix|>some suffix<|fim_middle|>",
          "model_endpoint" => "http://localhost:11434/v1",
          "model_name" => "codegemma_7b",
          "model_api_key" => "token"
        }
      end

      let(:expected_feature_name) { :self_hosted_models }
    end
  end

  context 'when using self-hosted model codestral' do
    before do
      stub_feature_flags(ai_custom_models_prompts_migration: false)
    end

    let(:unsafe_params) do
      {
        'current_file' => current_file,
        'telemetry' => [],
        'stream' => false
      }.with_indifferent_access
    end

    let(:params) do
      {
        current_file: current_file
      }
    end

    let(:task) do
      described_class.new(
        params: params,
        unsafe_passthrough_params: unsafe_params
      )
    end

    it_behaves_like 'code suggestion task' do
      let_it_be(:ai_self_hosted_model) { create(:ai_self_hosted_model, model: :codestral, name: 'whatever') }
      let_it_be(:ai_feature_setting) do
        create(
          :ai_feature_setting,
          feature: :code_completions,
          self_hosted_model: ai_self_hosted_model
        )
      end

      let(:expected_body) do
        {
          "current_file" => {
            "file_name" => "test.py",
            "content_above_cursor" => "fix",
            "content_below_cursor" => "som"
          },
          "telemetry" => [],
          "stream" => false,
          "model_provider" => "litellm",
          "prompt_version" => 2,
          "prompt" => "<s>[SUFFIX]some suffix[PREFIX]some prefix",
          "model_endpoint" => "http://localhost:11434/v1",
          "model_name" => "codestral",
          "model_api_key" => "token"
        }
      end

      let(:expected_feature_name) { :self_hosted_models }
    end
  end

  context 'when using self-hosted model codellama' do
    before do
      stub_feature_flags(ai_custom_models_prompts_migration: false)
    end

    let(:unsafe_params) do
      {
        'current_file' => current_file,
        'telemetry' => [],
        'stream' => false
      }.with_indifferent_access
    end

    let(:params) do
      {
        current_file: current_file
      }
    end

    let(:task) do
      described_class.new(
        params: params,
        unsafe_passthrough_params: unsafe_params
      )
    end

    it_behaves_like 'code suggestion task' do
      let_it_be(:ai_self_hosted_model) do
        create(:ai_self_hosted_model, model: :codellama_13b_code, name: 'whatever')
      end

      let_it_be(:ai_feature_setting) do
        create(
          :ai_feature_setting,
          feature: :code_completions,
          self_hosted_model: ai_self_hosted_model
        )
      end

      let(:expected_body) do
        {
          "current_file" => {
            "file_name" => "test.py",
            "content_above_cursor" => "fix",
            "content_below_cursor" => "som"
          },
          "telemetry" => [],
          "stream" => false,
          "model_provider" => "litellm",
          "prompt_version" => 2,
          "prompt" => "<PRE> some prefix <SUF>some suffix <MID>",
          "model_endpoint" => "http://localhost:11434/v1",
          "model_name" => "codellama_13b_code",
          "model_api_key" => "token"
        }
      end

      let(:expected_feature_name) { :self_hosted_models }
    end
  end

  context 'when model name is unknown' do
    before do
      stub_feature_flags(ai_custom_models_prompts_migration: false)

      allow(Ai::FeatureSetting).to receive(:find_by_feature).with(:code_completions).and_return(ai_feature_setting)
      allow(ai_feature_setting).to receive_message_chain(:self_hosted_model, :model, :to_sym).and_return("unknown")
    end

    let(:unsafe_params) do
      {
        'current_file' => current_file,
        'telemetry' => [],
        'stream' => false
      }.with_indifferent_access
    end

    let(:params) do
      {
        current_file: current_file
      }
    end

    let(:task) do
      described_class.new(
        params: params,
        unsafe_passthrough_params: unsafe_params
      )
    end

    let(:ai_feature_setting) do
      instance_double(Ai::FeatureSetting, self_hosted?: true)
    end

    it 'raises an error' do
      expect { task.body }.to raise_error("Unknown model: unknown")
    end
  end

  context 'when using self-hosted model enable ai_custom_models_prompts_migration' do
    before do
      stub_feature_flags(ai_custom_models_prompts_migration: true)
    end

    context 'when using self-hosted model' do
      let(:unsafe_params) do
        {
          'current_file' => current_file,
......
@@ -222,180 +222,4 @@
      let(:expected_feature_name) { :self_hosted_models }
    end
  end

  context 'when ai_custom_models_prompts_migration feature flag is disabled' do
    before do
      stub_feature_flags(ai_custom_models_prompts_migration: false)
    end

    context 'when using self hosted mistral, mixtral, codegemma, codestral model' do
      let_it_be(:code_generations_feature_setting) { create(:ai_feature_setting, feature: :code_generations) }

      let(:unsafe_params) do
        {
          'current_file' => current_file,
          'telemetry' => [],
          'stream' => false
        }.with_indifferent_access
      end

      let(:params) do
        {
          current_file: current_file,
          generation_type: 'empty_function'
        }
      end

      let(:mistral_request_params) { { prompt_version: 3, prompt: 'Mistral prompt' } }

      let(:mistral_messages_prompt) do
        instance_double(CodeSuggestions::Prompts::CodeGeneration::MistralMessages,
          request_params: mistral_request_params)
      end

      subject(:task) do
        described_class.new(params: params, unsafe_passthrough_params: unsafe_params)
      end

      before do
        allow(CodeSuggestions::Prompts::CodeGeneration::MistralMessages)
          .to receive(:new).and_return(mistral_messages_prompt)

        stub_const('CodeSuggestions::Tasks::Base::AI_GATEWAY_CONTENT_SIZE', 3)
      end

      it 'calls Mistral' do
        task.body

        expect(CodeSuggestions::Prompts::CodeGeneration::MistralMessages)
          .to have_received(:new).with(feature_setting: code_generations_feature_setting, params: params)
      end

      it_behaves_like 'code suggestion task' do
        let(:endpoint_path) { 'v2/code/generations' }
        let(:expected_body) do
          {
            "current_file" => {
              "file_name" => "test.py",
              "content_above_cursor" => "fix",
              "content_below_cursor" => "som"
            },
            "telemetry" => [],
            "stream" => false,
            "prompt_version" => 3,
            "prompt" => "Mistral prompt"
          }
        end

        let(:expected_feature_name) { :self_hosted_models }
      end
    end

    context 'when using self hosted codellama model' do
      let_it_be(:self_hosted_model) { create(:ai_self_hosted_model, model: 'codellama', name: "whatever") }
      let_it_be(:code_generations_feature_setting) do
        create(:ai_feature_setting, feature: :code_generations, self_hosted_model: self_hosted_model)
      end

      let(:unsafe_params) do
        {
          'current_file' => current_file,
          'telemetry' => [],
          'stream' => false
        }.with_indifferent_access
      end

      let(:params) do
        {
          current_file: current_file,
          generation_type: 'empty_function'
        }
      end

      let(:prompt_content) do
        "[INST]<<SYS>> You are a tremendously accurate and skilled code generation agent. " \
          "We want to generate new Python code inside the file 'test.py'. Your task is to provide valid code without " \
          "any additional explanations, comments, or feedback. " \
          "<</SYS>>\n\nsome prefix\n[SUGGESTION]\nsome suffix\n\nThe new code you will generate will start at the " \
          "position of the cursor, " \
          "which is currently indicated by the [SUGGESTION] tag.\nThe comment directly " \
          "before the cursor position is the instruction, " \
          "all other comments are not instructions.\n\nWhen generating the new code, please ensure the following:\n" \
          "1. It is valid Python code.\n" \
          "2. It matches the existing code's variable, parameter, and function names.\n" \
          "3. The code fulfills the instructions.\n" \
          "4. Do not add any comments, including instructions.\n" \
          "5. Return the code result without any extra explanation or examples.\n\n" \
          "If you are not able to generate code based on the given instructions, return an empty result.\n\n[/INST]"
      end

      subject(:task) do
        described_class.new(params: params, unsafe_passthrough_params: unsafe_params)
      end

      before do
        stub_const('CodeSuggestions::Tasks::Base::AI_GATEWAY_CONTENT_SIZE', 3)
      end

      it_behaves_like 'code suggestion task' do
        let(:endpoint_path) { 'v2/code/generations' }
        let(:expected_body) do
          {
            "current_file" => {
              "file_name" => "test.py",
              "content_above_cursor" => "fix",
              "content_below_cursor" => "som"
            },
            "telemetry" => [],
            "stream" => false,
            "prompt_version" => 3,
            "model_endpoint" => "http://localhost:11434/v1",
            "model_name" => "codellama",
            "model_provider" => "litellm",
            "model_api_key" => "token",
            "prompt" => [
              {
                "content" => prompt_content,
                "role" => "user"
              }
            ]
          }
        end

        let(:expected_feature_name) { :self_hosted_models }
      end
    end

    context 'when model name is unknown' do
      before do
        allow(Ai::FeatureSetting).to receive(:find_by_feature).with(:code_generations).and_return(ai_feature_setting)
        allow(ai_feature_setting).to receive_message_chain(:self_hosted_model, :model, :to_sym).and_return("unknown")
      end

      let(:ai_feature_setting) do
        instance_double(Ai::FeatureSetting, self_hosted?: true)
      end

      let(:unsafe_params) do
        {
          'current_file' => current_file,
          'telemetry' => [],
          'stream' => false
        }.with_indifferent_access
      end

      let(:params) do
        {
          current_file: current_file,
          generation_type: 'empty_function'
        }
      end

      subject(:task) do
        described_class.new(params: params, unsafe_passthrough_params: unsafe_params)
      end

      it 'raises an error' do
        expect { task.body }.to raise_error("Unknown model: unknown")
      end
    end
  end
end