From f058a82aceea43613f7e762393d75e084544c7f4 Mon Sep 17 00:00:00 2001
From: Mohamed Hamda <mhamda@gitlab.com>
Date: Fri, 19 Jul 2024 11:54:57 +0200
Subject: [PATCH] Add ai_custom_models_prompts_migration FF

Add a feature flag to control prompts migration
Use the new FF to route to AiGatewayCodeCompletionMessage
Adjust the base prompt in AiGatewayCodeCompletionMessage to be nil

Changelog: changed
EE: true
---
 .../ai_custom_models_prompts_migration.yml    |  9 +++++++
 .../ai_gateway_code_completion_message.rb     |  6 ++---
 .../tasks/self_hosted_code_completion.rb      | 27 ++++++++++++-------
 ...ai_gateway_code_completion_message_spec.rb |  6 ++---
 .../tasks/self_hosted_code_completion_spec.rb | 19 +++++++++++++
 5 files changed, 50 insertions(+), 17 deletions(-)
 create mode 100644 config/feature_flags/development/ai_custom_models_prompts_migration.yml

diff --git a/config/feature_flags/development/ai_custom_models_prompts_migration.yml b/config/feature_flags/development/ai_custom_models_prompts_migration.yml
new file mode 100644
index 0000000000000..01ba89c719985
--- /dev/null
+++ b/config/feature_flags/development/ai_custom_models_prompts_migration.yml
@@ -0,0 +1,9 @@
+---
+name: ai_custom_models_prompts_migration
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/473156
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/160050
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/473358
+milestone: '17.3'
+group: group::custom models
+type: development
+default_enabled: false
diff --git a/ee/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message.rb b/ee/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message.rb
index b8a1aad944f9a..e8ab9c0452882 100644
--- a/ee/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message.rb
+++ b/ee/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message.rb
@@ -18,12 +18,12 @@ def request_params
           end
         end
 
-        private
-
         def prompt
-          raise NotImplementedError, "#{self.class} has not implemented method #{__method__}"
+          nil
         end
 
+        private
+
         def pick_prefix
           prefix.last(500)
         end
diff --git a/ee/lib/code_suggestions/tasks/self_hosted_code_completion.rb b/ee/lib/code_suggestions/tasks/self_hosted_code_completion.rb
index 756d6bd9e785b..c26bd9efa2a18 100644
--- a/ee/lib/code_suggestions/tasks/self_hosted_code_completion.rb
+++ b/ee/lib/code_suggestions/tasks/self_hosted_code_completion.rb
@@ -37,16 +37,23 @@ def params
       def prompt
         model_name = feature_setting.self_hosted_model.model.to_sym
 
-        case model_name
-        when :codegemma
-          CodeSuggestions::Prompts::CodeCompletion::CodeGemmaMessages.new(params)
-        when :codestral
-          CodeSuggestions::Prompts::CodeCompletion::CodestralMessages.new(params)
-        when :'codellama:code'
-          CodeSuggestions::Prompts::CodeCompletion::CodellamaMessages.new(params)
-        else
-          raise "Unknown model: #{model_name}"
-        end
+        # rubocop:disable Gitlab/FeatureFlagWithoutActor -- Global development flag for migrating the prompts
+        prompt_migration_enabled = ::Feature.enabled?(:ai_custom_models_prompts_migration)
+        # rubocop:enable Gitlab/FeatureFlagWithoutActor
+        ai_gateway_class = CodeSuggestions::Prompts::CodeCompletion::AiGatewayCodeCompletionMessage
+        model_classes = {
+          codegemma: CodeSuggestions::Prompts::CodeCompletion::CodeGemmaMessages,
+          codestral: CodeSuggestions::Prompts::CodeCompletion::CodestralMessages,
+          'codellama:code': CodeSuggestions::Prompts::CodeCompletion::CodellamaMessages
+        }
+
+        message_class = if prompt_migration_enabled
+                          ai_gateway_class
+                        else
+                          model_classes.fetch(model_name) { raise "Unknown model: #{model_name}" }
+                        end
+
+        message_class.new(params)
       end
       strong_memoize_attr :prompt
     end
diff --git a/ee/spec/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message_spec.rb b/ee/spec/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message_spec.rb
index ea1bde358710f..8eb38cd10d7e7 100644
--- a/ee/spec/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message_spec.rb
+++ b/ee/spec/lib/code_suggestions/prompts/code_completion/ai_gateway_code_completion_message_spec.rb
@@ -35,10 +35,8 @@ def prompt
   end
 
   describe '#prompt' do
-    it 'raises NotImplementedError for the abstract class' do
-      expect do
-        described_class.new(params).send(:prompt)
-      end.to raise_error(NotImplementedError, "#{described_class} has not implemented method prompt")
+    it 'returns an empty prompt' do
+      expect(described_class.new(params).prompt).to be_nil
     end
   end
 end
diff --git a/ee/spec/lib/code_suggestions/tasks/self_hosted_code_completion_spec.rb b/ee/spec/lib/code_suggestions/tasks/self_hosted_code_completion_spec.rb
index a4f4cf2192111..bfb7251257eba 100644
--- a/ee/spec/lib/code_suggestions/tasks/self_hosted_code_completion_spec.rb
+++ b/ee/spec/lib/code_suggestions/tasks/self_hosted_code_completion_spec.rb
@@ -52,6 +52,10 @@ params: params,
       unsafe_passthrough_params: unsafe_params)
   end
 
+  before do
+    stub_feature_flags(ai_custom_models_prompts_migration: false)
+  end
+
   describe '#body' do
     before do
       allow(CodeSuggestions::Prompts::CodeCompletion::CodeGemmaMessages)
@@ -85,6 +89,21 @@
 
       expect(CodeSuggestions::Prompts::CodeCompletion::CodeGemmaMessages).to have_received(:new).with(params)
     end
+
+    context 'when the ai_custom_models_prompts_migration FF is enabled' do
+      before do
+        stub_feature_flags(ai_custom_models_prompts_migration: true)
+        allow(CodeSuggestions::Prompts::CodeCompletion::AiGatewayCodeCompletionMessage)
+          .to receive(:new).and_return(codgemma_messages_prompt)
+      end
+
+      it 'calls the base AiGatewayCodeCompletionMessage class' do
+        task.body
+
+        expect(CodeSuggestions::Prompts::CodeCompletion::AiGatewayCodeCompletionMessage)
+          .to have_received(:new).with(params)
+      end
+    end
   end
 
   describe 'prompt selection per model name' do
--
GitLab
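
Illustrative note (not part of the patch): the routing that the self_hosted_code_completion.rb hunk introduces can be sketched in plain Ruby as below. The constant and method names are stand-ins for illustration only, and the flag_enabled: keyword replaces the real ::Feature.enabled?(:ai_custom_models_prompts_migration) check. When the flag is on, every self-hosted model routes to the base AiGatewayCodeCompletionMessage (whose prompt is now nil, leaving prompt construction to the AI Gateway); when it is off, the per-model message class is looked up as before.

  # Sketch only, with stand-in symbols in place of the real prompt classes.
  MODEL_CLASSES = {
    codegemma: :code_gemma_messages,
    codestral: :codestral_messages,
    'codellama:code': :codellama_messages
  }.freeze

  def message_class_for(model_name, flag_enabled:)
    # Flag on: always use the base AI Gateway message class.
    return :ai_gateway_code_completion_message if flag_enabled

    # Flag off: keep the existing per-model lookup, raising on unknown models.
    MODEL_CLASSES.fetch(model_name) { raise "Unknown model: #{model_name}" }
  end

  message_class_for(:codestral, flag_enabled: false)        # => :codestral_messages
  message_class_for(:codestral, flag_enabled: true)         # => :ai_gateway_code_completion_message
  message_class_for(:'codellama:code', flag_enabled: false) # => :codellama_messages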