From bd653711ee92fcc062afc8e8786f5b4e3984d33d Mon Sep 17 00:00:00 2001
From: Gosia Ksionek <mksionek@gitlab.com>
Date: Wed, 26 Jul 2023 15:49:53 +0000
Subject: [PATCH] Remove ai_chat_prompt_alternative feature flag

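Remove the ai_chat_prompt_alternative feature flag together with the
ALTERNATIVE_PROMPT_TEMPLATE it selected, and drop the flag-specific
specs. With the flag gone, the zero-shot executor always uses the single
remaining template, so prompt_version reduces to a plain constant lookup:

    def prompt_version
      PROMPT_TEMPLATE
    end
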
Changelog: changed
---
 .../ai_chat_prompt_alternative.yml            |  8 ----
 .../llm/chain/agents/zero_shot/executor.rb    | 39 +------------------
 .../chain/agents/zero_shot/executor_spec.rb   | 31 +++------------
 .../zero_shot/prompts/anthropic_spec.rb       |  2 +-
 .../zero_shot/prompts/vertex_ai_spec.rb       |  2 +-
 5 files changed, 8 insertions(+), 74 deletions(-)
 delete mode 100644 config/feature_flags/development/ai_chat_prompt_alternative.yml

diff --git a/config/feature_flags/development/ai_chat_prompt_alternative.yml b/config/feature_flags/development/ai_chat_prompt_alternative.yml
deleted file mode 100644
index f568abcec247..000000000000
--- a/config/feature_flags/development/ai_chat_prompt_alternative.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: ai_chat_prompt_alternative
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/125092
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/417230
-milestone: '16.2'
-type: development
-group: group::ai-enablement
-default_enabled: false
diff --git a/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb b/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb
index 0dda0427821d..d9086ff31d79 100644
--- a/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb
+++ b/ee/lib/gitlab/llm/chain/agents/zero_shot/executor.rb
@@ -92,11 +92,7 @@ def options
             end
 
             def prompt_version
-              if Feature.enabled?(:ai_chat_prompt_alternative, context.current_user)
-                ALTERNATIVE_PROMPT_TEMPLATE
-              else
-                PROMPT_TEMPLATE
-              end
+              PROMPT_TEMPLATE
             end
 
             def last_conversation
@@ -118,39 +114,6 @@ def conversation
             end
 
             PROMPT_TEMPLATE = [
-              Utils::Prompt.as_system(
-                <<~PROMPT
-                Answer the question as accurate as you can.
-                Start with identifying the resource first.
-                You have access to the following tools:
-                %<tools_definitions>s
-                Consider every tool before making decision.
-                Identifying resource mustn't be the last step.
-                Ensure that your answer is accurate and doesn’t contain any information not directly supported
-                by the information retrieved using provided tools.
-                Use the following format:
-
-                Question: the input question you must answer
-                Thought: you should always think about what to do
-                Action: the action to take, should be one from this list: %<tool_names>s
-                Action Input: the input to the action
-                Observation: the result of the actions
-
-                ... (this Thought/Action/Action Input/Observation sequence can repeat N times)
-
-                Thought: I know the final answer.
-                Final Answer: the final answer to the original input question.
-
-                REMEMBER to ALWAYS start a line with "Final Answer:" to give me the final answer.
-
-                Begin!
-              PROMPT
-              ),
-              Utils::Prompt.as_assistant("%<agent_scratchpad>s"),
-              Utils::Prompt.as_user("Question: %<user_input>s")
-            ].freeze
-
-            ALTERNATIVE_PROMPT_TEMPLATE = [
               Utils::Prompt.as_system(
                 <<~PROMPT
                 Answer the question as accurate as you can.
diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb
index 960a95f99857..dcf21f16a3b8 100644
--- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/executor_spec.rb
@@ -132,33 +132,12 @@
       end
     end
 
-    context 'with ai_chat_prompt_alternative feature flag' do
-      context 'with feature flag off' do
-        before do
-          stub_feature_flags(ai_chat_prompt_alternative: false)
-        end
-
-        it 'includes an older version of the prompt' do
-          expect(Gitlab::Llm::Chain::Agents::ZeroShot::Prompts::Anthropic)
-            .to receive(:prompt).once.with(a_hash_including(prompt_version: described_class::PROMPT_TEMPLATE))
-
-          agent.prompt
-        end
-      end
-
-      context 'with feature flag on' do
-        before do
-          stub_feature_flags(ai_chat_prompt_alternative: true)
-        end
-
-        it 'includes a newer version of the prompt' do
-          expect(Gitlab::Llm::Chain::Agents::ZeroShot::Prompts::Anthropic)
-            .to receive(:prompt).once.with(a_hash_including(prompt_version:
-              described_class::ALTERNATIVE_PROMPT_TEMPLATE))
+    it 'includes the prompt' do
+      expect(Gitlab::Llm::Chain::Agents::ZeroShot::Prompts::Anthropic)
+        .to receive(:prompt).once.with(a_hash_including(prompt_version:
+                                                          described_class::PROMPT_TEMPLATE))
 
-          agent.prompt
-        end
-      end
+      agent.prompt
     end
   end
 
diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb
index 92583731bf58..3631a5c7b9d0 100644
--- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/anthropic_spec.rb
@@ -29,7 +29,7 @@
       }
     end
 
-    let(:prompt_text) { "Answer the question as accurate as you can.\nStart with identifying the resource first." }
+    let(:prompt_text) { "Answer the question as accurate as you can." }
 
     subject { described_class.prompt(options)[:prompt] }
 
diff --git a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb
index ff676c12d886..a10cea46752a 100644
--- a/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/agents/zero_shot/prompts/vertex_ai_spec.rb
@@ -13,7 +13,7 @@
         prompt_version: ::Gitlab::Llm::Chain::Agents::ZeroShot::Executor::PROMPT_TEMPLATE
       }
       prompt = described_class.prompt(options)[:prompt]
-      prompt_text = "Answer the question as accurate as you can.\nStart with identifying the resource first."
+      prompt_text = "Answer the question as accurate as you can."
 
       expect(prompt).to include('foo?')
       expect(prompt).to include('tool definitions')
-- 
GitLab