From 71109150539e62321e6d6ea6592ed188655b453c Mon Sep 17 00:00:00 2001
From: Nathan Weinshenker <nweinshenker@gitlab.com>
Date: Mon, 22 Jul 2024 11:47:49 -0700
Subject: [PATCH] Chore: remove the use_sonnet_35 feature flag

Remove the use_sonnet_35 feature flag associated with Claude 3.5 Sonnet, making it the default model unconditionally.

Changelog: removed
EE: true
---
 config/feature_flags/beta/use_sonnet_35.yml   |  9 ----
 .../gitlab/llm/chain/requests/ai_gateway.rb   |  2 +-
 .../llm/chain/requests/ai_gateway_spec.rb     | 50 +------------------
 3 files changed, 2 insertions(+), 59 deletions(-)
 delete mode 100644 config/feature_flags/beta/use_sonnet_35.yml

diff --git a/config/feature_flags/beta/use_sonnet_35.yml b/config/feature_flags/beta/use_sonnet_35.yml
deleted file mode 100644
index a2ae9c11ca233..0000000000000
--- a/config/feature_flags/beta/use_sonnet_35.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-name: use_sonnet_35
-feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/468334
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/157696
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/469499
-milestone: '17.2'
-group: group::ai framework
-type: beta
-default_enabled: true
diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
index f55b5ddb7abe2..9b58785c40b74 100644
--- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
+++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
@@ -70,7 +70,7 @@ def default_options
           def model(options)
             return options[:model] if options[:model].present?
 
-            Feature.enabled?(:use_sonnet_35, user) ? CLAUDE_3_5_SONNET : CLAUDE_3_SONNET
+            CLAUDE_3_5_SONNET
           end
 
           def provider(options)
diff --git a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb
index a7d45fe38d99f..2316c63192e72 100644
--- a/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/requests/ai_gateway_spec.rb
@@ -45,7 +45,7 @@
     let(:ai_client) { double }
     let(:endpoint) { described_class::ENDPOINT }
     let(:model) { nil }
-    let(:expected_model) { described_class::CLAUDE_3_SONNET }
+    let(:expected_model) { described_class::CLAUDE_3_5_SONNET }
     let(:provider) { :anthropic }
     let(:params) do
       {
@@ -88,7 +88,6 @@
     before do
       allow(Gitlab::Llm::Logger).to receive(:build).and_return(logger)
       allow(instance).to receive(:ai_client).and_return(ai_client)
-      stub_feature_flags(use_sonnet_35: false)
     end
 
     shared_examples 'performing request to the AI Gateway' do
@@ -152,53 +151,6 @@
       end
     end
 
-    context 'when using Sonnet 3.5 model' do
-      let(:expected_model) { described_class::CLAUDE_3_5_SONNET }
-
-      before do
-        stub_feature_flags(use_sonnet_35: true)
-      end
-
-      it 'calls the AI Gateway streaming endpoint and yields response without stripping it' do
-        expect(ai_client).to receive(:stream).with(endpoint: endpoint, body: body).and_yield(response)
-          .and_return(response)
-
-        expect { |b| instance.request(prompt, &b) }.to yield_with_args(response)
-      end
-
-      it_behaves_like 'performing request to the AI Gateway'
-
-      it_behaves_like 'tracks events for AI requests', 4, 2, klass: 'Gitlab::Llm::Anthropic::Client' do
-        before do
-          allow(ai_client).to receive(:stream).with(endpoint: endpoint, body: body).and_return(response)
-        end
-      end
-
-      context 'when additional params are passed in as options' do
-        let(:options) do
-          { temperature: 1, stop_sequences: %W[\n\Foo Bar:], max_tokens_to_sample: 1024, disallowed_param: 1, topP: 1 }
-        end
-
-        let(:params) do
-          {
-            max_tokens_to_sample: 1024,
-            stop_sequences: ["\n\Foo", "Bar:"],
-            temperature: 1
-          }
-        end
-
-        it_behaves_like 'performing request to the AI Gateway'
-      end
-
-      context 'when unit primitive is passed' do
-        let(:endpoint) { "#{described_class::BASE_ENDPOINT}/test" }
-
-        subject(:request) { instance.request(prompt, unit_primitive: :test) }
-
-        it_behaves_like 'performing request to the AI Gateway'
-      end
-    end
-
     context 'when invalid model is passed' do
       let(:model) { 'test' }
 
-- 
GitLab