diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml
index 45c9cf90b0327dea0f5afdfeef8ae85a6b717a2c..c99d6784b735f01577512ae63f5b76bc456bacfb 100644
--- a/.rubocop_todo/rspec/named_subject.yml
+++ b/.rubocop_todo/rspec/named_subject.yml
@@ -1097,7 +1097,6 @@ RSpec/NamedSubject:
     - 'ee/spec/services/llm/generate_test_file_service_spec.rb'
     - 'ee/spec/services/llm/git_command_service_spec.rb'
     - 'ee/spec/services/llm/resolve_vulnerability_service_spec.rb'
-    - 'ee/spec/services/llm/tanuki_bot_service_spec.rb'
     - 'ee/spec/services/member_roles/create_service_spec.rb'
     - 'ee/spec/services/merge_request_approval_settings/update_service_spec.rb'
     - 'ee/spec/services/merge_requests/merge_service_spec.rb'
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index ebd053bdb5a23c257fdae924219f1896165ede14..5649b65c598fa243e5b4e95411f823a3bd77464c 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -1264,7 +1264,6 @@ Input type: `AiActionInput`
 | <a id="mutationaiactionresolvevulnerability"></a>`resolveVulnerability` | [`AiResolveVulnerabilityInput`](#airesolvevulnerabilityinput) | Input for resolve_vulnerability AI action. |
 | <a id="mutationaiactionsummarizecomments"></a>`summarizeComments` | [`AiSummarizeCommentsInput`](#aisummarizecommentsinput) | Input for summarize_comments AI action. |
 | <a id="mutationaiactionsummarizereview"></a>`summarizeReview` | [`AiSummarizeReviewInput`](#aisummarizereviewinput) | Input for summarize_review AI action. |
-| <a id="mutationaiactiontanukibot"></a>`tanukiBot` | [`AiTanukiBotInput`](#aitanukibotinput) | Input for tanuki_bot AI action. |
 
 #### Fields
 
@@ -32438,15 +32437,6 @@ see the associated mutation type above.
 | ---- | ---- | ----------- |
 | <a id="aisummarizereviewinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |
 
-### `AiTanukiBotInput`
-
-#### Arguments
-
-| Name | Type | Description |
-| ---- | ---- | ----------- |
-| <a id="aitanukibotinputquestion"></a>`question` | [`String!`](#string) | GitLab documentation question for AI to answer. |
-| <a id="aitanukibotinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |
-
 ### `AlertManagementPayloadAlertFieldInput`
 
 Field that are available while modifying the custom mapping attributes for an HTTP integration.
diff --git a/ee/app/graphql/types/ai/tanuki_bot_input_type.rb b/ee/app/graphql/types/ai/tanuki_bot_input_type.rb
deleted file mode 100644
index 4b477dd1f5426bfc9f1516e77e3f5f3c8007d263..0000000000000000000000000000000000000000
--- a/ee/app/graphql/types/ai/tanuki_bot_input_type.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-module Types
-  module Ai
-    class TanukiBotInputType < BaseMethodInputType
-      graphql_name 'AiTanukiBotInput'
-
-      argument :question, GraphQL::Types::String,
-        required: true,
-        validates: { allow_blank: false },
-        description: 'GitLab documentation question for AI to answer.'
-    end
-  end
-end
diff --git a/ee/app/services/llm/execute_method_service.rb b/ee/app/services/llm/execute_method_service.rb
index 8656a9140742b6de891c7d5e655c3d2d7f4b59fd..47c0ca6b00b322c9fdec3fc1b870a255292cd9df 100644
--- a/ee/app/services/llm/execute_method_service.rb
+++ b/ee/app/services/llm/execute_method_service.rb
@@ -11,7 +11,6 @@ class ExecuteMethodService < BaseService
       summarize_comments: Llm::GenerateSummaryService,
       summarize_review: Llm::MergeRequests::SummarizeReviewService,
       explain_code: Llm::ExplainCodeService,
-      tanuki_bot: Llm::TanukiBotService,
       generate_test_file: Llm::GenerateTestFileService,
       generate_description: Llm::GenerateDescriptionService,
       generate_commit_message: Llm::GenerateCommitMessageService,
diff --git a/ee/app/services/llm/tanuki_bot_service.rb b/ee/app/services/llm/tanuki_bot_service.rb
deleted file mode 100644
index 8b09467406803cedf471a60b601ffb7433ff2b72..0000000000000000000000000000000000000000
--- a/ee/app/services/llm/tanuki_bot_service.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-module Llm
-  class TanukiBotService < BaseService
-    def valid?
-      super && Gitlab::Llm::TanukiBot.enabled_for?(user: user)
-    end
-
-    private
-
-    def ai_action
-      :tanuki_bot
-    end
-
-    def perform
-      schedule_completion_worker
-    end
-
-    def content(_action_name)
-      options[:question]
-    end
-  end
-end
diff --git a/ee/lib/gitlab/llm/anthropic/completions/tanuki_bot.rb b/ee/lib/gitlab/llm/anthropic/completions/tanuki_bot.rb
deleted file mode 100644
index dc966236e9711fbc133e37ee0f418871af9e9829..0000000000000000000000000000000000000000
--- a/ee/lib/gitlab/llm/anthropic/completions/tanuki_bot.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
-  module Llm
-    module Anthropic
-      module Completions
-        class TanukiBot < Gitlab::Llm::Completions::Base
-          # After we remove REST API, refactor so that we use methods defined in templates/tanuki_bot.rb, e.g.:
-          # initial_prompt = ai_prompt_class.initial_prompt(question)
-          def execute
-            question = options[:question]
-
-            response_modifier = ::Gitlab::Llm::TanukiBot.new(
-              current_user: user,
-              question: question,
-              tracking_context: tracking_context
-            ).execute
-
-            ::Gitlab::Llm::GraphqlSubscriptionResponseService.new(
-              user, resource, response_modifier, options: response_options
-            ).execute
-
-            response_modifier
-          end
-        end
-      end
-    end
-  end
-end
diff --git a/ee/lib/gitlab/llm/completions_factory.rb b/ee/lib/gitlab/llm/completions_factory.rb
index afb9b368f75efdee2aa96d5ed586df96c32a26eb..07154b7620a5cb5d23cbd15c124449bb6a2625f0 100644
--- a/ee/lib/gitlab/llm/completions_factory.rb
+++ b/ee/lib/gitlab/llm/completions_factory.rb
@@ -29,11 +29,6 @@ class CompletionsFactory
         prompt_class: ::Gitlab::Llm::VertexAi::Templates::ExplainCode,
         feature_category: :ai_abstraction_layer
       },
-      tanuki_bot: {
-        service_class: ::Gitlab::Llm::Anthropic::Completions::TanukiBot,
-        prompt_class: ::Gitlab::Llm::Anthropic::Templates::TanukiBot,
-        feature_category: :ai_abstraction_layer
-      },
       generate_test_file: {
         service_class: ::Gitlab::Llm::VertexAi::Completions::GenerateTestFile,
         prompt_class: ::Gitlab::Llm::Templates::GenerateTestFile,
diff --git a/ee/spec/lib/gitlab/llm/anthropic/completions/tanuki_bot_spec.rb b/ee/spec/lib/gitlab/llm/anthropic/completions/tanuki_bot_spec.rb
deleted file mode 100644
index b8654ab28c6120900e372d3a3dc90f3fb6d6b7bf..0000000000000000000000000000000000000000
--- a/ee/spec/lib/gitlab/llm/anthropic/completions/tanuki_bot_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Llm::Anthropic::Completions::TanukiBot, feature_category: :duo_chat do
-  let_it_be(:user) { create(:user) }
-
-  let(:question) { 'A question' }
-  let(:options) { { question: question } }
-  let(:template_class) { ::Gitlab::Llm::Anthropic::Templates::TanukiBot }
-  let(:tracking_context) { { request_id: 'uuid', action: :tanuki_bot } }
-
-  let(:ai_response) do
-    instance_double(Gitlab::Llm::Anthropic::ResponseModifiers::TanukiBot, response_body: "text", errors: [], extras: {})
-  end
-
-  let(:prompt_message) do
-    build(:ai_message, :tanuki_bot, user: user, resource: user, request_id: 'uuid')
-  end
-
-  subject(:tanuki_bot) { described_class.new(prompt_message, template_class, options).execute }
-
-  describe '#execute' do
-    let(:tanuki_instance) { instance_double(::Gitlab::Llm::TanukiBot) }
-
-    it 'makes a call to ::Gitlab::Llm::TanukiBot' do
-      expect(::Gitlab::Llm::TanukiBot).to receive(:new)
-        .with(current_user: user, question: question, tracking_context: tracking_context).and_return(tanuki_instance)
-      expect(tanuki_instance).to receive(:execute).and_return(ai_response)
-
-      tanuki_bot
-    end
-
-    it 'calls ResponseService' do
-      allow(::Gitlab::Llm::TanukiBot).to receive(:new)
-        .with(current_user: user, question: question, tracking_context: tracking_context).and_return(tanuki_instance)
-      allow(tanuki_instance).to receive(:execute).and_return(ai_response)
-
-      response_modifier = ai_response
-      response_service = double
-      params = [user, user, response_modifier, { options: { request_id: 'uuid', ai_action: :tanuki_bot } }]
-
-      expect(::Gitlab::Llm::GraphqlSubscriptionResponseService).to receive(:new).with(*params).and_return(
-        response_service
-      )
-      expect(response_service).to receive(:execute)
-
-      tanuki_bot
-    end
-
-    it 'handles nil responses' do
-      allow(::Gitlab::Llm::TanukiBot).to receive(:execute).and_return(
-        Gitlab::Llm::ResponseModifiers::EmptyResponseModifier.new(nil)
-      )
-
-      expect { tanuki_bot }.not_to raise_error
-    end
-  end
-end
diff --git a/ee/spec/services/llm/tanuki_bot_service_spec.rb b/ee/spec/services/llm/tanuki_bot_service_spec.rb
deleted file mode 100644
index cbe89b647b89614f6467c1bc564533519f1883f7..0000000000000000000000000000000000000000
--- a/ee/spec/services/llm/tanuki_bot_service_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Llm::TanukiBotService, :saas, feature_category: :global_search do
-  let_it_be(:user) { create(:user) }
-
-  let_it_be(:options) { { question: 'A question' } }
-
-  subject { described_class.new(user, user, options) }
-
-  before do
-    stub_licensed_features(ai_tanuki_bot: true)
-  end
-
-  describe '#perform' do
-    let(:resource) { user }
-    let(:action_name) { :tanuki_bot }
-    let(:content) { options[:question] }
-
-    before do
-      allow(user).to receive(:any_group_with_ai_available?).and_return(true)
-    end
-
-    it_behaves_like 'schedules completion worker'
-
-    context 'when ai_global_switch feature flag is disabled' do
-      before do
-        stub_feature_flags(ai_global_switch: false)
-      end
-
-      it 'returns an error' do
-        expect(Llm::CompletionWorker).not_to receive(:perform_for)
-
-        expect(subject.execute).to be_error
-      end
-    end
-
-    context 'when tanuki_bot licensed feature is disabled' do
-      before do
-        stub_licensed_features(ai_tanuki_bot: false)
-      end
-
-      it 'returns an error' do
-        expect(Llm::CompletionWorker).not_to receive(:perform_for)
-
-        expect(subject.execute).to be_error
-      end
-    end
-  end
-end
diff --git a/rubocop/rubocop-code_reuse.yml b/rubocop/rubocop-code_reuse.yml
index 2bd3339368da83b837d0dac71bb1564f8f091f22..6f9d7902dc68124a333e336f38ce5d3ceac14a23 100644
--- a/rubocop/rubocop-code_reuse.yml
+++ b/rubocop/rubocop-code_reuse.yml
@@ -42,5 +42,4 @@ CodeReuse/ActiveRecord:
     - ee/db/fixtures/**/*.rb
    - ee/lib/tasks/**/*.rake
    - ee/lib/ee/gitlab/background_migration/**/*.rb
-    - ee/lib/gitlab/llm/open_ai/response_modifiers/tanuki_bot.rb
    - ee/lib/gitlab/usage/metrics/instrumentations/**/*.rb