diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 6fa9aa092d9e0a42f199f9528431f2029013212f..2004a04d278f4aee9ee711ca6bfd755a095574e9 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -34641,6 +34641,15 @@ Action to subscribe to.
 | Value | Description |
 | ----- | ----------- |
 | <a id="aiactionchat"></a>`CHAT` | Chat action. |
 
+### `AiAdditionalContextType`
+
+The type of additional context.
+
+| Value | Description |
+| ----- | ----------- |
+| <a id="aiadditionalcontexttypefile"></a>`FILE` | File content type. |
+| <a id="aiadditionalcontexttypesnippet"></a>`SNIPPET` | Snippet content type. |
+
 ### `AiMessageRole`
 
 Possible message roles for AI features.
@@ -39880,12 +39889,23 @@ be used as arguments).
 
 Only general use input types are listed here. For mutation input types, see the associated mutation type above.
 
+### `AiAdditionalContextInput`
+
+#### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="aiadditionalcontextinputcontent"></a>`content` | [`String!`](#string) | Content of the additional context. |
+| <a id="aiadditionalcontextinputname"></a>`name` | [`String!`](#string) | Name of the additional context. |
+| <a id="aiadditionalcontextinputtype"></a>`type` | [`AiAdditionalContextType!`](#aiadditionalcontexttype) | Type of the additional context. |
+
 ### `AiChatInput`
 
 #### Arguments
 
 | Name | Type | Description |
 | ---- | ---- | ----------- |
+| <a id="aichatinputadditionalcontext"></a>`additionalContext` | [`[AiAdditionalContextInput!]`](#aiadditionalcontextinput) | Additional context to be passed for the chat. |
 | <a id="aichatinputagentversionid"></a>`agentVersionId` | [`AiAgentVersionID`](#aiagentversionid) | Global ID of the agent version to answer the chat. |
 | <a id="aichatinputcontent"></a>`content` | [`String!`](#string) | Content of the message. |
 | <a id="aichatinputcurrentfile"></a>`currentFile` | [`AiCurrentFileInput`](#aicurrentfileinput) | Information about currently selected text which can be passed for additional context. |
diff --git a/ee/app/graphql/mutations/ai/action.rb b/ee/app/graphql/mutations/ai/action.rb
index fedd7e08df3123255413c3ce80702b949afe9170..e070a833d36b6fe6a8cd2811911cb6ac466738c3 100644
--- a/ee/app/graphql/mutations/ai/action.rb
+++ b/ee/app/graphql/mutations/ai/action.rb
@@ -102,6 +102,8 @@ def extract_method_params!(attributes)
         method = methods.each_key.first
         method_arguments = options.merge(methods[method])
 
+        method_arguments.delete(:additional_context) if Feature.disabled?(:duo_additional_context, current_user)
+
         [method_arguments.delete(:resource_id), method, method_arguments]
       end
     end
diff --git a/ee/app/graphql/types/ai/additional_context_input_type.rb b/ee/app/graphql/types/ai/additional_context_input_type.rb
new file mode 100644
index 0000000000000000000000000000000000000000..5a10edfe48feb95088a157d253fad7484178a3d6
--- /dev/null
+++ b/ee/app/graphql/types/ai/additional_context_input_type.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Types
+  module Ai
+    class AdditionalContextInputType < BaseInputObject
+      graphql_name 'AiAdditionalContextInput'
+
+      MAX_BODY_SIZE = ::API::CodeSuggestions::MAX_BODY_SIZE
+      MAX_CONTEXT_NAME_SIZE = ::API::CodeSuggestions::MAX_CONTEXT_NAME_SIZE
+
+      argument :type, Types::Ai::AdditionalContextTypeEnum,
+        required: true,
+        description: 'Type of the additional context.'
+
+      argument :name, GraphQL::Types::String,
+        required: true,
+        description: 'Name of the additional context.',
+        validates: { length: { maximum: MAX_CONTEXT_NAME_SIZE } }
+
+      argument :content, GraphQL::Types::String,
+        required: true,
+        description: 'Content of the additional context.',
+        validates: { length: { maximum: MAX_BODY_SIZE } }
+    end
+  end
+end
diff --git a/ee/app/graphql/types/ai/additional_context_type_enum.rb b/ee/app/graphql/types/ai/additional_context_type_enum.rb
new file mode 100644
index 0000000000000000000000000000000000000000..69f7a24db1d49c6451bf1931cf45df739891ff0a
--- /dev/null
+++ b/ee/app/graphql/types/ai/additional_context_type_enum.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Types
+  module Ai
+    class AdditionalContextTypeEnum < BaseEnum
+      graphql_name 'AiAdditionalContextType'
+      description 'The type of additional context'
+
+      ::CodeSuggestions::Prompts::CodeGeneration::AnthropicMessages::CONTENT_TYPES.each_value do |type|
+        value type.upcase, description: "#{type.capitalize} content type.", value: type
+      end
+    end
+  end
+end
diff --git a/ee/app/graphql/types/ai/chat_input_type.rb b/ee/app/graphql/types/ai/chat_input_type.rb
index 23682536c86aabf077e718986568cd4b36b219ee..8bce0784d70723d997bfebddf9337b3f2d60d83b 100644
--- a/ee/app/graphql/types/ai/chat_input_type.rb
+++ b/ee/app/graphql/types/ai/chat_input_type.rb
@@ -28,6 +28,10 @@ class ChatInputType < BaseMethodInputType
       argument :current_file, ::Types::Ai::CurrentFileInputType,
        required: false,
        description: 'Information about currently selected text which can be passed for additional context.'
+
+      argument :additional_context, [::Types::Ai::AdditionalContextInputType],
+        required: false,
+        description: 'Additional context to be passed for the chat.'
     end
   end
 end
diff --git a/ee/config/feature_flags/wip/duo_additional_context.yml b/ee/config/feature_flags/wip/duo_additional_context.yml
new file mode 100644
index 0000000000000000000000000000000000000000..65242c2824de885ee1b478ded1366e89007bb783
--- /dev/null
+++ b/ee/config/feature_flags/wip/duo_additional_context.yml
@@ -0,0 +1,9 @@
+---
+name: duo_additional_context
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/477258
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/161898
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/477503
+milestone: '17.3'
+group: group::code creation
+type: wip
+default_enabled: false
diff --git a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb
index 7129de944049ad2b244e3c2c67e3bab6a26d5e16..1fa647fac7f58609f8efddd4d3beacd0456b89e6 100644
--- a/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb
+++ b/ee/lib/gitlab/llm/chain/agents/single_action_executor.rb
@@ -124,7 +124,8 @@ def prompt_options
             current_resource_params: current_resource_params,
             current_file_params: current_file_params,
             model_metadata: model_metadata_params,
-            single_action_agent: true
+            single_action_agent: true,
+            additional_context: context.additional_context
           }
         end
 
diff --git a/ee/lib/gitlab/llm/chain/gitlab_context.rb b/ee/lib/gitlab/llm/chain/gitlab_context.rb
index d4d1c2cbb37f46c3a31dd3f3c7488c9522830454..72cfc31797d12371bf64e30583c2230c935b005f 100644
--- a/ee/lib/gitlab/llm/chain/gitlab_context.rb
+++ b/ee/lib/gitlab/llm/chain/gitlab_context.rb
@@ -5,14 +5,15 @@ module Llm
     module Chain
       class GitlabContext
         attr_accessor :current_user, :container, :resource, :ai_request, :tools_used, :extra_resource, :request_id,
-          :current_file, :agent_version
+          :current_file, :agent_version, :additional_context
 
         delegate :current_page_type, :current_page_sentence, :current_page_short_description,
           to: :authorized_resource, allow_nil: true
 
+        # rubocop:disable Metrics/ParameterLists -- we probably need to rethink this initializer
         def initialize(
           current_user:, container:, resource:, ai_request:, extra_resource: {}, request_id: nil,
-          current_file: {}, agent_version: nil
+          current_file: {}, agent_version: nil, additional_context: []
         )
           @current_user = current_user
           @container = container
@@ -23,7 +24,9 @@ def initialize(
           @request_id = request_id
           @current_file = (current_file || {}).with_indifferent_access
           @agent_version = agent_version
+          @additional_context = additional_context
         end
+        # rubocop:enable Metrics/ParameterLists
 
         def resource_serialized(content_limit:)
           return '' unless authorized_resource
diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
index 370eb33793ceb6a4bb3cbe9dd07354470fe6df1a..f354fb00616fc24d9401d309284ae553aa37ad3e 100644
--- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
+++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
@@ -165,7 +165,8 @@ def request_body_chat_2(prompt:, options: {})
              steps: options[:agent_scratchpad]
            },
            context: options[:current_resource_params],
-           current_file: options[:current_file_params]
+           current_file: options[:current_file_params],
+           additional_context: options[:additional_context]
          }.compact
 
          {
diff --git a/ee/lib/gitlab/llm/completions/chat.rb b/ee/lib/gitlab/llm/completions/chat.rb
index 4d869fefba605dfad535615ad4415a83c5de71ff..47516c1eee6fd607562ef8b58a880110fd81328e 100644
--- a/ee/lib/gitlab/llm/completions/chat.rb
+++ b/ee/lib/gitlab/llm/completions/chat.rb
@@ -43,7 +43,8 @@ def initialize(prompt_message, ai_prompt_class, options = {})
            extra_resource: options.delete(:extra_resource) || {},
            request_id: prompt_message.request_id,
            current_file: options.delete(:current_file),
-           agent_version: options[:agent_version_id] && ::Ai::AgentVersion.find_by_id(options[:agent_version_id])
+           agent_version: options[:agent_version_id] && ::Ai::AgentVersion.find_by_id(options[:agent_version_id]),
+           additional_context: ::CodeSuggestions::Context.new(Array.wrap(options.delete(:additional_context))).trimmed
          )
        end
 
diff --git a/ee/spec/graphql/types/ai/additional_context_input_type_spec.rb b/ee/spec/graphql/types/ai/additional_context_input_type_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..8340a8c9ba03c36dca61b85e4ed6fec2085f1713
--- /dev/null
+++ b/ee/spec/graphql/types/ai/additional_context_input_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AiAdditionalContextInput'], feature_category: :duo_chat do
+  include GraphqlHelpers
+
+  it { expect(described_class.graphql_name).to eq('AiAdditionalContextInput') }
+
+  it 'has the expected fields' do
+    expected_fields = %w[type name content]
+
+    expect(described_class.arguments.keys).to match_array(expected_fields)
+  end
+end
diff --git a/ee/spec/graphql/types/ai/additional_context_type_enum_spec.rb b/ee/spec/graphql/types/ai/additional_context_type_enum_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..9cf4c56dbe3d748105c6ad2902b28a115872f0b0
--- /dev/null
+++ b/ee/spec/graphql/types/ai/additional_context_type_enum_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['AiAdditionalContextType'], feature_category: :duo_chat do
+  it 'exposes all additional context types' do
+    expect(described_class.values.keys).to match_array(%w[FILE SNIPPET])
+  end
+end
diff --git a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb
index 0fc22464fa841f98fc87da797005bfc2756aca81..f3e0f6eeebc1c67bcb4dc815469a4f864be8941f 100644
--- a/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/agents/single_action_executor_spec.rb
@@ -83,6 +83,7 @@
       {
         prompt: user_input,
         options: {
+          additional_context: [],
           agent_scratchpad: [],
           conversation: "",
           single_action_agent: true,
@@ -217,6 +218,7 @@
       {
         prompt: user_input,
         options: {
+          additional_context: [],
           agent_scratchpad: [],
           conversation: "",
           single_action_agent: true,
diff --git a/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb b/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb
index 43c3990ef3d52f54debabd5b6dd3d4e290758255..c377067e05d4f2d7981889bbe0f36c077cd93712 100644
--- a/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/gitlab_context_spec.rb
@@ -8,10 +8,15 @@
   let_it_be(:project) { create(:project, group: group) }
   let(:resource) { nil }
   let(:ai_request) { instance_double(Gitlab::Llm::Chain::Requests::Anthropic) }
+  let(:additional_context) do
+    [
+      { type: 'snippet', name: 'hello world', content: 'puts "Hello, world"' }
+    ]
+  end
 
   subject(:context) do
     described_class.new(current_user: user, container: nil, resource: resource, ai_request: ai_request,
-      agent_version: instance_double(Ai::AgentVersion))
+      agent_version: instance_double(Ai::AgentVersion), additional_context: additional_context)
   end
 
   before_all do
diff --git a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
index 8e99cf79d31701ddaf6057ea2ca8603a5975ead9..9d6bc8579e2ec50a107b7c1fce6f9a4dbd976935 100644
--- a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
+++ b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
@@ -26,8 +26,20 @@
     }
   end
 
+  let(:additional_context) do
+    [
+      { type: 'snippet', name: 'hello world', content: 'puts "Hello, world"' }
+    ]
+  end
+
   let(:options) do
-    { content: content, extra_resource: extra_resource, current_file: current_file, agent_version_id: agent_version.id }
+    {
+      content: content,
+      extra_resource: extra_resource,
+      current_file: current_file,
+      agent_version_id: agent_version.id,
+      additional_context: additional_context
+    }
   end
 
   let(:container) { group }
@@ -39,7 +51,8 @@
       request_id: 'uuid',
       ai_request: ai_request,
       current_file: current_file,
-      agent_version: agent_version
+      agent_version: agent_version,
+      additional_context: additional_context
     )
   end
 
@@ -89,7 +102,8 @@
         .and_return(response_handler)
       expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new)
         .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request,
-          extra_resource: extra_resource, request_id: 'uuid', current_file: current_file, agent_version: agent_version)
+          extra_resource: extra_resource, request_id: 'uuid', current_file: current_file, agent_version: agent_version,
+          additional_context: additional_context)
         .and_return(context)
       expect(categorize_service).to receive(:execute)
       expect(::Llm::ExecuteMethodService).to receive(:new)
@@ -182,6 +196,16 @@
     end
   end
 
+  describe '.initialize' do
+    subject { described_class.new(prompt_message, nil, **options) }
+
+    it 'trims additional context' do
+      expect(::CodeSuggestions::Context).to receive(:new).with(additional_context).and_call_original
+
+      subject
+    end
+  end
+
   describe '#execute' do
     before do
       allow(Gitlab::Llm::Chain::Requests::AiGateway).to receive(:new).and_return(ai_request)
@@ -228,7 +252,7 @@
       expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new)
         .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request,
           extra_resource: extra_resource, request_id: 'uuid',
-          current_file: current_file, agent_version: agent_version)
+          current_file: current_file, agent_version: agent_version, additional_context: additional_context)
         .and_return(context)
       # This is temporarily commented out due to the following production issue:
       # https://gitlab.com/gitlab-com/gl-infra/production/-/issues/18191
@@ -380,7 +404,7 @@
       allow(::Gitlab::Llm::Chain::GitlabContext).to receive(:new)
         .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request,
          extra_resource: extra_resource, request_id: 'uuid', current_file: current_file,
-         agent_version: agent_version)
+         agent_version: agent_version, additional_context: additional_context)
        .and_return(context)
 
       expect(categorize_service).not_to receive(:execute)
@@ -413,7 +437,7 @@
       expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new)
        .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request,
          extra_resource: extra_resource, request_id: 'uuid', current_file: current_file,
-         agent_version: agent_version)
+         agent_version: agent_version, additional_context: additional_context)
        .and_return(context)
       expect(categorize_service).to receive(:execute)
       expect(::Llm::ExecuteMethodService).to receive(:new)
diff --git a/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb
index d0b8fa3394b70e24a3a2118961d6005359d3952b..78ace2ba9a4399fd7a1dee778b9d19d28c57cd34 100644
--- a/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb
+++ b/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb
@@ -105,4 +105,37 @@
       expect(graphql_mutation_response(:ai_action)['errors']).to eq([])
     end
   end
+
+  context 'when additional_context is present' do
+    let(:additional_context) do
+      [
+        { type: 'SNIPPET', name: 'hello world', content: 'puts "Hello, world"' }
+      ]
+    end
+
+    let(:expected_additional_context) do
+      [
+        { type: 'snippet', name: 'hello world', content: 'puts "Hello, world"' }
+      ]
+    end
+
+    let(:params) do
+      { chat: { resource_id: resource&.to_gid, content: "summarize", additional_context: additional_context } }
+    end
+
+    it 'successfully performs a chat request' do
+      expect(Llm::CompletionWorker).to receive(:perform_for).with(
+        an_object_having_attributes(
+          user: current_user,
+          resource: resource,
+          ai_action: :chat,
+          content: "summarize"),
+        hash_including(additional_context: expected_additional_context)
+      )
+
+      post_graphql_mutation(mutation, current_user: current_user)
+
+      expect(graphql_mutation_response(:ai_action)['errors']).to eq([])
+    end
+  end
 end
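
For reference, once the `duo_additional_context` flag is enabled, a client request that exercises the new `additionalContext` argument would look roughly like the sketch below. The input shape follows the `AiChatInput` and `AiAdditionalContextInput` types documented in the first hunk; the example project GID and the selected payload fields (`requestId`, `errors`) are illustrative assumptions rather than part of this change.

```graphql
# Sketch only: input fields mirror AiChatInput/AiAdditionalContextInput above;
# the resource GID and the selected payload fields are assumed for illustration.
mutation {
  aiAction(
    input: {
      chat: {
        resourceId: "gid://gitlab/Project/1"
        content: "summarize"
        additionalContext: [
          { type: SNIPPET, name: "hello world", content: "puts \"Hello, world\"" }
        ]
      }
    }
  ) {
    requestId
    errors
  }
}
```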