diff --git a/ee/lib/gitlab/duo/chat/react_executor.rb b/ee/lib/gitlab/duo/chat/react_executor.rb
index 8b018c70f010ce04f5dd96458694cdde5099a895..42e2fef71065ad29065c03ee347fe683f75fd981 100644
--- a/ee/lib/gitlab/duo/chat/react_executor.rb
+++ b/ee/lib/gitlab/duo/chat/react_executor.rb
@@ -296,26 +296,7 @@ def unavailable_resources_params
         attr_reader :stream_response_handler
 
         def model_metadata_params
-          if chat_feature_setting&.self_hosted?
-
-            self_hosted_model = chat_feature_setting.self_hosted_model
-
-            return {
-              provider: :openai, # for self-hosted models we support Messages API format at the moment
-              name: self_hosted_model.model,
-              endpoint: self_hosted_model.endpoint,
-              api_key: self_hosted_model.api_token,
-              identifier: self_hosted_model.identifier
-            }
-          end
-
-          return unless ::Ai::AmazonQ.connected?
-
-          {
-            provider: :amazon_q,
-            name: :amazon_q,
-            role_arn: ::Ai::Setting.instance.amazon_q_role_arn
-          }
+          ::Gitlab::Llm::AiGateway::ModelMetadata.new(feature_setting: chat_feature_setting).to_params
         end
 
         def conversation
diff --git a/ee/lib/gitlab/llm/ai_gateway/completions/base.rb b/ee/lib/gitlab/llm/ai_gateway/completions/base.rb
index 6b13e04b838b1533dd179d5ab0cfa775eeec6af8..88c0cde5f3344369079eaa3d75b089cd49cce803 100644
--- a/ee/lib/gitlab/llm/ai_gateway/completions/base.rb
+++ b/ee/lib/gitlab/llm/ai_gateway/completions/base.rb
@@ -44,16 +44,7 @@ def post_process(response)
           end
 
           def request!
-            ai_client = ::Gitlab::Llm::AiGateway::Client.new(user, service_name: service_name,
-              tracking_context: tracking_context)
-
-            request_body = { 'inputs' => inputs }
-            request_body['prompt_version'] = prompt_version unless prompt_version.nil?
-
-            response = ai_client.complete(
-              url: "#{::Gitlab::AiGateway.url}/v1/prompts/#{prompt_message.ai_action}",
-              body: request_body
-            )
+            response = perform_ai_gateway_request!
 
             return if response&.body.blank?
             return Gitlab::Json.parse(response.body) if response&.success?
@@ -67,6 +58,24 @@ def request!
             { 'detail' => DEFAULT_ERROR }
           end
 
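+          # Sends the prompt request to the AI Gateway, attaching model
+          # metadata to the request body when present.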
+          def perform_ai_gateway_request!
+            ai_client = ::Gitlab::Llm::AiGateway::Client.new(user, service_name: service_name,
+              tracking_context: tracking_context)
+
+            request_body = { 'inputs' => inputs }
+            request_body['prompt_version'] = prompt_version unless prompt_version.nil?
+
+            model_metadata_params = ::Gitlab::Llm::AiGateway::ModelMetadata.new.to_params
+            request_body['model_metadata'] = model_metadata_params if model_metadata_params.present?
+
+            ai_client.complete(
+              url: "#{::Gitlab::AiGateway.url}/v1/prompts/#{prompt_message.ai_action}",
+              body: request_body
+            )
+          end
+
           def service_name
             prompt_message.ai_action.to_sym
           end
diff --git a/ee/lib/gitlab/llm/ai_gateway/completions/resolve_vulnerability.rb b/ee/lib/gitlab/llm/ai_gateway/completions/resolve_vulnerability.rb
index 8bff12a147f467fd05a37bf4e7b039b90742505a..41800a443e5b177d126158ff9bd6f244910ae4e3 100644
--- a/ee/lib/gitlab/llm/ai_gateway/completions/resolve_vulnerability.rb
+++ b/ee/lib/gitlab/llm/ai_gateway/completions/resolve_vulnerability.rb
@@ -53,12 +53,7 @@ def inputs
           private
 
           def request!
-            ai_client = ::Gitlab::Llm::AiGateway::Client.new(user, service_name: service_name,
-              tracking_context: tracking_context)
-            response = ai_client.complete(
-              url: "#{::Gitlab::AiGateway.url}/v1/prompts/#{prompt_message.ai_action}",
-              body: { 'inputs' => inputs }
-            )
+            response = perform_ai_gateway_request!
 
             raise EmptyResponseError, "Empty response from LLM" if response&.body.blank?
 
diff --git a/ee/lib/gitlab/llm/ai_gateway/model_metadata.rb b/ee/lib/gitlab/llm/ai_gateway/model_metadata.rb
new file mode 100644
index 0000000000000000000000000000000000000000..5ed01b5bb13d9867ff1d46f5730db812588f66eb
--- /dev/null
+++ b/ee/lib/gitlab/llm/ai_gateway/model_metadata.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Gitlab
+  module Llm
+    module AiGateway
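+      # Builds the optional `model_metadata` params sent to the AI Gateway:
+      # connection details for a self-hosted model, or the Amazon Q role.
+      #
+      #   ModelMetadata.new(feature_setting: feature_setting).to_params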
+      class ModelMetadata
+        def initialize(feature_setting: nil)
+          @feature_setting = feature_setting
+        end
+
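+        # Returns self-hosted model params, Amazon Q params when connected,
+        # or nil when neither applies.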
+        def to_params
+          return self_hosted_params if feature_setting&.self_hosted?
+
+          amazon_q_params if ::Ai::AmazonQ.connected?
+        end
+
+        private
+
+        attr_reader :feature_setting
+
+        def self_hosted_params
+          self_hosted_model = feature_setting.self_hosted_model
+
+          {
+            provider: self_hosted_model.provider,
+            name: self_hosted_model.model,
+            endpoint: self_hosted_model.endpoint,
+            api_key: self_hosted_model.api_token,
+            identifier: self_hosted_model.identifier
+          }
+        end
+
+        def amazon_q_params
+          {
+            provider: :amazon_q,
+            name: :amazon_q,
+            role_arn: ::Ai::Setting.instance.amazon_q_role_arn
+          }
+        end
+      end
+    end
+  end
+end
diff --git a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
index 6436c5ac2010ce009ce17a5f0352b94e2342170f..6d7ac31e16e1fb07272715305b81185c4b4b1eb1 100644
--- a/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
+++ b/ee/lib/gitlab/llm/chain/requests/ai_gateway.rb
@@ -133,25 +133,9 @@ def request_body_agent(inputs:, unit_primitive: nil, prompt_version: nil)
 
             feature_setting = chat_feature_setting(unit_primitive: unit_primitive)
 
-            if feature_setting&.self_hosted?
-              self_hosted_model = feature_setting.self_hosted_model
-
-              params[:model_metadata] = {
-                provider: self_hosted_model.provider,
-                name: self_hosted_model.model,
-                endpoint: self_hosted_model.endpoint,
-                api_key: self_hosted_model.api_token,
-                identifier: self_hosted_model.identifier
-              }
-            end
-
-            if ::Ai::AmazonQ.connected?
-              params[:model_metadata] = {
-                provider: :amazon_q,
-                name: :amazon_q,
-                role_arn: ::Ai::Setting.instance.amazon_q_role_arn
-              }
-            end
+            model_metadata_params =
+              ::Gitlab::Llm::AiGateway::ModelMetadata.new(feature_setting: feature_setting).to_params
+            params[:model_metadata] = model_metadata_params if model_metadata_params.present?
 
             params
           end
diff --git a/ee/spec/lib/gitlab/llm/ai_gateway/completions/base_spec.rb b/ee/spec/lib/gitlab/llm/ai_gateway/completions/base_spec.rb
index 3f3701b7ac6b6472e5f7157006e081bd3bf90f4a..14d262845857ef6e446041546dd685ab1aa4f22e 100644
--- a/ee/spec/lib/gitlab/llm/ai_gateway/completions/base_spec.rb
+++ b/ee/spec/lib/gitlab/llm/ai_gateway/completions/base_spec.rb
@@ -97,6 +97,7 @@
       let(:processed_response) { { 'detail' => 'An unexpected error has occurred.' } }
 
       before do
+        allow(::Ai::AmazonQ).to receive(:connected?).and_return(false)
         allow(Gitlab::Json).to receive(:parse).and_raise(StandardError)
       end
 
@@ -143,5 +144,33 @@
         is_expected.to be(result)
       end
     end
+
+    context 'when model metadata params are present' do
+      it 'includes model_metadata in the request body' do
+        params = {
+          provider: 'provider',
+          name: 'model',
+          endpoint: 'http://example.com',
+          identifier: 'identifier'
+        }
+
+        expect_next_instance_of(::Gitlab::Llm::AiGateway::ModelMetadata) do |instance|
+          expect(instance).to receive(:to_params).and_return(params)
+        end
+
+        expect(client).to receive(:complete).with(
+          url: "#{Gitlab::AiGateway.url}/v1/prompts/#{ai_action}",
+          body: { 'inputs' => inputs, 'model_metadata' => params }
+        ).and_return(http_response)
+
+        expect(response_modifier_class).to receive(:new).with(processed_response)
+          .and_return(response_modifier)
+        expect(Gitlab::Llm::GraphqlSubscriptionResponseService).to receive(:new)
+          .with(user, resource, response_modifier, options: response_options).and_return(response_service)
+        expect(response_service).to receive(:execute).and_return(result)
+
+        is_expected.to be(result)
+      end
+    end
   end
 end
diff --git a/ee/spec/lib/gitlab/llm/ai_gateway/model_metadata_spec.rb b/ee/spec/lib/gitlab/llm/ai_gateway/model_metadata_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..6d4cb924d2f9da1835e6e317062959c568ddbec2
--- /dev/null
+++ b/ee/spec/lib/gitlab/llm/ai_gateway/model_metadata_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Llm::AiGateway::ModelMetadata, feature_category: :ai_abstraction_layer do
+  let_it_be(:self_hosted_model) { create(:ai_self_hosted_model) }
+  let_it_be(:feature_setting) { create(:ai_feature_setting, self_hosted_model: self_hosted_model) }
+
+  describe '#to_params' do
+    context 'when feature_setting is self-hosted' do
+      it 'returns self-hosted params' do
+        model_metadata = described_class.new(feature_setting: feature_setting)
+
+        expect(model_metadata.to_params).to eq({
+          provider: self_hosted_model.provider,
+          name: self_hosted_model.model,
+          endpoint: self_hosted_model.endpoint,
+          api_key: self_hosted_model.api_token,
+          identifier: self_hosted_model.identifier
+        })
+      end
+    end
+
+    context 'when feature_setting is not self-hosted and Ai::AmazonQ is connected' do
+      before do
+        allow(::Ai::AmazonQ).to receive(:connected?).and_return(true)
+        ::Ai::Setting.instance.update!(amazon_q_role_arn: "role-arn")
+      end
+
+      it 'returns amazon_q params' do
+        model_metadata = described_class.new
+
+        expect(model_metadata.to_params).to eq({
+          provider: :amazon_q,
+          name: :amazon_q,
+          role_arn: "role-arn"
+        })
+      end
+    end
+
+    context 'when feature_setting is not self-hosted and Ai::AmazonQ is not connected' do
+      before do
+        allow(::Ai::AmazonQ).to receive(:connected?).and_return(false)
+      end
+
+      it 'returns nil' do
+        model_metadata = described_class.new
+
+        expect(model_metadata.to_params).to be_nil
+      end
+    end
+  end
+end