diff --git a/ee/lib/api/chat.rb b/ee/lib/api/chat.rb
index 2e358ba9587c9b20692302beb05b7189ce4c0eee..4496950f8a807e9f7d9dc5e752f55e8c2f0c36df 100644
--- a/ee/lib/api/chat.rb
+++ b/ee/lib/api/chat.rb
@@ -28,6 +28,13 @@ def find_resource(parameters)
         object = parameters[:resource_type].camelize.safe_constantize
         object.find(parameters[:resource_id])
       end
+
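+      # Save a '/reset' chat message to reset the chat history.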
+      def reset_chat(action_name, message_attributes)
+        message_attributes[:content] = '/reset'
+        prompt_message = ::Gitlab::Llm::AiMessage.for(action: action_name).new(message_attributes)
+        prompt_message.save!
+      end
     end
 
     namespace 'chat' do
@@ -38,6 +44,8 @@ def find_resource(parameters)
           optional :resource_id, type: Integer, desc: 'ID of resource.'
           optional :referer_url, type: String, limit: 1000, desc: 'Referer URL'
           optional :client_subscription_id, type: String, limit: 500, desc: 'Client Subscription ID'
+          optional :with_clean_history, type: Boolean,
+            desc: 'Indicates whether the chat history should be reset before and after the request'
         end
         post do
           safe_params = declared_params(include_missing: false)
@@ -46,9 +54,9 @@ def find_resource(parameters)
           not_found! unless user_allowed?(resource)
           action_name = 'chat'
 
+          options = safe_params.slice(:referer_url)
           message_attributes = {
             request_id: SecureRandom.uuid,
-            content: safe_params[:content],
             role: ::Gitlab::Llm::AiMessage::ROLE_USER,
             ai_action: action_name,
             user: current_user,
@@ -56,10 +64,15 @@ def find_resource(parameters)
             client_subscription_id: safe_params[:client_subscription_id]
           }
 
+          reset_chat(action_name, message_attributes) if safe_params[:with_clean_history]
+
+          message_attributes[:content] = safe_params[:content]
+
           prompt_message = ::Gitlab::Llm::AiMessage.for(action: action_name).new(message_attributes)
-          options = safe_params.slice(:referer_url)
           ai_response = Llm::Internal::CompletionService.new(prompt_message, options).execute
 
+          reset_chat(action_name, message_attributes) if safe_params[:with_clean_history]
+
           present ai_response.response_body
         end
       end
diff --git a/ee/spec/requests/api/chat_spec.rb b/ee/spec/requests/api/chat_spec.rb
index a585da5284a0531b16f2f962904dfc62dcd26040..284c5f91dbba0163bb9298206b97e4a67ca47e66 100644
--- a/ee/spec/requests/api/chat_spec.rb
+++ b/ee/spec/requests/api/chat_spec.rb
@@ -225,6 +225,25 @@
             post_api
           end
         end
+
+        context 'when with_clean_history is true' do
+          let(:params) { { content: content, with_clean_history: true } }
+          let(:resource) { current_user }
+          let(:reset_message) { instance_double(Gitlab::Llm::ChatMessage) }
+
+          it 'resets the chat history before and after sending the prompt' do
+            reset_params = chat_message_params.dup
+            reset_params[:content] = '/reset'
+
+            expect(Gitlab::Llm::ChatMessage).to receive(:new).with(reset_params).twice.and_return(reset_message)
+            expect(reset_message).to receive(:save!).twice
+            expect(Gitlab::Llm::ChatMessage).to receive(:new).with(chat_message_params).and_return(chat_message)
+            expect(Llm::Internal::CompletionService).to receive(:new).with(chat_message, options).and_return(chat)
+            expect(chat).to receive(:execute)
+
+            post_api
+          end
+        end
       end
     end
   end