diff --git a/ee/app/assets/javascripts/ai/tanuki_bot/components/app.vue b/ee/app/assets/javascripts/ai/tanuki_bot/components/app.vue
index abbcba8fa28030f52594ceed9ab0975b35358d9b..3b6c00b1da81e569f5eb79e110e3dee726a6b7d1 100644
--- a/ee/app/assets/javascripts/ai/tanuki_bot/components/app.vue
+++ b/ee/app/assets/javascripts/ai/tanuki_bot/components/app.vue
@@ -10,6 +10,7 @@ import getAiMessages from 'ee/ai/graphql/get_ai_messages.query.graphql';
 import chatMutation from 'ee/ai/graphql/chat.mutation.graphql';
 import tanukiBotMutation from 'ee/ai/graphql/tanuki_bot.mutation.graphql';
 import UserFeedback from 'ee/ai/components/user_feedback.vue';
+import Tracking from '~/tracking';
 import { i18n, GENIE_CHAT_RESET_MESSAGE } from 'ee/ai/constants';
 import AiGenieChat from 'ee/ai/components/ai_genie_chat.vue';
 import { SOURCE_TYPES, TANUKI_BOT_TRACKING_EVENT_NAME } from '../constants';
@@ -39,7 +40,7 @@ export default {
     GlLink,
     UserFeedback,
   },
-  mixins: [glFeatureFlagMixin()],
+  mixins: [glFeatureFlagMixin(), Tracking.mixin()],
   props: {
     userId: {
       type: String,
@@ -110,6 +111,9 @@ export default {
           },
         })
         .then(({ data: { aiAction = {} } = {} }) => {
+          this.track('submit_gitlab_duo_question', {
+            property: aiAction.requestId,
+          });
           this.addDuoChatMessage({
             ...aiAction,
             content: question,
diff --git a/ee/config/events/20230808101624__submit_gitlab_duo_question.yml b/ee/config/events/20230808101624__submit_gitlab_duo_question.yml
new file mode 100644
index 0000000000000000000000000000000000000000..5a22877324db269db8493af67f246d9c0b4c6586
--- /dev/null
+++ b/ee/config/events/20230808101624__submit_gitlab_duo_question.yml
@@ -0,0 +1,22 @@
+---
+description: This event represents a successful submission of a GitLab Duo question by a user.
+category: default
+action: submit_gitlab_duo_question
+label_description:
+property_description: AI Action request ID that can be used to connect this event to others that happen when the request is resolved.
+value_description:
+extra_properties:
+identifiers:
+- project
+- user
+- namespace
+product_section: data-science
+product_stage: modelops
+product_group: "group::ai framework"
+milestone: "16.3"
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128503
+distributions:
+- ee
+tiers:
+- premium
+- ultimate
diff --git a/ee/config/events/20230808111529_Gitlab__Llm__Completions__Chat_process_gitlab_duo_question.yml b/ee/config/events/20230808111529_Gitlab__Llm__Completions__Chat_process_gitlab_duo_question.yml
new file mode 100644
index 0000000000000000000000000000000000000000..51c6ddb47b99e76175396107853aa2c9ad1cab44
--- /dev/null
+++ b/ee/config/events/20230808111529_Gitlab__Llm__Completions__Chat_process_gitlab_duo_question.yml
@@ -0,0 +1,21 @@
+---
+description: This event represents a single tool used to answer a user question; over the span of a single request ID there can be multiple event records, marking that an array of tools was used to answer one question.
+category: Gitlab::Llm::Completions::Chat
+action: process_gitlab_duo_question
+label_description: Name of the GitLab Duo tool used to answer the user question
+property_description: AI Action request ID that can be used to connect this event to others that happen when the request is resolved.
+value_description: Boolean flag indicating if the chat response had status "ok"
+extra_properties:
+identifiers:
+- user
+- namespace
+product_section: data-science
+product_stage: modelops
+product_group: "group::ai framework"
+milestone: "16.3"
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128503
+distributions:
+- ee
+tiers:
+- premium
+- ultimate
diff --git a/ee/lib/gitlab/llm/completions/chat.rb b/ee/lib/gitlab/llm/completions/chat.rb
index cd61f3feb803578fd81428a8679c8ad99da4646c..777a0fd4d781ff230343f2372611e94e4fb8cbc9 100644
--- a/ee/lib/gitlab/llm/completions/chat.rb
+++ b/ee/lib/gitlab/llm/completions/chat.rb
@@ -38,6 +38,18 @@ def execute(user, resource, options)
           ::Gitlab::Llm::GraphqlSubscriptionResponseService
             .new(user, resource, response_modifier, options: response_options)
             .execute
+
+          context.tools_used.each do |tool|
+            Gitlab::Tracking.event(
+              self.class.to_s,
+              'process_gitlab_duo_question',
+              label: tool::NAME,
+              property: params[:request_id],
+              namespace: context.container,
+              user: user,
+              value: response.status == :ok ? 1 : 0
+            )
+          end
         end
 
         def tools(user)
diff --git a/ee/spec/frontend/ai/tanuki_bot/components/app_spec.js b/ee/spec/frontend/ai/tanuki_bot/components/app_spec.js
index c2a123fddb14743ea38b1f145de0595085e7a1d4..7bd26e42cf56c2b01567d021d6f37e129489a0e4 100644
--- a/ee/spec/frontend/ai/tanuki_bot/components/app_spec.js
+++ b/ee/spec/frontend/ai/tanuki_bot/components/app_spec.js
@@ -17,6 +17,8 @@ import getAiMessages from 'ee/ai/graphql/get_ai_messages.query.graphql';
 import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
 import createMockApollo from 'helpers/mock_apollo_helper';
 import { getMarkdown } from '~/rest_api';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import waitForPromises from 'helpers/wait_for_promises';
 import { helpCenterState } from '~/super_sidebar/constants';
 import {
   MOCK_USER_MESSAGE,
@@ -206,6 +208,26 @@ describe('GitLab Duo Chat', () => {
             MOCK_TANUKI_SUCCESS_RES.data.aiCompletionResponse,
           );
         });
+
+        describe('snowplow tracking', () => {
+          let trackingSpy;
+
+          beforeEach(() => {
+            trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+          });
+
+          afterEach(() => {
+            unmockTracking();
+          });
+
+          it('tracks the snowplow event on successful mutation for chat', async () => {
+            createComponent();
+            findGenieChat().vm.$emit('send-chat-prompt', MOCK_USER_MESSAGE.msg);
+
+            await waitForPromises();
+            expect(trackingSpy).toHaveBeenCalled();
+          });
+        });
       });
     });
   });
diff --git a/ee/spec/frontend/ai/tanuki_bot/mock_data.js b/ee/spec/frontend/ai/tanuki_bot/mock_data.js
index 135e81e6ef5427ed165566b179180ba52be9f425..797f5abb75c47646095843689f9f57b9b6048a38 100644
--- a/ee/spec/frontend/ai/tanuki_bot/mock_data.js
+++ b/ee/spec/frontend/ai/tanuki_bot/mock_data.js
@@ -75,7 +75,9 @@ export const MOCK_CHAT_CACHED_MESSAGES_RES = {
   },
 };
 
-export const MOCK_TANUKI_BOT_MUTATATION_RES = { data: { aiAction: { errors: [] } } };
+export const MOCK_TANUKI_BOT_MUTATATION_RES = {
+  data: { aiAction: { errors: [], requestId: '123' } },
+};
 
 export const MOCK_USER_ID = 'gid://gitlab/User/1';
 export const MOCK_RESOURCE_ID = 'gid://gitlab/Issue/1';
diff --git a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
index 63637ee5b3d1481e61bbe703d9e4f9f4bffec84a..f3081837aa334970793465c29fb86deeee3af8cc 100644
--- a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
+++ b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
@@ -11,12 +11,13 @@
   let(:expected_container) { group }
   let(:content) { 'Summarize issue' }
   let(:ai_request) { instance_double(Gitlab::Llm::Chain::Requests::Anthropic) }
-  let(:options) { { request_id: 'uuid', content: content } }
+  let(:options) { { content: content } }
   let(:container) { group }
   let(:context) do
     instance_double(
       Gitlab::Llm::Chain::GitlabContext,
-      tools_used: [::Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor]
+      tools_used: [::Gitlab::Llm::Chain::Tools::IssueIdentifier::Executor],
+      container: container
     )
   end
 
@@ -26,10 +27,10 @@
     )
   end
 
-  subject { described_class.new(nil).execute(user, resource, options) }
+  subject { described_class.new(nil, request_id: 'uuid').execute(user, resource, options) }
 
   shared_examples 'success' do
-    it 'calls the ZeroShot Agent with the right parameters' do
+    it 'calls the ZeroShot Agent with the right parameters', :snowplow do
       tools = [
         ::Gitlab::Llm::Chain::Tools::IssueIdentifier,
         ::Gitlab::Llm::Chain::Tools::JsonReader,
@@ -54,6 +55,45 @@
         .and_return(context)
 
       subject
+
+      expect_snowplow_event(
+        category: described_class.to_s,
+        label: "IssueIdentifier",
+        action: 'process_gitlab_duo_question',
+        property: 'uuid',
+        namespace: container,
+        user: user,
+        value: 1
+      )
+    end
+
+    context 'with unsuccessful response' do
+      let(:answer) do
+        ::Gitlab::Llm::Chain::Answer.new(
+          status: :error, context: context, content: content, tool: nil, is_final: true
+        )
+      end
+
+      it 'sends process_gitlab_duo_question snowplow event with value eql 0' do
+        allow_next_instance_of(::Gitlab::Llm::Chain::Agents::ZeroShot::Executor) do |instance|
+          expect(instance).to receive(:execute).and_return(answer)
+        end
+
+        allow(Gitlab::Metrics::Sli::Apdex[:llm_chat_answers]).to receive(:increment)
+        allow(::Gitlab::Llm::Chain::GitlabContext).to receive(:new).and_return(context)
+
+        subject
+
+        expect_snowplow_event(
+          category: described_class.to_s,
+          label: "IssueIdentifier",
+          action: 'process_gitlab_duo_question',
+          property: 'uuid',
+          namespace: container,
+          user: user,
+          value: 0
+        )
+      end
     end
   end