diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 501e58fb3b390756bf1bf136c136548ab20f5204..8029aba9d06bcdb17c0568d1006b634310a01d1e 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -28678,7 +28678,8 @@ see the associated mutation type above.
 | Name | Type | Description |
 | ---- | ---- | ----------- |
 | <a id="aichatinputcontent"></a>`content` | [`String!`](#string) | Content of the message. |
-| <a id="aichatinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |
+| <a id="aichatinputnamespaceid"></a>`namespaceId` | [`NamespaceID`](#namespaceid) | Global ID of the namespace the user is acting on. |
+| <a id="aichatinputresourceid"></a>`resourceId` | [`AiModelID`](#aimodelid) | Global ID of the resource to mutate. |
 
 ### `AiExplainCodeInput`
 
diff --git a/ee/app/graphql/mutations/ai/action.rb b/ee/app/graphql/mutations/ai/action.rb
index 57347f3c44e8a2c5e27f21812834228c3d5a8a68..0c03e933aad1febf46fc6968d2b0d2e32f35e54d 100644
--- a/ee/app/graphql/mutations/ai/action.rb
+++ b/ee/app/graphql/mutations/ai/action.rb
@@ -34,7 +34,7 @@ def resolve(**attributes)
         verify_rate_limit!
 
         resource_id, method, options = extract_method_params!(attributes)
-        resource = authorized_find!(id: resource_id)
+        resource = resource_id&.then { |id| authorized_find!(id: id) }
 
         response = Llm::ExecuteMethodService.new(current_user, resource, method, options).execute
 
diff --git a/ee/app/graphql/subscriptions/ai_completion_response.rb b/ee/app/graphql/subscriptions/ai_completion_response.rb
index 0aacca8457e04bc091b6e12d8a62e89400863b4d..6676933a293f89f8c30264b426c321d58a9292f5 100644
--- a/ee/app/graphql/subscriptions/ai_completion_response.rb
+++ b/ee/app/graphql/subscriptions/ai_completion_response.rb
@@ -6,7 +6,7 @@ class AiCompletionResponse < BaseSubscription
     payload_type ::Types::Ai::AiResponseType
 
     argument :resource_id, Types::GlobalIDType[::Ai::Model],
-      required: true,
+      required: false,
       description: 'ID of the resource.'
 
     argument :user_id, ::Types::GlobalIDType[::User],
diff --git a/ee/app/graphql/types/ai/chat_input_type.rb b/ee/app/graphql/types/ai/chat_input_type.rb
index 221ec3db0b2130c2cc14eaaafeba7ce7a433c6d0..b0dc2b8b27c404c51a5cfd28e2b769545c4009ce 100644
--- a/ee/app/graphql/types/ai/chat_input_type.rb
+++ b/ee/app/graphql/types/ai/chat_input_type.rb
@@ -5,6 +5,16 @@ module Ai
     class ChatInputType < BaseMethodInputType
       graphql_name 'AiChatInput'
 
+      argument :resource_id,
+        ::Types::GlobalIDType[::Ai::Model],
+        required: false,
+        description: "Global ID of the resource to mutate."
+
+      argument :namespace_id,
+        ::Types::GlobalIDType[::Namespace],
+        required: false,
+        description: "Global ID of the namespace the user is acting on."
+
       argument :content, GraphQL::Types::String,
         required: true,
         validates: { allow_blank: false },
diff --git a/ee/app/services/llm/base_service.rb b/ee/app/services/llm/base_service.rb
index f8ad6442a2dc3db604723789c728c32675a3dcac..33cc1ef275ee888583e892f97e43c04dfd800095 100644
--- a/ee/app/services/llm/base_service.rb
+++ b/ee/app/services/llm/base_service.rb
@@ -23,12 +23,7 @@ def execute
     def valid?
       return false if resource.respond_to?(:resource_parent) && !resource.resource_parent.member?(user)
 
-      case resource
-      when User
-        ai_integration_enabled? && user == resource && user_can_send_to_ai?
-      else
-        ai_integration_enabled?
-      end
+      ai_integration_enabled? && user_can_send_to_ai?
     end
 
     private
@@ -51,19 +46,19 @@ def worker_perform(user, resource, action_name, options)
       logger.debug(
         message: "Enqueuing CompletionWorker",
         user_id: user.id,
-        resource_id: resource.id,
-        resource_class: resource.class.name,
+        resource_id: resource&.id,
+        resource_class: resource&.class&.name,
         request_id: request_id,
         action_name: action_name
       )
 
       if options[:sync] == true
         response_data = ::Llm::CompletionWorker.new.perform(
-          user.id, resource.id, resource.class.name, action_name, options
+          user.id, resource&.id, resource&.class&.name, action_name, options
         )
         payload.merge!(response_data)
       else
-        ::Llm::CompletionWorker.perform_async(user.id, resource.id, resource.class.name, action_name, options)
+        ::Llm::CompletionWorker.perform_async(user.id, resource&.id, resource&.class&.name, action_name, options)
       end
 
       success(payload)
@@ -73,7 +68,6 @@ def ai_integration_enabled?
       Feature.enabled?(:openai_experimentation)
     end
 
-    # This check is used for features that do not act on a specific namespace, and the `resource` is a `User`.
     # https://gitlab.com/gitlab-org/gitlab/-/issues/413520
     def user_can_send_to_ai?
       return true unless ::Gitlab.com?
diff --git a/ee/app/workers/llm/completion_worker.rb b/ee/app/workers/llm/completion_worker.rb
index f9e9533dcde88fd17440ed6770662fd138e06ea5..4bfc1c01f9d1a85895387a5524020bf538657586 100644
--- a/ee/app/workers/llm/completion_worker.rb
+++ b/ee/app/workers/llm/completion_worker.rb
@@ -26,8 +26,7 @@ def perform(user_id, resource_id, resource_class, ai_action_name, options = {})
       return unless user
 
       resource = find_resource(resource_id, resource_class)
-      return unless resource
-      return unless user.can?("read_#{resource.to_ability_name}", resource)
+      return if resource && !user.can?("read_#{resource.to_ability_name}", resource)
 
       params = options.extract!(:request_id, :internal_request)
       ai_completion = ::Gitlab::Llm::CompletionsFactory.completion(ai_action_name.to_sym, params)
@@ -43,6 +42,8 @@ def logger
     end
 
     def find_resource(resource_id, resource_class)
+      return unless resource_id
+
       resource_class.classify.constantize.find_by_id(resource_id)
     end
   end
diff --git a/ee/lib/gitlab/llm/graphql_subscription_response_service.rb b/ee/lib/gitlab/llm/graphql_subscription_response_service.rb
index 07462e5fd61512871eadebec65588268278c6c9d..768e079916d5022a94547c0df63b5b697ef31b4f 100644
--- a/ee/lib/gitlab/llm/graphql_subscription_response_service.rb
+++ b/ee/lib/gitlab/llm/graphql_subscription_response_service.rb
@@ -17,7 +17,7 @@ def execute
         data = {
           id: SecureRandom.uuid,
           request_id: options[:request_id],
-          model_name: resource.class.name,
+          model_name: resource&.class&.name,
           # todo: do we need to sanitize/refine this response in any ways?
           response_body: generate_response_body(response_modifier.response_body),
           errors: response_modifier.errors,
@@ -32,8 +32,8 @@ def execute
         response_data = data.slice(:request_id, :errors, :role).merge(content: data[:response_body])
 
         unless options[:internal_request]
-          Gitlab::Llm::Cache.new(user).add(response_data)
-          GraphqlTriggers.ai_completion_response(user.to_global_id, resource.to_global_id, data)
+          Gitlab::Llm::Cache.new(user).add(response_data) unless options[:skip_cache]
+          GraphqlTriggers.ai_completion_response(user.to_global_id, resource&.to_global_id, data)
         end
 
         response_data
@@ -44,7 +44,7 @@ def execute
       attr_reader :user, :resource, :response_modifier, :options, :logger
 
       def generate_response_body(response_body)
-        return response_body if options[:markup_format].nil? || options[:markup_format].to_sym == :raw
+        return response_body if options[:markup_format].nil? || options[:markup_format].to_sym == :raw || resource.nil?
 
         banzai_options = { only_path: false, pipeline: :full, current_user: user }
 
diff --git a/ee/spec/graphql/mutations/ai/action_spec.rb b/ee/spec/graphql/mutations/ai/action_spec.rb
index 1ac9b6190ca64dd7fbed4becaca524b003043bc9..29a3bfdda9013c9ead0aa614f6f1bb3f1516b5fa 100644
--- a/ee/spec/graphql/mutations/ai/action_spec.rb
+++ b/ee/spec/graphql/mutations/ai/action_spec.rb
@@ -126,6 +126,28 @@
           expect(subject[:request_id]).to eq(request_id)
         end
 
+        context 'when resource is null' do
+          let(:input) { { chat: { resource_id: nil } } }
+          let(:expected_options) { {} }
+
+          it 'calls Llm::ExecuteMethodService' do
+            expect_next_instance_of(
+              Llm::ExecuteMethodService,
+              user,
+              nil,
+              :chat,
+              expected_options
+            ) do |svc|
+              expect(svc)
+                .to receive(:execute)
+                .and_return(ServiceResponse.success(payload: { request_id: request_id }))
+            end
+
+            expect(subject[:errors]).to be_empty
+            expect(subject[:request_id]).to eq(request_id)
+          end
+        end
+
         context 'when Llm::ExecuteMethodService errors out' do
           it 'returns errors' do
             expect_next_instance_of(
diff --git a/ee/spec/lib/gitlab/llm/chain/tools/summarize_comments/executor_spec.rb b/ee/spec/lib/gitlab/llm/chain/tools/summarize_comments/executor_spec.rb
index 7ca56c4769aef1484fb37a5cf07adcc7c8a92775..65e02c7220e2fef8fbe70e3483976c1c8d0515ba 100644
--- a/ee/spec/lib/gitlab/llm/chain/tools/summarize_comments/executor_spec.rb
+++ b/ee/spec/lib/gitlab/llm/chain/tools/summarize_comments/executor_spec.rb
@@ -23,7 +23,7 @@
 
   describe '#execute', :saas do
     let_it_be(:user) { create(:user) }
-    let_it_be(:group) { create(:group_with_plan, plan: :ultimate_plan) }
+    let_it_be_with_reload(:group) { create(:group_with_plan, plan: :ultimate_plan) }
     let_it_be(:project) { create(:project, group: group) }
     let_it_be(:issue1) { create(:issue, project: project) }
 
@@ -31,8 +31,8 @@
       stub_application_setting(check_namespace_plan: true)
       stub_licensed_features(summarize_notes: true, ai_features: true)
 
-      project.add_developer(user)
-      project.root_ancestor.update!(experiment_features_enabled: true, third_party_ai_features_enabled: true)
+      group.add_developer(user)
+      group.update!(experiment_features_enabled: true, third_party_ai_features_enabled: true)
     end
 
     context 'when issue is identified' do
diff --git a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
index 53e1258387afeaa15a39079957ba97d0c8c059b0..6442c918a6a0e9620c10ffe12ab462e5d3d8e3b9 100644
--- a/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
+++ b/ee/spec/lib/gitlab/llm/completions/chat_spec.rb
@@ -8,6 +8,7 @@
   let_it_be(:project) { create(:project, group: group) }
   let_it_be(:resource) { create(:issue, project: project) }
 
+  let(:expected_container) { group }
   let(:content) { 'Summarize issue' }
   let(:ai_request) { instance_double(Gitlab::Llm::Chain::Requests::Anthropic) }
   let(:context) { instance_double(Gitlab::Llm::Chain::GitlabContext) }
@@ -41,7 +42,8 @@
       end
 
       expect(::Gitlab::Llm::Chain::GitlabContext).to receive(:new)
-        .with(current_user: user, container: container, resource: resource, ai_request: ai_request).and_return(context)
+        .with(current_user: user, container: expected_container, resource: resource, ai_request: ai_request)
+        .and_return(context)
 
       subject
     end
@@ -58,9 +60,17 @@
 
     context 'when resource is a user' do
       let(:container) { nil }
+      let(:expected_container) { nil }
       let_it_be(:resource) { user }
 
       it_behaves_like 'success'
     end
+
+    context 'when resource is nil' do
+      let(:resource) { nil }
+      let(:expected_container) { nil }
+
+      it_behaves_like 'success'
+    end
   end
 end
diff --git a/ee/spec/lib/gitlab/llm/graphql_subscription_response_service_spec.rb b/ee/spec/lib/gitlab/llm/graphql_subscription_response_service_spec.rb
index 001732cab2fd584590f40ab519ee8f880eaefd4c..7123500aa92633fd8cab7689adf27d83dd33edac 100644
--- a/ee/spec/lib/gitlab/llm/graphql_subscription_response_service_spec.rb
+++ b/ee/spec/lib/gitlab/llm/graphql_subscription_response_service_spec.rb
@@ -38,7 +38,7 @@
     let(:payload) do
       {
         id: uuid,
-        model_name: resource.class.name,
+        model_name: expected_resource_class_name,
         response_body: response_body,
         request_id: 'uuid',
         role: 'assistant',
@@ -53,7 +53,7 @@
     it 'triggers subscription' do
       expect(GraphqlTriggers)
         .to receive(:ai_completion_response)
-        .with(user.to_global_id, resource.to_global_id, payload)
+        .with(user.to_global_id, expected_resource_gid, payload)
 
       subject
     end
@@ -81,6 +81,9 @@
 
     let_it_be(:resource) { create(:merge_request, source_project: project) }
 
+    let(:expected_resource_class_name) { resource.class.name }
+    let(:expected_resource_gid) { resource.to_global_id }
+
     context 'without user' do
       let(:user) { nil }
 
@@ -127,5 +130,14 @@
         expect(subject[:content]).to eq(response_body)
       end
     end
+
+    context 'for an empty resource' do
+      let_it_be(:resource) { nil }
+
+      let(:expected_resource_class_name) { nil }
+      let(:expected_resource_gid) { nil }
+
+      it_behaves_like 'graphql subscription response'
+    end
   end
 end
diff --git a/ee/spec/requests/api/ai/llm/git_command_spec.rb b/ee/spec/requests/api/ai/llm/git_command_spec.rb
index 59655a822ba631dc297b4f09eff8592ac2358d23..d4e8fc3e119e4ae5cea54c1895b056c8a95075d7 100644
--- a/ee/spec/requests/api/ai/llm/git_command_spec.rb
+++ b/ee/spec/requests/api/ai/llm/git_command_spec.rb
@@ -2,7 +2,7 @@
 
 require 'spec_helper'
 
-RSpec.describe API::Ai::Llm::GitCommand, feature_category: :source_code_management do
+RSpec.describe API::Ai::Llm::GitCommand, :saas, feature_category: :source_code_management do
   let_it_be(:current_user) { create :user }
 
   let(:header) { { 'Authorization' => ['Bearer test-key'], 'Content-Type' => ['application/json'] } }
@@ -12,6 +12,7 @@
   before do
     stub_application_setting(openai_api_key: 'test-key')
     stub_licensed_features(ai_git_command: true)
+    stub_ee_application_setting(should_check_namespace_plan: true)
   end
 
   describe 'POST /ai/llm/git_command', :saas do
diff --git a/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb
index ef2ef2692d24897a1846bca5ec216316ac80ecad..ebd4bfec8592b30a4695da8fa8f433fcb5004346 100644
--- a/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb
+++ b/ee/spec/requests/api/graphql/mutations/projects/chat_spec.rb
@@ -9,9 +9,10 @@
   let_it_be(:project) { create(:project, :public, group: group) }
   let_it_be(:current_user) { create(:user, developer_projects: [project]) }
   let_it_be(:resource) { create(:issue, project: project) }
+  let(:resource_id) { resource.to_gid }
 
   let(:mutation) do
-    params = { chat: { resource_id: resource.to_gid, content: "summarize" } }
+    params = { chat: { resource_id: resource_id, content: "summarize" } }
 
     graphql_mutation(:ai_action, params) do
       <<-QL.strip_heredoc
@@ -26,6 +27,20 @@
 
   include_context 'with ai features enabled for group'
 
+  context 'when resource is nil' do
+    let(:resource_id) { nil }
+
+    it 'successfully performs a chat request' do
+      expect(Llm::CompletionWorker).to receive(:perform_async).with(
+        current_user.id, nil, nil, :chat, {
+          content: "summarize", markup_format: :raw, request_id: an_instance_of(String)
+        }
+      )
+
+      post_graphql_mutation(mutation, current_user: current_user)
+    end
+  end
+
   context 'when resource is an issue' do
     it 'successfully performs a request' do
       expect(Llm::CompletionWorker).to receive(:perform_async).with(
diff --git a/ee/spec/requests/api/graphql/mutations/projects/explain_code_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/explain_code_spec.rb
index 281f3a83cff70f61c8455f5ab442f5cd0a54f774..296f486557b5b76d5b90d4b6fdb89286837c4f7d 100644
--- a/ee/spec/requests/api/graphql/mutations/projects/explain_code_spec.rb
+++ b/ee/spec/requests/api/graphql/mutations/projects/explain_code_spec.rb
@@ -43,6 +43,10 @@
     )
   end
 
+  before_all do
+    group.add_developer(current_user)
+  end
+
   it 'successfully performs an explain code request' do
     allow(SecureRandom).to receive(:uuid).and_return(uuid)
     expect(Llm::CompletionWorker).to receive(:perform_async).with(
diff --git a/ee/spec/requests/api/graphql/mutations/projects/fill_in_merge_request_template_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/fill_in_merge_request_template_spec.rb
index 367d2d8ffa6dc8543d40d79ec13bd2ff03b20847..aa3489cdf6be75531f4536d7487f01368bccf47e 100644
--- a/ee/spec/requests/api/graphql/mutations/projects/fill_in_merge_request_template_spec.rb
+++ b/ee/spec/requests/api/graphql/mutations/projects/fill_in_merge_request_template_spec.rb
@@ -35,6 +35,10 @@
     group.namespace_settings.update!(third_party_ai_features_enabled: true, experiment_features_enabled: true)
   end
 
+  before_all do
+    group.add_developer(current_user)
+  end
+
   it 'successfully performs an explain code request' do
     expect(Llm::CompletionWorker).to receive(:perform_async).with(
       current_user.id,
diff --git a/ee/spec/requests/api/graphql/mutations/projects/generate_commit_message_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/generate_commit_message_spec.rb
index 0068ee8a6309e9a8dac8f72a91ee76aeef78dfd8..257bc4b27850c108251b14b444d985cdb1edff5b 100644
--- a/ee/spec/requests/api/graphql/mutations/projects/generate_commit_message_spec.rb
+++ b/ee/spec/requests/api/graphql/mutations/projects/generate_commit_message_spec.rb
@@ -27,7 +27,11 @@
     group.namespace_settings.update!(third_party_ai_features_enabled: true, experiment_features_enabled: true)
   end
 
-  it 'successfully performs an explain code request' do
+  before_all do
+    group.add_developer(current_user)
+  end
+
+  it 'successfully performs a generate commit message request' do
     expect(Llm::CompletionWorker).to receive(:perform_async).with(
       current_user.id, merge_request.id, "MergeRequest", :generate_commit_message, {
         markup_format: :raw, request_id: an_instance_of(String)
diff --git a/ee/spec/requests/api/graphql/mutations/projects/generate_test_file_spec.rb b/ee/spec/requests/api/graphql/mutations/projects/generate_test_file_spec.rb
index 589b673049ea2feee362b55be66c19b3288a99ba..08cd157b00269c75d4d86a5222de340ef4efe1f6 100644
--- a/ee/spec/requests/api/graphql/mutations/projects/generate_test_file_spec.rb
+++ b/ee/spec/requests/api/graphql/mutations/projects/generate_test_file_spec.rb
@@ -28,6 +28,10 @@
     group.namespace_settings.update!(third_party_ai_features_enabled: true, experiment_features_enabled: true)
   end
 
+  before_all do
+    group.add_developer(current_user)
+  end
+
   it 'successfully performs an explain code request' do
     expect(Llm::CompletionWorker).to receive(:perform_async).with(
       current_user.id, merge_request.id, "MergeRequest", :generate_test_file, {
diff --git a/ee/spec/services/llm/analyze_ci_job_failure_service_spec.rb b/ee/spec/services/llm/analyze_ci_job_failure_service_spec.rb
index 1dc6f3902c4b87d1e0b7546691b31cdf637dc17f..4d64269bdc4ca2639f7d5a7ac60f69959662bbcf 100644
--- a/ee/spec/services/llm/analyze_ci_job_failure_service_spec.rb
+++ b/ee/spec/services/llm/analyze_ci_job_failure_service_spec.rb
@@ -9,7 +9,8 @@
 
   describe '#perform', :saas do
     let_it_be(:user) { create(:user) }
-    let_it_be(:project) { create(:project) }
+    let_it_be_with_reload(:group) { create(:group_with_plan, plan: :ultimate_plan) }
+    let_it_be(:project) { create(:project, group: group) }
     let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
     let_it_be(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
 
@@ -21,7 +22,7 @@
 
         if has_permission
           allow(job).to receive(:debug_mode?).and_return(false)
-          project.add_maintainer(user)
+          group.add_developer(user)
         end
 
         allow(Gitlab::Llm::StageCheck).to receive(:available?).and_return(stage_avalible)
@@ -34,6 +35,7 @@
       end
 
       with_them do
+        include_context 'with ai features enabled for group'
         include_context 'with prerequisites'
 
         it 'is successful' do
diff --git a/ee/spec/services/llm/base_service_spec.rb b/ee/spec/services/llm/base_service_spec.rb
index 1c05745b85a330407d9abf7dbf366f28465363d9..00d2d755bb10f25b13cfde0a63328632f0fb330b 100644
--- a/ee/spec/services/llm/base_service_spec.rb
+++ b/ee/spec/services/llm/base_service_spec.rb
@@ -43,6 +43,14 @@ def perform
 
       expect(subject.execute).to be_success
     end
+
+    context 'when resource is nil' do
+      let(:resource) { nil }
+
+      it 'is successful' do
+        expect(subject.execute).to be_success
+      end
+    end
   end
 
   context 'when user has no access' do
@@ -55,9 +63,7 @@ def perform
       group.add_developer(user)
     end
 
-    it_behaves_like 'raises a NotImplementedError'
-
-    context 'when ai integration is not enabled' do
+    context 'when openai_experimentation feature flag is not enabled' do
       before do
         stub_feature_flags(openai_experimentation: false)
       end
@@ -65,6 +71,18 @@ def perform
       it_behaves_like 'returns an error'
     end
 
+    context 'when experimental features are disabled for the group' do
+      include_context 'with experiment features disabled for group'
+
+      it_behaves_like 'returns an error'
+    end
+
+    context 'when third party features are disabled for the group' do
+      include_context 'with third party features disabled for group'
+
+      it_behaves_like 'returns an error'
+    end
+
     context 'when ai features are enabled' do
       let(:expected_resource_id) { resource.id }
       let(:expected_resource_class) { resource.class.name.to_s }
@@ -85,26 +103,12 @@ def perform
         it_behaves_like 'success when implemented'
       end
 
-      context 'when resource is not the current user' do
-        let_it_be(:resource) { create(:user) }
-
-        it_behaves_like 'returns an error'
-      end
-    end
-
-    context 'when resource is a user' do
-      let_it_be(:resource) { user }
+      context 'when resource is nil' do
+        let_it_be(:resource) { nil }
+        let(:expected_resource_id) { nil }
+        let(:expected_resource_class) { nil }
 
-      context 'when third party features are disabled' do
-        include_context 'with third party features disabled for group'
-
-        it_behaves_like 'returns an error'
-      end
-
-      context 'when experiment features are disabled' do
-        include_context 'with experiment features disabled for group'
-
-        it_behaves_like 'returns an error'
+        it_behaves_like 'success when implemented'
       end
     end
   end
diff --git a/ee/spec/services/llm/chat_service_spec.rb b/ee/spec/services/llm/chat_service_spec.rb
index 4c6e6a8c4a4aac3fb0c838a3f209405253657b8c..bef341fb99c059ae596a7a3d26bd7d5fd55e8c01 100644
--- a/ee/spec/services/llm/chat_service_spec.rb
+++ b/ee/spec/services/llm/chat_service_spec.rb
@@ -23,6 +23,11 @@
     context 'when ai features are enabled for the group' do
       include_context 'with ai features enabled for group'
 
+      before do
+        stub_feature_flags(gitlab_duo: user)
+        allow(Gitlab::Llm::StageCheck).to receive(:available?).with(group, :chat).and_return(stage_check_available)
+      end
+
       context 'when user is part of the group' do
         before do
           group.add_developer(user)
@@ -60,7 +65,6 @@
       context 'when user is not part of the group' do
         it 'returns an error' do
           expect(Llm::CompletionWorker).not_to receive(:perform_async)
-
           expect(subject.execute).to be_error
         end
       end
diff --git a/ee/spec/services/llm/execute_method_service_spec.rb b/ee/spec/services/llm/execute_method_service_spec.rb
index 6de3bff63bc5b201ace38a3564711f5295a4c158..7ee8080ca88765948b6e16ced6d98bbc645a8c96 100644
--- a/ee/spec/services/llm/execute_method_service_spec.rb
+++ b/ee/spec/services/llm/execute_method_service_spec.rb
@@ -134,6 +134,13 @@
         it_behaves_like 'successful tracking'
       end
 
+      context 'when resource is nil' do
+        let(:resource) { nil }
+        let(:expected_params) { default_params.merge(namespace: nil, project: nil) }
+
+        it_behaves_like 'successful tracking'
+      end
+
       context 'when service responds with an error' do
         let(:success) { false }
         let(:expected_params) { default_params.merge(property: "error") }
diff --git a/ee/spec/services/llm/explain_code_service_spec.rb b/ee/spec/services/llm/explain_code_service_spec.rb
index 859bb4d89c7a75b30c4ed10adba8a89f15dba43c..9e7ead95b43fe4858f3c9f822bf7861f25a8ffef 100644
--- a/ee/spec/services/llm/explain_code_service_spec.rb
+++ b/ee/spec/services/llm/explain_code_service_spec.rb
@@ -3,7 +3,7 @@
 require 'spec_helper'
 
 RSpec.describe Llm::ExplainCodeService, :saas, feature_category: :source_code_management do
-  let_it_be(:group) { create(:group_with_plan, plan: :ultimate_plan) }
+  let_it_be_with_reload(:group) { create(:group_with_plan, plan: :ultimate_plan) }
   let_it_be(:user) { create(:user) }
   let_it_be(:project) { create(:project, group: group) }
 
@@ -24,84 +24,89 @@
   before do
     stub_application_setting(check_namespace_plan: true)
     stub_licensed_features(explain_code: true, ai_features: true)
-    project.add_guest(user)
-    project.root_ancestor.update!(experiment_features_enabled: experiment_features_enabled,
+    group.update!(experiment_features_enabled: experiment_features_enabled,
       third_party_ai_features_enabled: third_party_features_enabled)
   end
 
   describe '#perform' do
-    it_behaves_like 'completion worker sync and async' do
-      let(:resource) { project }
-      let(:action_name) { :explain_code }
-      let(:content) { 'Explain code' }
-    end
-
-    context 'when explain_code_vertex_ai feature flag is disabled' do
+    context 'when user is a member of the group' do
       before do
-        stub_feature_flags(explain_code_vertex_ai: false)
+        group.add_developer(user)
       end
 
       it_behaves_like 'completion worker sync and async' do
         let(:resource) { project }
-        let(:action_name) { :explain_code_open_ai }
+        let(:action_name) { :explain_code }
         let(:content) { 'Explain code' }
       end
-    end
 
-    context 'when explain_code_snippet feature flag is disabled' do
-      before do
-        stub_feature_flags(explain_code_snippet: false)
+      context 'when explain_code_vertex_ai feature flag is disabled' do
+        before do
+          stub_feature_flags(explain_code_vertex_ai: false)
+        end
+
+        it_behaves_like 'completion worker sync and async' do
+          let(:resource) { project }
+          let(:action_name) { :explain_code_open_ai }
+          let(:content) { 'Explain code' }
+        end
       end
 
-      it 'returns an error' do
-        expect(Llm::CompletionWorker).not_to receive(:perform_async)
+      context 'when explain_code_snippet feature flag is disabled' do
+        before do
+          stub_feature_flags(explain_code_snippet: false)
+        end
 
-        expect(subject.execute).to be_error
-      end
-    end
+        it 'returns an error' do
+          expect(Llm::CompletionWorker).not_to receive(:perform_async)
 
-    context 'when explain_code licensed feature is disabled' do
-      before do
-        stub_licensed_features(explain_code: false)
+          expect(subject.execute).to be_error
+        end
       end
 
-      it 'returns an error' do
-        expect(Llm::CompletionWorker).not_to receive(:perform_async)
+      context 'when explain_code licensed feature is disabled' do
+        before do
+          stub_licensed_features(explain_code: false)
+        end
 
-        expect(subject.execute).to be_error
+        it 'returns an error' do
+          expect(Llm::CompletionWorker).not_to receive(:perform_async)
+
+          expect(subject.execute).to be_error
+        end
       end
-    end
 
-    it 'returns an error when messages are too big' do
-      stub_const("#{described_class}::INPUT_CONTENT_LIMIT", 4)
+      it 'returns an error when messages are too big' do
+        stub_const("#{described_class}::INPUT_CONTENT_LIMIT", 4)
 
-      expect(Llm::CompletionWorker).not_to receive(:perform_async)
+        expect(Llm::CompletionWorker).not_to receive(:perform_async)
 
-      expect(subject.execute).to be_error.and have_attributes(message: eq('The messages are too big'))
-    end
+        expect(subject.execute).to be_error.and have_attributes(message: eq('The messages are too big'))
+      end
 
-    it 'returns an error if user is not a member of the project' do
-      project.team.truncate
+      context 'when experimental features are not enabled' do
+        let(:experiment_features_enabled) { false }
 
-      expect(Llm::CompletionWorker).not_to receive(:perform_async)
+        it 'returns an error' do
+          expect(Llm::CompletionWorker).not_to receive(:perform_async)
 
-      expect(subject.execute).to be_error
-    end
+          expect(subject.execute).to be_error
+        end
+      end
 
-    context 'when experimental features are not enabled' do
-      let(:experiment_features_enabled) { false }
+      context 'when third-party features are not enabled' do
+        let(:third_party_features_enabled) { false }
 
-      it 'returns an error' do
-        expect(Llm::CompletionWorker).not_to receive(:perform_async)
+        it 'returns an error' do
+          expect(Llm::CompletionWorker).not_to receive(:perform_async)
 
-        expect(subject.execute).to be_error
+          expect(subject.execute).to be_error
+        end
       end
     end
 
-    context 'when third-party features are not enabled' do
-      let(:third_party_features_enabled) { false }
-
-      it 'returns an error' do
+    context 'when user is not a member' do
+      it 'returns an error if user is not a member of the project' do
         expect(Llm::CompletionWorker).not_to receive(:perform_async)
 
         expect(subject.execute).to be_error
diff --git a/ee/spec/services/llm/explain_vulnerability_service_spec.rb b/ee/spec/services/llm/explain_vulnerability_service_spec.rb
index f650148b16e34c65a17bc180ddecc77412968997..89f43338ce646e6d48fd793013d4bdb3b1f009e6 100644
--- a/ee/spec/services/llm/explain_vulnerability_service_spec.rb
+++ b/ee/spec/services/llm/explain_vulnerability_service_spec.rb
@@ -4,8 +4,8 @@
 
 RSpec.describe Llm::ExplainVulnerabilityService, :saas, feature_category: :vulnerability_management do
   let_it_be(:user) { create(:user) }
-  let_it_be(:namespace) { create(:group_with_plan, plan: :ultimate_plan) }
-  let_it_be(:project) { create(:project, :public, namespace: namespace) }
+  let_it_be_with_reload(:namespace) { create(:group_with_plan, plan: :ultimate_plan) }
+  let_it_be(:project) { create(:project, namespace: namespace) }
   let_it_be(:vulnerability) { create(:vulnerability, :with_finding, project: project) }
   let_it_be(:options) { { include_source_code: true } }
 
@@ -13,7 +13,6 @@
 
   before do
     stub_feature_flags(openai_experimentation: true)
-    vulnerability.project = project
   end
 
   describe '#execute' do
@@ -38,7 +37,7 @@
 
     context 'when the user is permitted to view the vulnerability' do
       before do
-        project.add_maintainer(user)
+        namespace.add_developer(user)
       end
 
       it_behaves_like 'completion worker sync and async' do
@@ -46,25 +45,21 @@
         let(:action_name) { :explain_vulnerability }
         let(:content) { 'Explain vulnerability' }
       end
-    end
 
-    context 'when the user is not permitted to view the vulnerability' do
-      before do
-        allow(project).to receive(:member?).with(user).and_return(false)
-      end
+      context 'when feature flag is disabled' do
+        before do
+          stub_feature_flags(explain_vulnerability: false)
+        end
 
-      it 'returns an error' do
-        expect(subject.execute).to be_error
+        it 'returns an error' do
+          expect(subject.execute).to be_error
 
-        expect(Llm::CompletionWorker).not_to have_received(:perform_async)
+          expect(Llm::CompletionWorker).not_to have_received(:perform_async)
+        end
       end
     end
 
-    context 'when feature flag is disabled' do
-      before do
-        stub_feature_flags(explain_vulnerability: false)
-      end
-
+    context 'when the user is not permitted to view the vulnerability' do
       it 'returns an error' do
         expect(subject.execute).to be_error
 
diff --git a/ee/spec/services/llm/generate_commit_message_service_spec.rb b/ee/spec/services/llm/generate_commit_message_service_spec.rb
index db7f925db79ef6e5f72e7bbde1cf6825feffd160..70bf8b52d25aa0f8340b24e12a68ecf35a1d52c2 100644
--- a/ee/spec/services/llm/generate_commit_message_service_spec.rb
+++ b/ee/spec/services/llm/generate_commit_message_service_spec.rb
@@ -3,7 +3,7 @@
 require 'spec_helper'
 
 RSpec.describe Llm::GenerateCommitMessageService, :saas, feature_category: :code_review_workflow do
-  let_it_be(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) }
+  let_it_be_with_reload(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) }
   let_it_be(:user) { create(:user) }
   let_it_be(:project) { create(:project, :public, group: group) }
   let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@@ -26,7 +26,7 @@
 
     context 'when the user is permitted to view the merge request' do
       before do
-        project.add_maintainer(user)
+        group.add_developer(user)
       end
 
       it 'schedules a job' do
@@ -79,7 +79,7 @@
 
     with_them do
       before do
-        project.add_maintainer(user)
+        group.add_maintainer(user)
         project.root_ancestor.namespace_settings.update!(
           third_party_ai_features_enabled: third_party_ai_features_enabled,
           experiment_features_enabled: experiment_features_enabled)
diff --git a/ee/spec/services/llm/generate_test_file_service_spec.rb b/ee/spec/services/llm/generate_test_file_service_spec.rb
index ca27c65bc322baa5e8a9ad74e194f33d5dc60851..6645420b1b9936e9fcacf7450b3e68c2bc9e1c7b 100644
--- a/ee/spec/services/llm/generate_test_file_service_spec.rb
+++ b/ee/spec/services/llm/generate_test_file_service_spec.rb
@@ -3,7 +3,7 @@
 require 'spec_helper'
 
 RSpec.describe Llm::GenerateTestFileService, :saas, feature_category: :code_review_workflow do
-  let_it_be(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) }
+  let_it_be_with_reload(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) }
   let_it_be(:user) { create(:user) }
   let_it_be(:project) { create(:project, :public, group: group) }
   let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@@ -18,15 +18,15 @@
 
   describe '#execute' do
     before do
-      project.root_ancestor.namespace_settings.update!(
+      group.namespace_settings.update!(
         third_party_ai_features_enabled: true,
         experiment_features_enabled: true)
       allow(Llm::CompletionWorker).to receive(:perform_async)
     end
 
-    context 'when the user is permitted to view the merge request' do
+    context 'when member of the group' do
       before do
-        project.add_maintainer(user)
+        group.add_developer(user)
       end
 
       it_behaves_like 'completion worker sync and async' do
@@ -34,23 +34,21 @@
         let(:action_name) { :generate_test_file }
         let(:content) { 'Generate test file' }
       end
-    end
 
-    context 'when the user is not permitted to view the merge request' do
-      it 'returns an error' do
-        project.team.truncate
+      context 'when feature flag is disabled' do
+        before do
+          stub_feature_flags(generate_test_file_flag: false)
+        end
 
-        expect(subject.execute).to be_error
+        it 'returns an error' do
+          expect(subject.execute).to be_error
 
-        expect(Llm::CompletionWorker).not_to have_received(:perform_async)
+          expect(Llm::CompletionWorker).not_to have_received(:perform_async)
+        end
       end
     end
 
-    context 'when feature flag is disabled' do
-      before do
-        stub_feature_flags(generate_test_file_flag: false)
-      end
-
+    context 'when the user is not permitted to view the merge request' do
       it 'returns an error' do
         expect(subject.execute).to be_error
 
@@ -71,7 +69,7 @@
 
     with_them do
       before do
-        project.add_maintainer(user)
+        group.add_developer(user)
         project.root_ancestor.namespace_settings.update!(
           third_party_ai_features_enabled: third_party_ai_features_enabled,
           experiment_features_enabled: experiment_features_enabled)
diff --git a/ee/spec/services/llm/merge_requests/summarize_review_service_spec.rb b/ee/spec/services/llm/merge_requests/summarize_review_service_spec.rb
index 0888c34312469808972dc99b96638bcce2829426..41f8eefca682720af852f120a605fafb3f6c8e39 100644
--- a/ee/spec/services/llm/merge_requests/summarize_review_service_spec.rb
+++ b/ee/spec/services/llm/merge_requests/summarize_review_service_spec.rb
@@ -5,7 +5,7 @@
 RSpec.describe Llm::MergeRequests::SummarizeReviewService, :saas, feature_category: :code_review_workflow do
   let_it_be(:user) { create(:user) }
   let_it_be(:user_2) { create(:user) }
-  let_it_be(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) }
+  let_it_be_with_reload(:group) { create(:group_with_plan, :public, plan: :ultimate_plan) }
   let_it_be(:project) { create(:project, :public, group: group) }
   let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: user) }
 
@@ -18,7 +18,7 @@
       stub_ee_application_setting(should_check_namespace_plan: true)
       stub_licensed_features(summarize_my_mr_code_review: true, ai_features: true)
 
-      project.add_developer(user)
+      group.add_developer(user)
 
       group.namespace_settings.update!(third_party_ai_features_enabled: true, experiment_features_enabled: true)
     end
diff --git a/ee/spec/workers/llm/completion_worker_spec.rb b/ee/spec/workers/llm/completion_worker_spec.rb
index 4bcc66342ec437d2bf6bf8980bd0a405a16b309b..15c8a55ec15ceab03cc2d974d09a80d3b94815d5 100644
--- a/ee/spec/workers/llm/completion_worker_spec.rb
+++ b/ee/spec/workers/llm/completion_worker_spec.rb
@@ -72,6 +72,15 @@
 
         it_behaves_like 'performs successfully'
       end
+
+      context 'when resource is nil' do
+        let(:resource) { nil }
+        let(:resource_id) { nil }
+        let(:resource_type) { nil }
+        let(:ai_action_name) { :chat }
+
+        it_behaves_like 'performs successfully'
+      end
     end
 
     context 'with invalid parameters' do