diff --git a/db/migrate/20201217132603_create_elastic_reindexing_subtasks.rb b/db/migrate/20201217132603_create_elastic_reindexing_subtasks.rb
new file mode 100644
index 0000000000000000000000000000000000000000..db084b885c2fca97ac99b09e17e48737ab371aba
--- /dev/null
+++ b/db/migrate/20201217132603_create_elastic_reindexing_subtasks.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+class CreateElasticReindexingSubtasks < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+
+  disable_ddl_transaction!
+
+  class ReindexingTask < ActiveRecord::Base
+    self.table_name = 'elastic_reindexing_tasks'
+  end
+
+  class ReindexingSubtask < ActiveRecord::Base
+    self.table_name = 'elastic_reindexing_subtasks'
+  end
+
+  def up
+    unless table_exists?(:elastic_reindexing_subtasks)
+      create_table :elastic_reindexing_subtasks do |t|
+        t.references :elastic_reindexing_task, foreign_key: { on_delete: :cascade }, null: false
+        t.text :alias_name, null: false
+        t.text :index_name_from, null: false
+        t.text :index_name_to, null: false
+        t.text :elastic_task, null: false
+        t.integer :documents_count_target
+        t.integer :documents_count
+        t.timestamps_with_timezone null: false
+      end
+    end
+
+    add_text_limit :elastic_reindexing_subtasks, :index_name_from, 255
+    add_text_limit :elastic_reindexing_subtasks, :index_name_to, 255
+    add_text_limit :elastic_reindexing_subtasks, :elastic_task, 255
+    add_text_limit :elastic_reindexing_subtasks, :alias_name, 255
+
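+    # Backfill one subtask per existing task so in-flight or not-yet-cleaned-up tasks
+    # keep their index names and Elasticsearch task IDs under the default alias.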
+    ReindexingTask.find_each do |task|
+      next if task.index_name_from.blank? || task.index_name_to.blank? || task.elastic_task.blank?
+      next if ReindexingSubtask.where(elastic_reindexing_task_id: task.id).exists?
+
+      ReindexingSubtask.create(
+        elastic_reindexing_task_id: task.id,
+        documents_count_target: task.documents_count_target,
+        documents_count: task.documents_count,
+        alias_name: 'gitlab-production',
+        index_name_from: task.index_name_from,
+        index_name_to: task.index_name_to,
+        elastic_task: task.elastic_task
+      )
+    end
+  end
+
+  def down
+    drop_table :elastic_reindexing_subtasks
+  end
+end
diff --git a/db/schema_migrations/20201217132603 b/db/schema_migrations/20201217132603
new file mode 100644
index 0000000000000000000000000000000000000000..d1db386cbf51998bd78415f801e2b717d9b309c5
--- /dev/null
+++ b/db/schema_migrations/20201217132603
@@ -0,0 +1 @@
+164bcc838beb7d51775f8b813b92d3ec7080d4c7937d6ad16cf973131b45359e
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 27c5d33f773ba3a71a5f4a501ba86ec9bca6aeb2..58a1f4522096b569310464973737c6e91222f5cb 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -11947,6 +11947,32 @@ CREATE SEQUENCE draft_notes_id_seq
 
 ALTER SEQUENCE draft_notes_id_seq OWNED BY draft_notes.id;
 
+CREATE TABLE elastic_reindexing_subtasks (
+    id bigint NOT NULL,
+    elastic_reindexing_task_id bigint NOT NULL,
+    alias_name text NOT NULL,
+    index_name_from text NOT NULL,
+    index_name_to text NOT NULL,
+    elastic_task text NOT NULL,
+    documents_count_target integer,
+    documents_count integer,
+    created_at timestamp with time zone NOT NULL,
+    updated_at timestamp with time zone NOT NULL,
+    CONSTRAINT check_4910adc798 CHECK ((char_length(elastic_task) <= 255)),
+    CONSTRAINT check_88f56216a4 CHECK ((char_length(alias_name) <= 255)),
+    CONSTRAINT check_a1fbd9faa9 CHECK ((char_length(index_name_from) <= 255)),
+    CONSTRAINT check_f456494bd8 CHECK ((char_length(index_name_to) <= 255))
+);
+
+CREATE SEQUENCE elastic_reindexing_subtasks_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+ALTER SEQUENCE elastic_reindexing_subtasks_id_seq OWNED BY elastic_reindexing_subtasks.id;
+
 CREATE TABLE elastic_reindexing_tasks (
     id bigint NOT NULL,
     created_at timestamp with time zone NOT NULL,
@@ -18387,6 +18413,8 @@ ALTER TABLE ONLY diff_note_positions ALTER COLUMN id SET DEFAULT nextval('diff_n
 
 ALTER TABLE ONLY draft_notes ALTER COLUMN id SET DEFAULT nextval('draft_notes_id_seq'::regclass);
 
+ALTER TABLE ONLY elastic_reindexing_subtasks ALTER COLUMN id SET DEFAULT nextval('elastic_reindexing_subtasks_id_seq'::regclass);
+
 ALTER TABLE ONLY elastic_reindexing_tasks ALTER COLUMN id SET DEFAULT nextval('elastic_reindexing_tasks_id_seq'::regclass);
 
 ALTER TABLE ONLY emails ALTER COLUMN id SET DEFAULT nextval('emails_id_seq'::regclass);
@@ -19548,6 +19576,9 @@ ALTER TABLE ONLY diff_note_positions
 ALTER TABLE ONLY draft_notes
     ADD CONSTRAINT draft_notes_pkey PRIMARY KEY (id);
 
+ALTER TABLE ONLY elastic_reindexing_subtasks
+    ADD CONSTRAINT elastic_reindexing_subtasks_pkey PRIMARY KEY (id);
+
 ALTER TABLE ONLY elastic_reindexing_tasks
     ADD CONSTRAINT elastic_reindexing_tasks_pkey PRIMARY KEY (id);
 
@@ -21393,6 +21424,8 @@ CREATE INDEX index_draft_notes_on_discussion_id ON draft_notes USING btree (disc
 
 CREATE INDEX index_draft_notes_on_merge_request_id ON draft_notes USING btree (merge_request_id);
 
+CREATE INDEX index_elastic_reindexing_subtasks_on_elastic_reindexing_task_id ON elastic_reindexing_subtasks USING btree (elastic_reindexing_task_id);
+
 CREATE UNIQUE INDEX index_elastic_reindexing_tasks_on_in_progress ON elastic_reindexing_tasks USING btree (in_progress) WHERE in_progress;
 
 CREATE INDEX index_elastic_reindexing_tasks_on_state ON elastic_reindexing_tasks USING btree (state);
@@ -25361,6 +25394,9 @@ ALTER TABLE ONLY requirements
 ALTER TABLE ONLY snippet_repositories
     ADD CONSTRAINT fk_rails_f21f899728 FOREIGN KEY (shard_id) REFERENCES shards(id) ON DELETE RESTRICT;
 
+ALTER TABLE ONLY elastic_reindexing_subtasks
+    ADD CONSTRAINT fk_rails_f2cc190164 FOREIGN KEY (elastic_reindexing_task_id) REFERENCES elastic_reindexing_tasks(id) ON DELETE CASCADE;
+
 ALTER TABLE ONLY ci_pipeline_chat_data
     ADD CONSTRAINT fk_rails_f300456b63 FOREIGN KEY (chat_name_id) REFERENCES chat_names(id) ON DELETE CASCADE;
 
diff --git a/doc/integration/elasticsearch.md b/doc/integration/elasticsearch.md
index 05b3eec0106096020d9f495aaf8e05903c463aa8..fb5b976fe2fb8c079ef7d8cbe95d7ef77557a103 100644
--- a/doc/integration/elasticsearch.md
+++ b/doc/integration/elasticsearch.md
@@ -283,7 +283,7 @@ To disable the Elasticsearch integration:
 1. Expand the **Advanced Search** section and uncheck **Elasticsearch indexing**
    and **Search with Elasticsearch enabled**.
 1. Click **Save changes** for the changes to take effect.
-1. (Optional) Delete the existing index:
+1. (Optional) Delete the existing indexes:
 
    ```shell
    # Omnibus installations
@@ -347,7 +347,8 @@ To reclaim the `gitlab-production` index name, you need to first create a `secon
 
 To create a secondary index, run the following Rake task. The `SKIP_ALIAS`
 environment variable will disable the automatic creation of the Elasticsearch
-alias, which would conflict with the existing index under `$PRIMARY_INDEX`:
+alias, which would conflict with the existing index under `$PRIMARY_INDEX`, and will
+also skip creation of the separate issues index:
 
 ```shell
 # Omnibus installation
@@ -523,8 +524,8 @@ The following are some available Rake tasks:
 | [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)                   | Iterates over all projects and queues Sidekiq jobs to index them in the background.                                                                                                       |
 | [`sudo gitlab-rake gitlab:elastic:index_projects_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)            | Determines the overall status of the indexing. It is done by counting the total number of indexed projects, dividing by a count of the total number of projects, then multiplying by 100. |
 | [`sudo gitlab-rake gitlab:elastic:clear_index_status`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)               | Deletes all instances of IndexStatus for all projects. Note that this command will result in a complete wipe of the index, and it should be used with caution.                                                                                              |
-| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates an empty index and assigns an alias for it on the Elasticsearch side only if it doesn't already exist.                                                                                                      |
-| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)       | Removes the GitLab index and alias (if exists) on the Elasticsearch instance.                                                                                                                                   |
+| [`sudo gitlab-rake gitlab:elastic:create_empty_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates empty indexes (the default index and a separate issues index) and assigns an alias for each on the Elasticsearch side only if they don't already exist.                                                                                                      |
+| [`sudo gitlab-rake gitlab:elastic:delete_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)       | Removes the GitLab indexes and aliases (if they exist) on the Elasticsearch instance.                                                                                                                                   |
 | [`sudo gitlab-rake gitlab:elastic:recreate_index[<TARGET_NAME>]`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)     | Wrapper task for `gitlab:elastic:delete_index[<TARGET_NAME>]` and `gitlab:elastic:create_empty_index[<TARGET_NAME>]`.                                                                       |
 | [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)                   | Performs an Elasticsearch import that indexes the snippets data.                                                                                                                          |
 | [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/blob/master/ee/lib/tasks/gitlab/elastic.rake)             | Displays which projects are not indexed.                                                                                                                                                  |
diff --git a/ee/app/models/concerns/elastic/application_versioned_search.rb b/ee/app/models/concerns/elastic/application_versioned_search.rb
index 065a9f52f7e188c095b604bf57390b92d97efe16..cda61b7bec95ad67e7c2976e741b2f42533674f2 100644
--- a/ee/app/models/concerns/elastic/application_versioned_search.rb
+++ b/ee/app/models/concerns/elastic/application_versioned_search.rb
@@ -6,9 +6,15 @@ module ApplicationVersionedSearch
     FORWARDABLE_INSTANCE_METHODS = [:es_id, :es_parent].freeze
     FORWARDABLE_CLASS_METHODS = [:elastic_search, :es_import, :es_type, :index_name, :document_type, :mapping, :mappings, :settings, :import].freeze
 
+    # rubocop:disable Gitlab/ModuleWithInstanceVariables
     def __elasticsearch__(&block)
-      @__elasticsearch__ ||= ::Elastic::MultiVersionInstanceProxy.new(self)
+      if self.class.use_separate_indices?
+        @__elasticsearch_separate__ ||= ::Elastic::MultiVersionInstanceProxy.new(self, use_separate_indices: true)
+      else
+        @__elasticsearch__ ||= ::Elastic::MultiVersionInstanceProxy.new(self)
+      end
     end
+    # rubocop:enable Gitlab/ModuleWithInstanceVariables
 
     # Should be overridden in the models where some records should be skipped
     def searchable?
@@ -77,7 +83,15 @@ def associations_needing_elasticsearch_update(updated_attributes)
 
     class_methods do
       def __elasticsearch__
-        @__elasticsearch__ ||= ::Elastic::MultiVersionClassProxy.new(self)
+        if use_separate_indices?
+          @__elasticsearch_separate__ ||= ::Elastic::MultiVersionClassProxy.new(self, use_separate_indices: true)
+        else
+          @__elasticsearch__ ||= ::Elastic::MultiVersionClassProxy.new(self)
+        end
+      end
+
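+      # A class is served from its own standalone index only once the
+      # migrate_issues_to_separate_index migration has finished.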
+      def use_separate_indices?
+        Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.include?(self) && Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
       end
 
       # Mark a dependant association as needing to be updated when a specific
diff --git a/ee/app/models/elastic/migration_record.rb b/ee/app/models/elastic/migration_record.rb
index 473ea2874c74f2f56c7c28649cc4965eae4e11c8..5f56b8a03228c50529ffae9de7d344fb7b25613c 100644
--- a/ee/app/models/elastic/migration_record.rb
+++ b/ee/app/models/elastic/migration_record.rb
@@ -4,7 +4,7 @@ module Elastic
   class MigrationRecord
     attr_reader :version, :name, :filename
 
-    delegate :migrate, :skip_migration?, :completed?, :batched?, :throttle_delay, to: :migration
+    delegate :migrate, :skip_migration?, :completed?, :batched?, :throttle_delay, :pause_indexing?, to: :migration
 
     def initialize(version:, name:, filename:)
       @version = version
@@ -16,11 +16,17 @@ def initialize(version:, name:, filename:)
     def save!(completed:)
       raise 'Migrations index is not found' unless helper.index_exists?(index_name: index_name)
 
-      data = { completed: completed }.merge(timestamps(completed: completed))
+      data = { completed: completed, state: load_state }.merge(timestamps(completed: completed))
 
       client.index index: index_name, type: '_doc', id: version, body: data
     end
 
+    def save_state!(state)
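+      # Merge the given hash into the stored state and re-index the migration
+      # document, preserving its current completed flag.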
+      completed = load_from_index&.dig('_source', 'completed')
+
+      client.index index: index_name, type: '_doc', id: version, body: { completed: completed, state: load_state.merge(state) }
+    end
+
     def persisted?
       load_from_index.present?
     end
@@ -31,6 +37,18 @@ def load_from_index
       nil
     end
 
+    def load_state
+      load_from_index&.dig('_source', 'state')&.with_indifferent_access || {}
+    end
+
+    def halted?
+      !!load_state&.dig('halted')
+    end
+
+    def name_for_key
+      name.underscore
+    end
+
     def self.persisted_versions(completed:)
       helper = Gitlab::Elastic::Helper.default
       helper.client
diff --git a/ee/app/models/elastic/reindexing_subtask.rb b/ee/app/models/elastic/reindexing_subtask.rb
new file mode 100644
index 0000000000000000000000000000000000000000..57adb59420cd4f4078be7a644f15dac6d82ce2ba
--- /dev/null
+++ b/ee/app/models/elastic/reindexing_subtask.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class Elastic::ReindexingSubtask < ApplicationRecord
+  self.table_name = 'elastic_reindexing_subtasks'
+
+  belongs_to :elastic_reindexing_task, class_name: 'Elastic::ReindexingTask'
+
+  validates :index_name_from, :index_name_to, :elastic_task, presence: true
+end
diff --git a/ee/app/models/elastic/reindexing_task.rb b/ee/app/models/elastic/reindexing_task.rb
index 3fc05ee5011ef3a05cbbffdd4af4de51cda439d7..4482aa3fe3ba3ab4a581b9d04630543e76f3690e 100644
--- a/ee/app/models/elastic/reindexing_task.rb
+++ b/ee/app/models/elastic/reindexing_task.rb
@@ -3,6 +3,8 @@
 class Elastic::ReindexingTask < ApplicationRecord
   self.table_name = 'elastic_reindexing_tasks'
 
+  has_many :subtasks, class_name: 'Elastic::ReindexingSubtask', foreign_key: :elastic_reindexing_task_id
+
   enum state: {
     initial:                0,
     indexing_paused:        1,
@@ -27,8 +29,9 @@ def self.running?
 
   def self.drop_old_indices!
     old_indices_to_be_deleted.find_each do |task|
-      next unless Gitlab::Elastic::Helper.default.delete_index(index_name: task.index_name_from)
-
+      task.subtasks.each do |subtask|
+        Gitlab::Elastic::Helper.default.delete_index(index_name: subtask.index_name_from)
+      end
       task.update!(state: :original_index_deleted)
     end
   end
diff --git a/ee/app/services/elastic/cluster_reindexing_service.rb b/ee/app/services/elastic/cluster_reindexing_service.rb
index bfc953423192279b1466a3e0ed64f9f8b60909ae..db9b75dce0014af7d9f2313da9cb33380015f2ed 100644
--- a/ee/app/services/elastic/cluster_reindexing_service.rb
+++ b/ee/app/services/elastic/cluster_reindexing_service.rb
@@ -31,6 +31,10 @@ def current_task
 
     private
 
+    def alias_names
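+      # The main index alias plus the alias of every standalone index (currently only issues).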
+      [elastic_helper.target_name] + elastic_helper.standalone_indices_proxies.map(&:index_name)
+    end
+
     def default_index_options
       {
         refresh_interval: nil, # Change it back to the default
@@ -40,6 +44,12 @@ def default_index_options
     end
 
     def initial!
+      if Elastic::DataMigrationService.pending_migrations?
+        # Migrations may have paused indexing, so we do not want to unpause it when aborting the reindexing process
+        abort_reindexing!('You have unapplied advanced search migrations. Please wait until they are finished', unpause_indexing: false)
+        return false
+      end
+
       # Pause indexing
       Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: true)
 
@@ -48,7 +58,7 @@ def initial!
         return false
       end
 
-      expected_free_size = elastic_helper.index_size_bytes * 2
+      expected_free_size = alias_names.sum { |name| elastic_helper.index_size_bytes(index_name: name) } * 2
       if elastic_helper.cluster_free_size_bytes < expected_free_size
         abort_reindexing!("You should have at least #{expected_free_size} bytes of storage available to perform reindexing. Please increase the storage in your Elasticsearch cluster before reindexing.")
         return false
@@ -60,43 +70,52 @@ def initial!
     end
 
     def indexing_paused!
-      # Create an index with custom settings
-      index_name = elastic_helper.create_empty_index(with_alias: false, options: { settings: INITIAL_INDEX_OPTIONS })
-
-      # Record documents count
-      documents_count = elastic_helper.documents_count
-
-      # Trigger reindex
-      task_id = elastic_helper.reindex(to: index_name)
+      # Create indices with custom settings
+      main_index = elastic_helper.create_empty_index(with_alias: false, options: { settings: INITIAL_INDEX_OPTIONS })
+      standalone_indices = elastic_helper.create_standalone_indices(with_alias: false, options: { settings: INITIAL_INDEX_OPTIONS })
+
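+      # Both helpers return a hash of new index name => alias name, so reindexing is
+      # triggered and tracked per index pair.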
+      main_index.merge(standalone_indices).each do |new_index_name, alias_name|
+        old_index_name = elastic_helper.target_index_name(target: alias_name)
+        # Record documents count
+        documents_count = elastic_helper.documents_count(index_name: old_index_name)
+        # Trigger reindex
+        task_id = elastic_helper.reindex(from: old_index_name, to: new_index_name)
+
+        current_task.subtasks.create!(
+          alias_name: alias_name,
+          index_name_from: old_index_name,
+          index_name_to: new_index_name,
+          documents_count: documents_count,
+          elastic_task: task_id
+        )
+      end
 
-      current_task.update!(
-        index_name_from: elastic_helper.target_index_name,
-        index_name_to: index_name,
-        documents_count: documents_count,
-        elastic_task: task_id,
-        state: :reindexing
-      )
+      current_task.update!(state: :reindexing)
 
       true
     end
 
     def save_documents_count!(refresh:)
-      elastic_helper.refresh_index(index_name: current_task.index_name_to) if refresh
+      current_task.subtasks.each do |subtask|
+        elastic_helper.refresh_index(index_name: subtask.index_name_to) if refresh
 
-      new_documents_count = elastic_helper.documents_count(index_name: current_task.index_name_to)
-      current_task.update!(documents_count_target: new_documents_count)
+        new_documents_count = elastic_helper.documents_count(index_name: subtask.index_name_to)
+        subtask.update!(documents_count_target: new_documents_count)
+      end
     end
 
     def check_task_status
       save_documents_count!(refresh: false)
 
-      task_status = elastic_helper.task_status(task_id: current_task.elastic_task)
-      return false unless task_status['completed']
+      current_task.subtasks.each do |subtask|
+        task_status = elastic_helper.task_status(task_id: subtask.elastic_task)
+        return false unless task_status['completed']
 
-      reindexing_error = task_status.dig('error', 'type')
-      if reindexing_error
-        abort_reindexing!("Task #{current_task.elastic_task} has failed with Elasticsearch error.", additional_logs: { elasticsearch_error_type: reindexing_error })
-        return false
+        reindexing_error = task_status.dig('error', 'type')
+        if reindexing_error
+          abort_reindexing!("Task #{subtask.elastic_task} has failed with Elasticsearch error.", additional_logs: { elasticsearch_error_type: reindexing_error })
+          return false
+        end
       end
 
       true
@@ -109,22 +128,28 @@ def check_task_status
     def compare_documents_count
       save_documents_count!(refresh: true)
 
-      old_documents_count = current_task.documents_count
-      new_documents_count = current_task.documents_count_target
-      if old_documents_count != new_documents_count
-        abort_reindexing!("Documents count is different, Count from new index: #{new_documents_count} Count from original index: #{old_documents_count}. This likely means something went wrong during reindexing.")
-        return false
+      current_task.subtasks.each do |subtask|
+        old_documents_count = subtask.documents_count
+        new_documents_count = subtask.documents_count_target
+        if old_documents_count != new_documents_count
+          abort_reindexing!("Documents count is different, Count from new index: #{new_documents_count} Count from original index: #{old_documents_count}. This likely means something went wrong during reindexing.")
+          return false
+        end
       end
 
       true
     end
 
     def apply_default_index_options
-      elastic_helper.update_settings(index_name: current_task.index_name_to, settings: default_index_options)
+      current_task.subtasks.each do |subtask|
+        elastic_helper.update_settings(index_name: subtask.index_name_to, settings: default_index_options)
+      end
     end
 
     def switch_alias_to_new_index
-      elastic_helper.switch_alias(to: current_task.index_name_to)
+      current_task.subtasks.each do |subtask|
+        elastic_helper.switch_alias(from: subtask.index_name_from, to: subtask.index_name_to, alias_name: subtask.alias_name)
+      end
     end
 
     def finalize_reindexing
@@ -144,8 +169,8 @@ def reindexing!
       true
     end
 
-    def abort_reindexing!(reason, additional_logs: {})
-      error = { message: 'elasticsearch_reindex_error', error: reason, elasticsearch_task_id: current_task.elastic_task, gitlab_task_id: current_task.id, gitlab_task_state: current_task.state }
+    def abort_reindexing!(reason, additional_logs: {}, unpause_indexing: true)
+      error = { message: 'elasticsearch_reindex_error', error: reason, gitlab_task_id: current_task.id, gitlab_task_state: current_task.state }
       logger.error(error.merge(additional_logs))
 
       current_task.update!(
@@ -154,7 +179,7 @@ def abort_reindexing!(reason, additional_logs: {})
       )
 
       # Unpause indexing
-      Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false)
+      Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false) if unpause_indexing
     end
 
     def logger
diff --git a/ee/app/services/elastic/data_migration_service.rb b/ee/app/services/elastic/data_migration_service.rb
index 1a783db87adfac9f8e900667082ca336aa8e575b..918ba6003d4f0e09efa322e76ebfa025d12f741e 100644
--- a/ee/app/services/elastic/data_migration_service.rb
+++ b/ee/app/services/elastic/data_migration_service.rb
@@ -20,23 +20,32 @@ def migrations
         migrations.sort_by(&:version)
       end
 
+      def [](version)
+        migrations.find { |m| m.version == version }
+      end
+
       def drop_migration_has_finished_cache!(migration)
-        name = migration.name.underscore
-        Rails.cache.delete cache_key(:migration_has_finished, name)
+        Rails.cache.delete cache_key(:migration_has_finished, migration.name_for_key)
       end
 
       def migration_has_finished?(name)
-        Rails.cache.fetch cache_key(:migration_has_finished, name), expires_in: 30.minutes do
+        Rails.cache.fetch cache_key(:migration_has_finished, name.to_s.underscore), expires_in: 30.minutes do
           migration_has_finished_uncached?(name)
         end
       end
 
       def migration_has_finished_uncached?(name)
-        migration = migrations.find { |migration| migration.name == name.to_s.camelize }
+        migration = migrations.find { |migration| migration.name_for_key == name.to_s.underscore }
 
         !!migration&.load_from_index&.dig('_source', 'completed')
       end
 
+      def pending_migrations?
+        migrations.reverse.any? do |migration|
+          !migration_has_finished?(migration.name_for_key)
+        end
+      end
+
       def mark_all_as_completed!
         migrations.each do |migration|
           migration.save!(completed: true)
diff --git a/ee/app/views/admin/application_settings/_elasticsearch_form.html.haml b/ee/app/views/admin/application_settings/_elasticsearch_form.html.haml
index d28eb23a3871ec625f7c18822cb648157e6b21ec..70616801fc822de7d02d2b74beaa721569d8551d 100644
--- a/ee/app/views/admin/application_settings/_elasticsearch_form.html.haml
+++ b/ee/app/views/admin/application_settings/_elasticsearch_form.html.haml
@@ -23,7 +23,7 @@
             .form-check
               = f.check_box :elasticsearch_indexing, class: 'form-check-input', data: { qa_selector: 'indexing_checkbox' }
               = f.label :elasticsearch_indexing, class: 'form-check-label' do
-                Elasticsearch indexing
+                = _('Elasticsearch indexing')
               - unless Gitlab::CurrentSettings.elasticsearch_indexing?
                 .form-text.text-muted
                   = _('An empty index will be created if one does not already exist')
@@ -35,11 +35,16 @@
           .card-body
             .form-group
               .form-check
-                = f.check_box :elasticsearch_pause_indexing, class: 'form-check-input', data: { qa_selector: 'pause_checkbox' }, disabled: !Gitlab::CurrentSettings.elasticsearch_indexing?
+                - pending_migrations = Elastic::DataMigrationService.pending_migrations? && Gitlab::CurrentSettings.elasticsearch_pause_indexing? rescue false
+                - disable_checkbox = !Gitlab::CurrentSettings.elasticsearch_indexing? || pending_migrations
+                = f.check_box :elasticsearch_pause_indexing, class: 'form-check-input', data: { qa_selector: 'pause_checkbox' }, disabled: disable_checkbox
                 = f.label :elasticsearch_pause_indexing, class: 'form-check-label' do
-                  Pause Elasticsearch indexing
+                  = _('Pause Elasticsearch indexing')
                 .form-text.text-muted
                   = _('Changes are still tracked. Useful for cluster/index migrations.')
+                - if pending_migrations
+                  .form-text.text-warning
+                    = _('There are pending advanced search migrations. Indexing must remain paused until the migrations are completed.')
 
           .form-group
             .form-check
@@ -102,29 +107,31 @@
 
         .sub-section
           %h4= _('Elasticsearch zero-downtime reindexing')
-          = link_to _('Trigger cluster reindexing'), admin_elasticsearch_trigger_reindexing_path, class: 'gl-button btn btn-primary', data: { confirm: _('Are you sure you want to reindex?') }, method: :post, disabled: @elasticsearch_reindexing_task&.in_progress?
-          .form-text.text-muted
+          = link_to _('Trigger cluster reindexing'), admin_elasticsearch_trigger_reindexing_path, class: "gl-button btn btn-info", disabled: @elasticsearch_reindexing_task&.in_progress?, data: { confirm: _('Are you sure you want to reindex?') }, method: :post
+          .form-text.gl-text-gray-600
             = _('This feature should be used with an index that was created after 13.0')
           - Elastic::ReindexingTask.old_indices_scheduled_for_deletion.each do |task|
             .form-text.text-danger
-              = _("Unused, previous index '%{index_name}' will be deleted after %{time} automatically.") % { index_name: task.index_name_from, time: task.delete_original_index_at }
-              = link_to _('Cancel index deletion'), admin_elasticsearch_cancel_index_deletion_path(task_id: task.id), method: :post
+              = _("Unused, previous indices: %{index_names} will be deleted after %{time} automatically.") % { index_names: task.subtasks.map(&:index_name_from).join(', '), time: task.delete_original_index_at }
+              = link_to _('Cancel index deletion'), admin_elasticsearch_cancel_index_deletion_path(task_id: task.id), class: 'gl-mb-2', method: :post
           - if @elasticsearch_reindexing_task
-            - expected_documents = @elasticsearch_reindexing_task.documents_count
-            - processed_documents = @elasticsearch_reindexing_task.documents_count_target
-            %h5= _('Reindexing status')
-            %p= _('State: %{last_reindexing_task_state}') % { last_reindexing_task_state: @elasticsearch_reindexing_task.state }
-            - if @elasticsearch_reindexing_task.elastic_task
-              %p= _('Task ID: %{elastic_task}') % { elastic_task: @elasticsearch_reindexing_task.elastic_task }
+            %h5= _('Reindexing Status: %{status}') % { status: @elasticsearch_reindexing_task.state }
             - if @elasticsearch_reindexing_task.error_message
               %p= _('Error: %{error_message}') % { error_message: @elasticsearch_reindexing_task.error_message }
-            - if expected_documents
-              %p= _('Expected documents: %{expected_documents}') % { expected_documents: expected_documents }
-            - if processed_documents && expected_documents
-              - percentage = ((processed_documents / expected_documents.to_f) * 100).round(2)
-              %p= _('Documents reindexed: %{processed_documents} (%{percentage}%%)') % { processed_documents: processed_documents, percentage: percentage }
-              .progress
-                .progress-bar.progress-bar-striped.bg-primary{ "aria-valuemax" => "100", "aria-valuemin" => "0", "aria-valuenow" => percentage, :role => "progressbar", :style => "width: #{percentage}%" }
+            - @elasticsearch_reindexing_task.subtasks.each do |subtask|
+              .card-body.form-group
+                %h5= subtask.alias_name
+                - expected_documents = subtask.documents_count
+                - if subtask.elastic_task
+                  %p= _('Task ID: %{elastic_task}') % { elastic_task: subtask.elastic_task }
+                - if expected_documents
+                  - processed_documents = subtask.documents_count_target
+                  %p= _('Expected documents: %{expected_documents}') % { expected_documents: expected_documents }
+                  - if processed_documents && expected_documents
+                    - percentage = ((processed_documents / expected_documents.to_f) * 100).round(2)
+                    %p= _('Documents reindexed: %{processed_documents} (%{percentage}%%)') % { processed_documents: processed_documents, percentage: percentage }
+                    .progress
+                      .progress-bar{ "aria-valuemax" => "100", "aria-valuemin" => "0", "aria-valuenow" => percentage, :role => "progressbar", :style => "width: #{percentage}%" }
 
         .sub-section
           %h4= _('Elasticsearch indexing restrictions')
diff --git a/ee/app/workers/concerns/elastic/migration_options.rb b/ee/app/workers/concerns/elastic/migration_options.rb
index a5b6e78c52c0c6989f36827435e92147737948a4..cba43a79fba4212c72abc6f467749db4548566c8 100644
--- a/ee/app/workers/concerns/elastic/migration_options.rb
+++ b/ee/app/workers/concerns/elastic/migration_options.rb
@@ -15,6 +15,10 @@ def throttle_delay
       self.class.get_throttle_delay
     end
 
+    def pause_indexing?
+      self.class.get_pause_indexing
+    end
+
     class_methods do
       def batched!
         class_attributes[:batched] = true
@@ -24,6 +28,14 @@ def get_batched
         class_attributes[:batched]
       end
 
+      def pause_indexing!
+        class_attributes[:pause_indexing] = true
+      end
+
+      def get_pause_indexing
+        class_attributes[:pause_indexing]
+      end
+
       def throttle_delay(value)
         class_attributes[:throttle_delay] = value
       end
diff --git a/ee/app/workers/concerns/elastic/migration_state.rb b/ee/app/workers/concerns/elastic/migration_state.rb
new file mode 100644
index 0000000000000000000000000000000000000000..0f2ac48a37651344f634919d37e5f0a11c3d1996
--- /dev/null
+++ b/ee/app/workers/concerns/elastic/migration_state.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Elastic
+  module MigrationState
+    def migration_state
+      migration_record.load_state
+    end
+
+    def set_migration_state(state)
+      log "Setting migration_state to #{state.to_json}"
+
+      migration_record.save_state!(state)
+    end
+  end
+end
diff --git a/ee/app/workers/elastic/migration_worker.rb b/ee/app/workers/elastic/migration_worker.rb
index 66d889c557446b5f3ba84c0062b2be4033b72ab1..42c83db14cb1c6800c0b3b8e822cb47c9b57d001 100644
--- a/ee/app/workers/elastic/migration_worker.rb
+++ b/ee/app/workers/elastic/migration_worker.rb
@@ -29,12 +29,21 @@ def perform
           helper.create_migrations_index
         end
 
+        if migration.halted?
+          logger.info "MigrationWorker: migration[#{migration.name}] has been halted. All future migrations are blocked until this is resolved. Exiting"
+          unpause_indexing!(migration)
+
+          break false
+        end
+
         execute_migration(migration)
 
         completed = migration.completed?
         logger.info "MigrationWorker: migration[#{migration.name}] updating with completed: #{completed}"
         migration.save!(completed: completed)
 
+        unpause_indexing!(migration) if completed
+
         Elastic::DataMigrationService.drop_migration_has_finished_cache!(migration)
       end
     end
@@ -45,6 +54,8 @@ def execute_migration(migration)
       if migration.persisted? && !migration.batched?
         logger.info "MigrationWorker: migration[#{migration.name}] did not execute migrate method since it was already executed. Waiting for migration to complete"
       else
+        pause_indexing!(migration)
+
         logger.info "MigrationWorker: migration[#{migration.name}] executing migrate method"
         migration.migrate
 
@@ -61,6 +72,27 @@ def current_migration
       Elastic::DataMigrationService.migrations.find { |migration| !completed_migrations.include?(migration.version) }
     end
 
+    def pause_indexing!(migration)
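+      # Only pause when the migration asks for it, and remember in the migration state
+      # whether this worker changed the setting so it only unpauses what it paused.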
+      return unless migration.pause_indexing?
+      return if migration.load_state[:pause_indexing].present?
+
+      pause_indexing = !Gitlab::CurrentSettings.elasticsearch_pause_indexing?
+      migration.save_state!(pause_indexing: pause_indexing)
+
+      if pause_indexing
+        logger.info 'MigrationWorker: Pausing indexing'
+        Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: true)
+      end
+    end
+
+    def unpause_indexing!(migration)
+      return unless migration.pause_indexing?
+      return unless migration.load_state[:pause_indexing]
+
+      logger.info 'MigrationWorker: unpausing indexing'
+      Gitlab::CurrentSettings.update!(elasticsearch_pause_indexing: false)
+    end
+
     def helper
       Gitlab::Elastic::Helper.default
     end
diff --git a/ee/app/workers/elastic_delete_project_worker.rb b/ee/app/workers/elastic_delete_project_worker.rb
index 0ecd0b8255626005544926214486587b4fbf8410..f0ac92156c299bbe721feddce589a917d04a23df 100644
--- a/ee/app/workers/elastic_delete_project_worker.rb
+++ b/ee/app/workers/elastic_delete_project_worker.rb
@@ -16,9 +16,19 @@ def perform(project_id, es_id)
 
   private
 
+  def indices
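+    # The main index plus, once issues have been migrated to their own index,
+    # each standalone index.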
+    helper = Gitlab::Elastic::Helper.default
+
+    if Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
+      [helper.target_name] + helper.standalone_indices_proxies.map(&:index_name)
+    else
+      [helper.target_name]
+    end
+  end
+
   def remove_project_and_children_documents(project_id, es_id)
     client.delete_by_query({
-      index: Project.__elasticsearch__.index_name,
+      index: indices,
       routing: es_id,
       body: {
         query: {
diff --git a/ee/changelogs/unreleased/273264-copy-issues-to-new-index.yml b/ee/changelogs/unreleased/273264-copy-issues-to-new-index.yml
new file mode 100644
index 0000000000000000000000000000000000000000..1feb00f88719609d5a89ab0194b6a3f4f3f334ab
--- /dev/null
+++ b/ee/changelogs/unreleased/273264-copy-issues-to-new-index.yml
@@ -0,0 +1,5 @@
+---
+title: 'Advanced Search: Copy issues to new index'
+merge_request: 48334
+author:
+type: changed
diff --git a/ee/elastic/migrate/20201123123400_migrate_issues_to_separate_index.rb b/ee/elastic/migrate/20201123123400_migrate_issues_to_separate_index.rb
new file mode 100644
index 0000000000000000000000000000000000000000..2f87146079538660cf78dfc234ee53deaffaf6a3
--- /dev/null
+++ b/ee/elastic/migrate/20201123123400_migrate_issues_to_separate_index.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+class MigrateIssuesToSeparateIndex < Elastic::Migration
+  pause_indexing!
+  batched!
+  throttle_delay 1.minute
+
+  MAX_ATTEMPTS = 30
+
+  FIELDS = %w(
+    type
+    id
+    iid
+    title
+    description
+    created_at
+    updated_at
+    state
+    project_id
+    author_id
+    confidential
+    assignee_id
+    visibility_level
+    issues_access_level
+  ).freeze
+
+  def migrate
+    # On the initial batch we only create the standalone issues index
+    if migration_state[:slice].blank?
+      log "Create standalone issues index under #{issues_index_name}"
+      helper.create_standalone_indices unless helper.index_exists?(index_name: issues_index_name)
+
+      options = {
+        slice: 0,
+        retry_attempt: 0,
+        max_slices: get_number_of_shards
+      }
+      set_migration_state(options)
+
+      return
+    end
+
+    retry_attempt = migration_state[:retry_attempt].to_i
+    slice = migration_state[:slice]
+    max_slices = migration_state[:max_slices]
+
+    if retry_attempt >= MAX_ATTEMPTS
+      fail_migration_halt_error!(retry_attempt: retry_attempt)
+      return
+    end
+
+    if slice < max_slices
+      log "Launching reindexing for slice:#{slice} | max_slices:#{max_slices}"
+
+      response = reindex(slice: slice, max_slices: max_slices)
+      process_response(response)
+
+      log "Reindexing for slice:#{slice} | max_slices:#{max_slices} is completed with #{response.to_json}"
+
+      set_migration_state(
+        slice: slice + 1,
+        retry_attempt: retry_attempt,
+        max_slices: max_slices
+      )
+    end
+  rescue StandardError => e
+    log "migrate failed, increasing migration_state retry_attempt: #{retry_attempt} error:#{e.message}"
+
+    set_migration_state(
+      slice: slice,
+      retry_attempt: retry_attempt + 1,
+      max_slices: max_slices
+    )
+
+    raise e
+  end
+
+  def completed?
+    log "completed check: Refreshing #{issues_index_name}"
+    helper.refresh_index(index_name: issues_index_name)
+
+    original_count = original_issues_documents_count
+    new_count = new_issues_documents_count
+    log "Checking to see if migration is completed based on index counts: original_count:#{original_count}, new_count:#{new_count}"
+
+    original_count == new_count
+  end
+
+  private
+
+  def reindex(slice:, max_slices:)
+    body = query(slice: slice, max_slices: max_slices)
+
+    client.reindex(body: body, wait_for_completion: true)
+  end
+
+  def process_response(response)
+    if response['failures'].present?
+      log_raise "Reindexing failed with #{response['failures']}"
+    end
+
+    if response['total'] != (response['updated'] + response['created'] + response['deleted'])
+      log_raise "Slice reindexing seems to have failed, total is not equal to updated + created + deleted"
+    end
+  end
+
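+  # Sliced reindex body that copies only documents of type 'issue', restricted to
+  # FIELDS, from the combined index into the standalone issues index.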
+  def query(slice:, max_slices:)
+    {
+      source: {
+        index: default_index_name,
+        _source: FIELDS,
+        query: {
+          match: {
+            type: 'issue'
+          }
+        },
+        slice: {
+          id: slice,
+          max: max_slices
+        }
+      },
+      dest: {
+        index: issues_index_name
+      }
+    }
+  end
+
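+  # Counts issue documents in the combined index via a filtered aggregation so the
+  # total can be compared with the standalone index in #completed?.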
+  def original_issues_documents_count
+    query = {
+      size: 0,
+      aggs: {
+        issues: {
+          filter: {
+            term: {
+              type: {
+                value: 'issue'
+              }
+            }
+          }
+        }
+      }
+    }
+
+    results = client.search(index: default_index_name, body: query)
+    results.dig('aggregations', 'issues', 'doc_count')
+  end
+
+  def new_issues_documents_count
+    helper.documents_count(index_name: issues_index_name)
+  end
+
+  def get_number_of_shards
+    helper.get_settings.dig('number_of_shards').to_i
+  end
+
+  def default_index_name
+    helper.target_name
+  end
+
+  def issues_index_name
+    "#{default_index_name}-issues"
+  end
+end
diff --git a/ee/lib/elastic/class_proxy_util.rb b/ee/lib/elastic/class_proxy_util.rb
index 272baffc99bee3401b5242dceaf4c25086743185..a0e3654480928d1bd0e167de9557567b4d81fd0a 100644
--- a/ee/lib/elastic/class_proxy_util.rb
+++ b/ee/lib/elastic/class_proxy_util.rb
@@ -6,15 +6,24 @@ module Elastic
   module ClassProxyUtil
     extend ActiveSupport::Concern
 
-    def initialize(target)
+    attr_reader :use_separate_indices
+
+    def initialize(target, use_separate_indices: false)
       super(target)
 
-      config = version_namespace.const_get('Config', false)
+      const_name = if use_separate_indices
+                     "#{target.name}Config"
+                   else
+                     'Config'
+                   end
+
+      config = version_namespace.const_get(const_name, false)
 
       @index_name = config.index_name
       @document_type = config.document_type
       @settings = config.settings
       @mapping = config.mapping
+      @use_separate_indices = use_separate_indices
     end
 
     ### Multi-version utils
diff --git a/ee/lib/elastic/instance_proxy_util.rb b/ee/lib/elastic/instance_proxy_util.rb
index e2ec3cea58dd2e006499c633603fd5cb22e50858..3f85185a84dfdb21e823a1dad2a4d22879c73df4 100644
--- a/ee/lib/elastic/instance_proxy_util.rb
+++ b/ee/lib/elastic/instance_proxy_util.rb
@@ -6,10 +6,16 @@ module Elastic
   module InstanceProxyUtil
     extend ActiveSupport::Concern
 
-    def initialize(target)
+    def initialize(target, use_separate_indices: false)
       super(target)
 
-      config = version_namespace.const_get('Config', false)
+      const_name = if use_separate_indices
+                     "#{target.class.name}Config"
+                   else
+                     'Config'
+                   end
+
+      config = version_namespace.const_get(const_name, false)
 
       @index_name = config.index_name
       @document_type = config.document_type
diff --git a/ee/lib/elastic/latest/issue_config.rb b/ee/lib/elastic/latest/issue_config.rb
new file mode 100644
index 0000000000000000000000000000000000000000..e700629e76a53b6bd65857bfbf214a32c9cf66ea
--- /dev/null
+++ b/ee/lib/elastic/latest/issue_config.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Elastic
+  module Latest
+    module IssueConfig
+      # To obtain settings and mappings methods
+      extend Elasticsearch::Model::Indexing::ClassMethods
+      extend Elasticsearch::Model::Naming::ClassMethods
+
+      self.document_type = 'doc'
+      self.index_name = [Rails.application.class.module_parent_name.downcase, Rails.env, 'issues'].join('-')
+
+      settings Elastic::Latest::Config.settings.to_hash
+
+      mappings dynamic: 'strict' do
+        indexes :type, type: :keyword
+
+        indexes :id, type: :integer
+        indexes :iid, type: :integer
+
+        indexes :title, type: :text, index_options: 'positions'
+        indexes :description, type: :text, index_options: 'positions'
+        indexes :created_at, type: :date
+        indexes :updated_at, type: :date
+        indexes :state, type: :keyword
+        indexes :project_id, type: :integer
+        indexes :author_id, type: :integer
+        indexes :confidential, type: :boolean
+        indexes :assignee_id, type: :integer
+
+        indexes :visibility_level, type: :integer
+        indexes :issues_access_level, type: :integer
+      end
+    end
+  end
+end
diff --git a/ee/lib/elastic/latest/issue_instance_proxy.rb b/ee/lib/elastic/latest/issue_instance_proxy.rb
index 218fbeed95c9bd20a55c13047dd6dc8ccf42393b..084cbff4a0398f0cb80934e104b938cee6b84786 100644
--- a/ee/lib/elastic/latest/issue_instance_proxy.rb
+++ b/ee/lib/elastic/latest/issue_instance_proxy.rb
@@ -26,6 +26,16 @@ def as_indexed_json(options = {})
 
         data.merge(generic_attributes)
       end
+
+      private
+
+      def generic_attributes
+        if Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
+          super.except('join_field')
+        else
+          super
+        end
+      end
     end
   end
 end
diff --git a/ee/lib/elastic/migration.rb b/ee/lib/elastic/migration.rb
index 9e26349a4a7cb01de9ca14604f2d3ecf7b3615b2..5fc5d50cbbc885b903935ce3f96f8e600e2a49c1 100644
--- a/ee/lib/elastic/migration.rb
+++ b/ee/lib/elastic/migration.rb
@@ -3,6 +3,7 @@
 module Elastic
   class Migration
     include Elastic::MigrationOptions
+    include Elastic::MigrationState
 
     attr_reader :version
 
@@ -28,10 +29,26 @@ def client
       helper.client
     end
 
+    def migration_record
+      Elastic::DataMigrationService[version]
+    end
+
+    def fail_migration_halt_error!(retry_attempt: 0)
+      set_migration_state(
+        retry_attempt: retry_attempt,
+        halted: true
+      )
+    end
+
     def log(message)
       logger.info "[Elastic::Migration: #{self.version}] #{message}"
     end
 
+    def log_raise(message)
+      logger.error "[Elastic::Migration: #{self.version}] #{message}"
+      raise message
+    end
+
     def logger
       @logger ||= ::Gitlab::Elasticsearch::Logger.build
     end
diff --git a/ee/lib/elastic/multi_version_class_proxy.rb b/ee/lib/elastic/multi_version_class_proxy.rb
index 1325f6b4933cf932014ee1922c315ed4fb96847e..5dcedcaba4f62835be734cee77d0b6f9704653a9 100644
--- a/ee/lib/elastic/multi_version_class_proxy.rb
+++ b/ee/lib/elastic/multi_version_class_proxy.rb
@@ -5,9 +5,10 @@ module Elastic
   class MultiVersionClassProxy
     include MultiVersionUtil
 
-    def initialize(data_target)
+    def initialize(data_target, use_separate_indices: false)
       @data_target = data_target
       @data_class = get_data_class(data_target)
+      @use_separate_indices = use_separate_indices
 
       generate_forwarding
     end
diff --git a/ee/lib/elastic/multi_version_instance_proxy.rb b/ee/lib/elastic/multi_version_instance_proxy.rb
index c477ddebd4b9062f576a895fb8ab1a7acd03ca6a..5758a57760eb605d50d8f92e307f4b27678de85e 100644
--- a/ee/lib/elastic/multi_version_instance_proxy.rb
+++ b/ee/lib/elastic/multi_version_instance_proxy.rb
@@ -5,9 +5,10 @@ module Elastic
   class MultiVersionInstanceProxy
     include MultiVersionUtil
 
-    def initialize(data_target)
+    def initialize(data_target, use_separate_indices: false)
       @data_target = data_target
       @data_class = get_data_class(data_target.class)
+      @use_separate_indices = use_separate_indices
 
       generate_forwarding
     end
diff --git a/ee/lib/elastic/multi_version_util.rb b/ee/lib/elastic/multi_version_util.rb
index 84e4d0509c5924618c86a98eae9040d034a969be..9359599b52e9b0205821c7426bb56cd194db64dc 100644
--- a/ee/lib/elastic/multi_version_util.rb
+++ b/ee/lib/elastic/multi_version_util.rb
@@ -5,7 +5,7 @@ module MultiVersionUtil
     extend ActiveSupport::Concern
     include Gitlab::Utils::StrongMemoize
 
-    attr_reader :data_class, :data_target
+    attr_reader :data_class, :data_target, :use_separate_indices
 
     # TODO: remove once multi-version is functional https://gitlab.com/gitlab-org/gitlab/issues/10156
     TARGET_VERSION = 'V12p1'
@@ -13,7 +13,8 @@ module MultiVersionUtil
     # @params version [String, Module] can be a string "V12p1" or module (Elastic::V12p1)
     def version(version)
       version = Elastic.const_get(version, false) if version.is_a?(String)
-      version.const_get(proxy_class_name, false).new(data_target)
+
+      version.const_get(proxy_class_name, false).new(data_target, use_separate_indices: use_separate_indices)
     end
 
     # TODO: load from db table https://gitlab.com/gitlab-org/gitlab/issues/12555
diff --git a/ee/lib/elastic/v12p1/config.rb b/ee/lib/elastic/v12p1/config.rb
index 30e1577b7a3798a9eeeffe681b40133330b1ef5a..d9f7c0215af6b80b9b89a0134fca71c6e7e4d4ca 100644
--- a/ee/lib/elastic/v12p1/config.rb
+++ b/ee/lib/elastic/v12p1/config.rb
@@ -3,5 +3,6 @@
 module Elastic
   module V12p1
     Config = Elastic::Latest::Config
+    IssueConfig = Elastic::Latest::IssueConfig
   end
 end
diff --git a/ee/lib/gitlab/elastic/helper.rb b/ee/lib/gitlab/elastic/helper.rb
index 15659554964985012c9736264ac3347cef2133c9..9c398baea53d5a432ed357e9119e7c32b2a3c4f9 100644
--- a/ee/lib/gitlab/elastic/helper.rb
+++ b/ee/lib/gitlab/elastic/helper.rb
@@ -3,9 +3,8 @@
 module Gitlab
   module Elastic
     class Helper
-      ES_ENABLED_CLASSES = [
+      ES_MAPPINGS_CLASSES = [
           Project,
-          Issue,
           MergeRequest,
           Snippet,
           Note,
@@ -14,6 +13,10 @@ class Helper
           Repository
       ].freeze
 
+      ES_SEPARATE_CLASSES = [
+        Issue
+      ].freeze
+
       attr_reader :version, :client
       attr_accessor :target_name
 
@@ -40,13 +43,13 @@ def default
       end
 
       def default_settings
-        ES_ENABLED_CLASSES.inject({}) do |settings, klass|
+        ES_MAPPINGS_CLASSES.inject({}) do |settings, klass|
           settings.deep_merge(klass.__elasticsearch__.settings.to_hash)
         end
       end
 
       def default_mappings
-        mappings = ES_ENABLED_CLASSES.inject({}) do |m, klass|
+        mappings = ES_MAPPINGS_CLASSES.inject({}) do |m, klass|
           m.deep_merge(klass.__elasticsearch__.mappings.to_hash)
         end
         mappings.deep_merge(::Elastic::Latest::CustomLanguageAnalyzers.custom_analyzers_mappings)
@@ -64,6 +67,9 @@ def create_migrations_index
               completed: {
                 type: 'boolean'
               },
+              state: {
+                type: 'object'
+              },
               started_at: {
                 type: 'date'
               },
@@ -87,6 +93,54 @@ def create_migrations_index
         migrations_index_name
       end
 
+      def standalone_indices_proxies
+        ES_SEPARATE_CLASSES.map do |class_name|
+          ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
+        end
+      end
+
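+      # Creates one index per class in ES_SEPARATE_CLASSES (optionally with an alias)
+      # and returns a hash of new index name => alias name.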
+      def create_standalone_indices(with_alias: true, options: {})
+        standalone_indices_proxies.each_with_object({}) do |proxy, indices|
+          alias_name = proxy.index_name
+          new_index_name = "#{alias_name}-#{Time.now.strftime("%Y%m%d-%H%M")}"
+
+          raise "Index under '#{new_index_name}' already exists" if index_exists?(index_name: new_index_name)
+
+          if with_alias
+            raise "Alias under '#{alias_name}' already exists" if alias_exists?(name: alias_name)
+          end
+
+          settings = proxy.settings.to_hash
+          settings = settings.merge(options[:settings]) if options[:settings]
+
+          mappings = proxy.mappings.to_hash
+          mappings = mappings.merge(options[:mappings]) if options[:mappings]
+
+          create_index_options = {
+            index: new_index_name,
+            body: {
+              settings: settings,
+              mappings: mappings
+            }
+          }.merge(additional_index_options)
+
+          client.indices.create create_index_options
+
+          client.indices.put_alias(name: alias_name, index: new_index_name) if with_alias
+
+          indices[new_index_name] = alias_name
+        end
+      end
+
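+      # Deletes each standalone index (resolving aliases to concrete index names) and
+      # returns [index_name, alias_name, result] tuples for reporting.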
+      def delete_standalone_indices
+        standalone_indices_proxies.map do |proxy|
+          index_name = target_index_name(target: proxy.index_name)
+          result = delete_index(index_name: index_name)
+
+          [index_name, proxy.index_name, result]
+        end
+      end
+
       def create_empty_index(with_alias: true, options: {})
         new_index_name = options[:index_name] || "#{target_name}-#{Time.now.strftime("%Y%m%d-%H%M")}"
 
@@ -111,11 +165,13 @@ def create_empty_index(with_alias: true, options: {})
         client.indices.create create_index_options
         client.indices.put_alias(name: target_name, index: new_index_name) if with_alias
 
-        new_index_name
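+        # Returned in the same shape as create_standalone_indices: new index name => alias name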
+        {
+          new_index_name => target_name
+        }
       end
 
       def delete_index(index_name: nil)
-        result = client.indices.delete(index: index_name || target_index_name)
+        result = client.indices.delete(index: target_index_name(target: index_name))
         result['acknowledged']
       rescue ::Elasticsearch::Transport::Transport::Errors::NotFound => e
         Gitlab::ErrorTracking.log_exception(e)
@@ -126,29 +182,40 @@ def index_exists?(index_name: nil)
         client.indices.exists?(index: index_name || target_name) # rubocop:disable CodeReuse/ActiveRecord
       end
 
-      def alias_exists?
-        client.indices.exists_alias(name: target_name)
+      def alias_exists?(name: nil)
+        client.indices.exists_alias(name: name || target_name)
       end
 
       # Calls Elasticsearch refresh API to ensure data is searchable
       # immediately.
       # https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-refresh.html
+      # By default refreshes the main index and all standalone indices
       def refresh_index(index_name: nil)
-        client.indices.refresh(index: index_name || target_name)
+        indices = if index_name.nil?
+                    [target_name] + standalone_indices_proxies.map(&:index_name)
+                  else
+                    [index_name]
+                  end
+
+        indices.each do |index|
+          client.indices.refresh(index: index)
+        end
       end
 
       def index_size(index_name: nil)
-        client.indices.stats['indices'][index_name || target_index_name]['total']
+        index = target_index_name(target: index_name || target_index_name)
+
+        client.indices.stats.dig('indices', index, 'total')
       end
 
       def documents_count(index_name: nil)
-        index = index_name || target_index_name
+        index = target_index_name(target: index_name || target_index_name)
 
         client.indices.stats.dig('indices', index, 'primaries', 'docs', 'count')
       end
 
-      def index_size_bytes
-        index_size['store']['size_in_bytes']
+      def index_size_bytes(index_name: nil)
+        index_size(index_name: index_name)['store']['size_in_bytes']
       end
 
       def cluster_free_size_bytes
@@ -184,13 +251,13 @@ def update_settings(index_name: nil, settings:)
         client.indices.put_settings(index: index_name || target_index_name, body: settings)
       end
 
-      def switch_alias(from: target_index_name, to:)
+      def switch_alias(from: target_index_name, alias_name: target_name, to:)
         actions = [
           {
-            remove: { index: from, alias: target_name }
+            remove: { index: from, alias: alias_name }
           },
           {
-            add: { index: to, alias: target_name }
+            add: { index: to, alias: alias_name }
           }
         ]
 
@@ -199,11 +266,13 @@ def switch_alias(from: target_index_name, to:)
       end
 
       # This method is used when we need to get an actual index name (if it's used through an alias)
-      def target_index_name
-        if alias_exists?
-          client.indices.get_alias(name: target_name).each_key.first
+      def target_index_name(target: nil)
+        target ||= target_name
+
+        if alias_exists?(name: target)
+          client.indices.get_alias(name: target).each_key.first
         else
-          target_name
+          target
         end
       end
 
diff --git a/ee/lib/tasks/gitlab/elastic.rake b/ee/lib/tasks/gitlab/elastic.rake
index f91a59fb990a720bf69eaa8d42b021f847787227..40fd0fd41470584dd6e2674fdc49117ec66901f4 100644
--- a/ee/lib/tasks/gitlab/elastic.rake
+++ b/ee/lib/tasks/gitlab/elastic.rake
@@ -1,6 +1,6 @@
 namespace :gitlab do
   namespace :elastic do
-    desc "GitLab | Elasticsearch | Index eveything at once"
+    desc "GitLab | Elasticsearch | Index everything at once"
     task :index do
       # UPDATE_INDEX=true can cause some projects not to be indexed properly if someone were to push a commit to the
       # project before the rake task could get to it, so we set it to `nil` here to avoid that. It doesn't make sense
@@ -57,7 +57,7 @@ namespace :gitlab do
       logger.info("Indexing snippets... " + "done".color(:green))
     end
 
-    desc "GitLab | Elasticsearch | Create empty index and assign alias"
+    desc "GitLab | Elasticsearch | Create empty indexes and assigns an alias for each"
     task :create_empty_index, [:target_name] => [:environment] do |t, args|
       with_alias = ENV["SKIP_ALIAS"].nil?
       options = {}
@@ -68,14 +68,25 @@ namespace :gitlab do
       helper = Gitlab::Elastic::Helper.new(target_name: args[:target_name])
       index_name = helper.create_empty_index(with_alias: with_alias, options: options)
 
-      helper.create_migrations_index unless helper.index_exists?(index_name: helper.migrations_index_name)
-      ::Elastic::DataMigrationService.mark_all_as_completed!
+      # with_alias is used to support interacting with a specific index (such as when reclaiming the production index
+      # name when the index was created prior to 13.0). If the `SKIP_ALIAS` environment variable is set,
+      # do not create standalone indexes and do not create the migrations index
+      if with_alias
+        standalone_index_names = helper.create_standalone_indices(options: options)
+        standalone_index_names.each do |index_name, alias_name|
+          puts "Index '#{index_name}' has been created.".color(:green)
+          puts "Alias '#{alias_name}' -> '#{index_name}' has been created.".color(:green)
+        end
+
+        helper.create_migrations_index unless helper.index_exists?(index_name: helper.migrations_index_name)
+        ::Elastic::DataMigrationService.mark_all_as_completed!
+      end
 
       puts "Index '#{index_name}' has been created.".color(:green)
       puts "Alias '#{helper.target_name}' → '#{index_name}' has been created".color(:green) if with_alias
     end
 
-    desc "GitLab | Elasticsearch | Delete index"
+    desc "GitLab | Elasticsearch | Delete all indexes"
     task :delete_index, [:target_name] => [:environment] do |t, args|
       helper = Gitlab::Elastic::Helper.new(target_name: args[:target_name])
 
@@ -84,9 +95,18 @@ namespace :gitlab do
       else
         puts "Index/alias '#{helper.target_name}' was not found".color(:green)
       end
+
+      results = helper.delete_standalone_indices
+      results.each do |index_name, alias_name, result|
+        if result
+          puts "Index '#{index_name}' with alias '#{alias_name}' has been deleted".color(:green)
+        else
+          puts "Index '#{index_name}' with alias '#{alias_name}' was not found".color(:green)
+        end
+      end
     end
 
-    desc "GitLab | Elasticsearch | Recreate index"
+    desc "GitLab | Elasticsearch | Recreate indexes"
     task :recreate_index, [:target_name] => [:environment] do |t, args|
       Rake::Task["gitlab:elastic:delete_index"].invoke(*args)
       Rake::Task["gitlab:elastic:create_empty_index"].invoke(*args)
diff --git a/ee/lib/tasks/gitlab/elastic/test.rake b/ee/lib/tasks/gitlab/elastic/test.rake
index edf3bc34325143ea2abb236634be67e2e081c44a..26a2e73fba82e3e1708f28676296436931628c30 100644
--- a/ee/lib/tasks/gitlab/elastic/test.rake
+++ b/ee/lib/tasks/gitlab/elastic/test.rake
@@ -3,8 +3,14 @@ namespace :gitlab do
     namespace :test do
       desc 'GitLab | Elasticsearch | Test | Measure space taken by ES indices'
       task index_size: :environment do
-        puts "===== Size stats for index: #{Project.__elasticsearch__.index_name} ====="
-        pp Gitlab::Elastic::Helper.default.index_size.slice(*%w(docs store))
+        helper = Gitlab::Elastic::Helper.default
+
+        indices = [helper.target_name]
+        indices += helper.standalone_indices_proxies.map(&:index_name) if Elastic::DataMigrationService.migration_has_finished?(:migrate_issues_to_separate_index)
+        indices.each do |index_name|
+          puts "===== Size stats for index: #{index_name} ====="
+          pp helper.index_size(index_name: index_name).slice(*%w(docs store))
+        end
       end
 
       desc 'GitLab | Elasticsearch | Test | Measure space taken by ES indices, reindex, and measure space taken again'
diff --git a/ee/spec/controllers/admin/application_settings_controller_spec.rb b/ee/spec/controllers/admin/application_settings_controller_spec.rb
index 1fe5476d324fe911173e12450544b22ead05d330..c3a7e359966d44fddb5adfe4d2ebd35a837c88c7 100644
--- a/ee/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/ee/spec/controllers/admin/application_settings_controller_spec.rb
@@ -25,8 +25,7 @@
         get :general
 
         expect(assigns(:elasticsearch_reindexing_task)).to eq(task)
-        expect(response.body).to include('Reindexing status')
-        expect(response.body).to include("State: #{task.state}")
+        expect(response.body).to include("Reindexing Status: #{task.state}")
       end
     end
   end
diff --git a/ee/spec/elastic/migrate/20201116142400_add_new_data_to_issues_documents_spec.rb b/ee/spec/elastic/migrate/20201116142400_add_new_data_to_issues_documents_spec.rb
index 86d05f04fee035e6d664f9cd48a8a34d9af45bf2..2057d372c47a871808a1e4339b0d7155d77e5d00 100644
--- a/ee/spec/elastic/migrate/20201116142400_add_new_data_to_issues_documents_spec.rb
+++ b/ee/spec/elastic/migrate/20201116142400_add_new_data_to_issues_documents_spec.rb
@@ -11,6 +11,10 @@
   before do
     stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
 
+    allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
+      .with(:migrate_issues_to_separate_index)
+      .and_return(false)
+
     # ensure issues are indexed
     issues
 
diff --git a/ee/spec/elastic/migrate/20201123123400_migrate_issues_to_separate_index_spec.rb b/ee/spec/elastic/migrate/20201123123400_migrate_issues_to_separate_index_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..404d1a0cfa78847ba570ac8352edb760805f0740
--- /dev/null
+++ b/ee/spec/elastic/migrate/20201123123400_migrate_issues_to_separate_index_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require File.expand_path('ee/elastic/migrate/20201123123400_migrate_issues_to_separate_index.rb')
+
+RSpec.describe MigrateIssuesToSeparateIndex, :elastic, :sidekiq_inline do
+  let(:version) { 20201123123400 }
+  let(:migration) { described_class.new(version) }
+  let(:issues) { create_list(:issue, 3) }
+  let(:index_name) { "#{es_helper.target_name}-issues" }
+
+  before do
+    allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
+      .with(:migrate_issues_to_separate_index)
+      .and_return(false)
+
+    stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
+
+    issues
+
+    ensure_elasticsearch_index!
+  end
+
+  describe 'migration_options' do
+    it 'has migration options set', :aggregate_failures do
+      expect(migration.batched?).to be_truthy
+      expect(migration.throttle_delay).to eq(1.minute)
+      expect(migration.pause_indexing?).to be_truthy
+    end
+  end
+
+  describe '.migrate', :clean_gitlab_redis_shared_state do
+    context 'initial launch' do
+      before do
+        es_helper.delete_index(index_name: es_helper.target_index_name(target: index_name))
+      end
+
+      it 'creates an index and sets migration_state' do
+        expect { migration.migrate }.to change { es_helper.alias_exists?(name: index_name) }.from(false).to(true)
+
+        expect(migration.migration_state).to include(slice: 0, max_slices: 5)
+      end
+    end
+
+    context 'batch run' do
+      it 'migrates all issues' do
+        total_shards = es_helper.get_settings.dig('number_of_shards').to_i
+        migration.set_migration_state(slice: 0, max_slices: total_shards)
+
+        total_shards.times do
+          migration.migrate
+        end
+
+        expect(migration.completed?).to be_truthy
+        expect(es_helper.documents_count(index_name: "#{es_helper.target_name}-issues")).to eq(issues.count)
+      end
+    end
+
+    context 'failed run' do
+      let(:client) { double('Elasticsearch::Transport::Client') }
+
+      before do
+        allow(migration).to receive(:client).and_return(client)
+      end
+
+      context 'exception is raised' do
+        before do
+          allow(client).to receive(:reindex).and_raise(StandardError)
+        end
+
+        it 'increases retry_attempt' do
+          migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 1)
+
+          expect { migration.migrate }.to raise_error(StandardError)
+          expect(migration.migration_state).to match(slice: 0, max_slices: 2, retry_attempt: 2)
+        end
+
+        it 'fails the migration after too many attempts' do
+          migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 30)
+          expect(migration).not_to receive(:process_response)
+
+          migration.migrate
+
+          expect(migration.migration_state).to match(slice: 0, max_slices: 2, retry_attempt: 30, halted: true)
+        end
+      end
+
+      context 'elasticsearch failures' do
+        context 'total is not equal' do
+          before do
+            allow(client).to receive(:reindex).and_return({ "total" => 60, "updated" => 0, "created" => 45, "deleted" => 0, "failures" => [] })
+          end
+
+          it 'raises an error' do
+            migration.set_migration_state(slice: 0, max_slices: 2)
+
+            expect { migration.migrate }.to raise_error(/total is not equal/)
+          end
+        end
+
+        context 'reindexing failures' do
+          before do
+            allow(client).to receive(:reindex).and_return({ "total" => 60, "updated" => 0, "created" => 0, "deleted" => 0, "failures" => [{ "type": "es_rejected_execution_exception" }] })
+          end
+
+          it 'raises an error' do
+            migration.set_migration_state(slice: 0, max_slices: 2)
+
+            expect { migration.migrate }.to raise_error(/failed with/)
+          end
+        end
+      end
+    end
+  end
+
+  describe '.completed?' do
+    subject { migration.completed? }
+
+    before do
+      2.times do |slice|
+        migration.set_migration_state(slice: slice, max_slices: 2)
+        migration.migrate
+      end
+    end
+
+    context 'counts are equal' do
+      let(:issues_count) { issues.count }
+
+      it 'returns true' do
+        is_expected.to be_truthy
+      end
+    end
+  end
+end
diff --git a/ee/spec/factories/elastic/reindexing_subtasks.rb b/ee/spec/factories/elastic/reindexing_subtasks.rb
new file mode 100644
index 0000000000000000000000000000000000000000..1057a51fc2c5a483047b1149782619cd60b2255f
--- /dev/null
+++ b/ee/spec/factories/elastic/reindexing_subtasks.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+  factory :elastic_reindexing_subtask, class: 'Elastic::ReindexingSubtask' do
+    association :elastic_reindexing_task, in_progress: false, state: :success
+    sequence(:index_name_from) { |n| "old_index_name_#{n}" }
+    sequence(:index_name_to) { |n| "new_index_name_#{n}" }
+    sequence(:elastic_task) { |n| "elastic_task_#{n}" }
+    sequence(:alias_name) { |n| "alias_name_#{n}" }
+  end
+end
diff --git a/ee/spec/factories/elastic/reindexing_tasks.rb b/ee/spec/factories/elastic/reindexing_tasks.rb
index f82920cb5777101bd4a8ae30a95b588d7745cb9e..79fdfcb854ff818399165e951e9889e2370a1436 100644
--- a/ee/spec/factories/elastic/reindexing_tasks.rb
+++ b/ee/spec/factories/elastic/reindexing_tasks.rb
@@ -6,5 +6,11 @@
     in_progress { true }
     index_name_from { 'old_index_name' }
     index_name_to { 'new_index_name' }
+
+    trait :with_subtask do
+      after(:create) do |task|
+        create :elastic_reindexing_subtask, elastic_reindexing_task: task
+      end
+    end
   end
 end
diff --git a/ee/spec/lib/ee/gitlab/elastic/helper_spec.rb b/ee/spec/lib/ee/gitlab/elastic/helper_spec.rb
index 16d3797ae30c0da3d73ebdc8148e7df0b1339bd4..b2b576fb21b6fc6e099d5a2541792a6ad85b1d01 100644
--- a/ee/spec/lib/ee/gitlab/elastic/helper_spec.rb
+++ b/ee/spec/lib/ee/gitlab/elastic/helper_spec.rb
@@ -7,13 +7,13 @@
 
   shared_context 'with a legacy index' do
     before do
-      @index_name = helper.create_empty_index(with_alias: false, options: { index_name: helper.target_name })
+      @index_name = helper.create_empty_index(with_alias: false, options: { index_name: helper.target_name }).each_key.first
     end
   end
 
   shared_context 'with an existing index and alias' do
     before do
-      @index_name = helper.create_empty_index(with_alias: true)
+      @index_name = helper.create_empty_index(with_alias: true).each_key.first
     end
   end
 
@@ -40,6 +40,10 @@
   end
 
   describe '#default_mappings' do
+    it 'has only one type' do
+      expect(helper.default_mappings.keys).to match_array %i(doc)
+    end
+
     context 'custom analyzers' do
       let(:custom_analyzers_mappings) { { doc: { properties: { title: { fields: { custom: true } } } } } }
 
@@ -65,6 +69,44 @@
     end
   end
 
+  describe '#create_standalone_indices' do
+    after do
+      @indices.each do |index_name, _|
+        helper.delete_index(index_name: index_name)
+      end
+    end
+
+    it 'creates standalone indices' do
+      @indices = helper.create_standalone_indices
+
+      @indices.each do |index_name, _|
+        expect(helper.index_exists?(index_name: index_name)).to be_truthy
+      end
+    end
+
+    it 'raises an exception when there is an existing alias' do
+      @indices = helper.create_standalone_indices
+
+      expect { helper.create_standalone_indices }.to raise_error(/already exists/)
+    end
+
+    it 'raises an exception when there is an existing index' do
+      @indices = helper.create_standalone_indices(with_alias: false)
+
+      expect { helper.create_standalone_indices(with_alias: false) }.to raise_error(/already exists/)
+    end
+  end
+
+  describe '#delete_standalone_indices' do
+    before do
+      helper.create_standalone_indices
+    end
+
+    subject { helper.delete_standalone_indices }
+
+    it_behaves_like 'deletes all standalone indices'
+  end
+
   describe '#create_empty_index' do
     context 'with an empty cluster' do
       context 'with alias and index' do
@@ -174,7 +216,7 @@
     end
   end
 
-  describe '#cluster_free_size' do
+  describe '#cluster_free_size_bytes' do
     it 'returns valid cluster size' do
       expect(helper.cluster_free_size_bytes).to be_positive
     end
@@ -194,4 +236,51 @@
       helper.delete_index(index_name: new_index_name)
     end
   end
+
+  describe '#index_size' do
+    subject { helper.index_size }
+
+    context 'when there is a legacy index' do
+      include_context 'with a legacy index'
+
+      it { is_expected.to have_key("docs") }
+      it { is_expected.to have_key("store") }
+    end
+
+    context 'when there is an alias', :aggregate_failures do
+      include_context 'with an existing index and alias'
+
+      it { is_expected.to have_key("docs") }
+      it { is_expected.to have_key("store") }
+
+      it 'supports providing the alias name' do
+        alias_name = helper.target_name
+
+        expect(helper.index_size(index_name: alias_name)).to have_key("docs")
+        expect(helper.index_size(index_name: alias_name)).to have_key("store")
+      end
+    end
+  end
+
+  describe '#documents_count' do
+    subject { helper.documents_count }
+
+    context 'when there is a legacy index' do
+      include_context 'with a legacy index'
+
+      it { is_expected.to eq(0) }
+    end
+
+    context 'when there is an alias' do
+      include_context 'with an existing index and alias'
+
+      it { is_expected.to eq(0) }
+
+      it 'supports providing the alias name' do
+        alias_name = helper.target_name
+
+        expect(helper.documents_count(index_name: alias_name)).to eq(0)
+      end
+    end
+  end
 end
diff --git a/ee/spec/lib/elastic/latest/issue_config_spec.rb b/ee/spec/lib/elastic/latest/issue_config_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..32733305e9a59197931601b04770f59314997df6
--- /dev/null
+++ b/ee/spec/lib/elastic/latest/issue_config_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Elastic::Latest::IssueConfig do
+  describe '.document_type' do
+    it 'returns config' do
+      expect(described_class.document_type).to eq('doc')
+    end
+  end
+
+  describe '.settings' do
+    it 'returns config' do
+      expect(described_class.settings).to be_a(Elasticsearch::Model::Indexing::Settings)
+    end
+  end
+
+  describe '.mappings' do
+    it 'returns config' do
+      expect(described_class.mapping).to be_a(Elasticsearch::Model::Indexing::Mappings)
+    end
+  end
+end
diff --git a/ee/spec/lib/gitlab/instrumentation/elasticsearch_transport_spec.rb b/ee/spec/lib/gitlab/instrumentation/elasticsearch_transport_spec.rb
index 6403d30cf11dd93351f4e0fde2cba09a3a2b0d86..aa2ebe6cd8e9a7cc202f01f5bcd7619d1722087f 100644
--- a/ee/spec/lib/gitlab/instrumentation/elasticsearch_transport_spec.rb
+++ b/ee/spec/lib/gitlab/instrumentation/elasticsearch_transport_spec.rb
@@ -35,7 +35,7 @@
 
       ::Gitlab::SafeRequestStore.clear!
 
-      create(:issue, title: "new issue")
+      create(:merge_request, title: "new MR")
       ensure_elasticsearch_index!
 
       request = ::Gitlab::Instrumentation::ElasticsearchTransport.detail_store.first
diff --git a/ee/spec/migrations/create_elastic_reindexing_subtasks_spec.rb b/ee/spec/migrations/create_elastic_reindexing_subtasks_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..b819d09e3f55753c78ed63b67499ca32180bacbc
--- /dev/null
+++ b/ee/spec/migrations/create_elastic_reindexing_subtasks_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CreateElasticReindexingSubtasks do
+  let(:migration) { described_class.new }
+  let(:reindexing_tasks) { table(:elastic_reindexing_tasks) }
+  let(:reindexing_subtasks) { table(:elastic_reindexing_subtasks) }
+  let(:fields_to_migrate) { %w(documents_count documents_count_target index_name_from index_name_to elastic_task) }
+
+  describe "#up" do
+    it 'migrates old reindexing tasks' do
+      # these tasks should not be migrated
+      reindexing_tasks.create!(in_progress: false, state: 10)
+      reindexing_tasks.create!(in_progress: false, state: 10, index_name_from: 'index_name')
+      reindexing_tasks.create!(in_progress: false, state: 10, index_name_to: 'index_name')
+      reindexing_tasks.create!(in_progress: false, state: 10, elastic_task: 'TASK')
+
+      # these tasks should be migrated
+      task1 = reindexing_tasks.create!(in_progress: false, documents_count: 100, state: 10, index_name_from: 'index1', index_name_to: 'index2', elastic_task: 'TASK_ID', documents_count_target: 100)
+      task2 = reindexing_tasks.create!(in_progress: false, documents_count: 50, state: 11, index_name_from: 'index3', index_name_to: 'index4', elastic_task: 'TASK_ID2', documents_count_target: 99)
+
+      migrate!
+
+      expect(reindexing_subtasks.count).to eq(2)
+
+      [task1, task2].each do |task|
+        subtask = reindexing_subtasks.find_by(elastic_reindexing_task_id: task.id)
+
+        expect(task.attributes.slice(*fields_to_migrate)).to match(subtask.attributes.slice(*fields_to_migrate))
+      end
+    end
+  end
+end
diff --git a/ee/spec/models/concerns/elastic/issue_spec.rb b/ee/spec/models/concerns/elastic/issue_spec.rb
index 61866f47a8cd982179fd60cdf1b92d6f5f147c3a..fc38ae2ba6a334eff5e70a975206ec6f981ae494 100644
--- a/ee/spec/models/concerns/elastic/issue_spec.rb
+++ b/ee/spec/models/concerns/elastic/issue_spec.rb
@@ -122,11 +122,7 @@
       'confidential'
     ).merge({
       'type' => issue.es_type,
-      'state' => issue.state,
-      'join_field' => {
-        'name' => issue.es_type,
-        'parent' => issue.es_parent
-      }
+      'state' => issue.state
     })
 
     expected_hash['assignee_id'] = [assignee.id]
diff --git a/ee/spec/models/elastic/reindexing_subtask_spec.rb b/ee/spec/models/elastic/reindexing_subtask_spec.rb
new file mode 100644
index 0000000000000000000000000000000000000000..02519d55fc1c9483113e6065e6658b45f0c89a54
--- /dev/null
+++ b/ee/spec/models/elastic/reindexing_subtask_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Elastic::ReindexingSubtask, type: :model do
+  describe 'relations' do
+    it { is_expected.to belong_to(:elastic_reindexing_task) }
+  end
+
+  describe 'validations' do
+    it { is_expected.to validate_presence_of(:index_name_from) }
+    it { is_expected.to validate_presence_of(:index_name_to) }
+    it { is_expected.to validate_presence_of(:elastic_task) }
+  end
+end
diff --git a/ee/spec/models/elastic/reindexing_task_spec.rb b/ee/spec/models/elastic/reindexing_task_spec.rb
index b096cebf011d352212a1401f97155ce6391fba36..0f692935ba77549e52017113fb8db1a41e1c33f3 100644
--- a/ee/spec/models/elastic/reindexing_task_spec.rb
+++ b/ee/spec/models/elastic/reindexing_task_spec.rb
@@ -3,6 +3,10 @@
 require 'spec_helper'
 
 RSpec.describe Elastic::ReindexingTask, type: :model do
+  describe 'relations' do
+    it { is_expected.to have_many(:subtasks) }
+  end
+
   it 'only allows one running task at a time' do
     expect { create(:elastic_reindexing_task, state: :success) }.not_to raise_error
     expect { create(:elastic_reindexing_task) }.not_to raise_error
@@ -18,21 +22,21 @@
   end
 
   describe '.drop_old_indices!' do
-    let(:task_1) { create(:elastic_reindexing_task, index_name_from: 'original_index_1', state: :reindexing, delete_original_index_at: 1.day.ago) }
-    let(:task_2) { create(:elastic_reindexing_task, index_name_from: 'original_index_2', state: :success, delete_original_index_at: nil) }
-    let(:task_3) { create(:elastic_reindexing_task, index_name_from: 'original_index_3', state: :success, delete_original_index_at: 1.day.ago) }
-    let(:task_4) { create(:elastic_reindexing_task, index_name_from: 'original_index_4', state: :success, delete_original_index_at: 5.days.ago) }
-    let(:task_5) { create(:elastic_reindexing_task, index_name_from: 'original_index_5', state: :success, delete_original_index_at: 14.days.from_now) }
+    let(:task_1) { create(:elastic_reindexing_task, :with_subtask, state: :reindexing, delete_original_index_at: 1.day.ago) }
+    let(:task_2) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: nil) }
+    let(:task_3) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: 1.day.ago) }
+    let(:task_4) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: 5.days.ago) }
+    let(:task_5) { create(:elastic_reindexing_task, :with_subtask, state: :success, delete_original_index_at: 14.days.from_now) }
     let(:tasks_for_deletion) { [task_3, task_4] }
     let(:other_tasks) { [task_1, task_2, task_5] }
 
     it 'deletes the correct indices' do
       other_tasks.each do |task|
-        expect(Gitlab::Elastic::Helper.default).not_to receive(:delete_index).with(index_name: task.index_name_from)
+        expect(Gitlab::Elastic::Helper.default).not_to receive(:delete_index).with(index_name: task.subtasks.first.index_name_from)
       end
 
       tasks_for_deletion.each do |task|
-        expect(Gitlab::Elastic::Helper.default).to receive(:delete_index).with(index_name: task.index_name_from).and_return(true)
+        expect(Gitlab::Elastic::Helper.default).to receive(:delete_index).with(index_name: task.subtasks.first.index_name_from).and_return(true)
       end
 
       described_class.drop_old_indices!
diff --git a/ee/spec/services/elastic/cluster_reindexing_service_spec.rb b/ee/spec/services/elastic/cluster_reindexing_service_spec.rb
index 55008b7b1bf4a4b110ee6a125374c65f1aaf7af6..40fedfce81bda7dc102328e7e41dcb0d7c118f54 100644
--- a/ee/spec/services/elastic/cluster_reindexing_service_spec.rb
+++ b/ee/spec/services/elastic/cluster_reindexing_service_spec.rb
@@ -8,6 +8,20 @@
   context 'state: initial' do
     let(:task) { create(:elastic_reindexing_task, state: :initial) }
 
+    it 'aborts if the main index does not use aliases' do
+      allow(Gitlab::Elastic::Helper.default).to receive(:alias_exists?).and_return(false)
+
+      expect { subject.execute }.to change { task.reload.state }.from('initial').to('failure')
+      expect(task.reload.error_message).to match(/use aliases/)
+    end
+
+    it 'aborts if there are pending ES migrations' do
+      allow(Elastic::DataMigrationService).to receive(:pending_migrations?).and_return(true)
+
+      expect { subject.execute }.to change { task.reload.state }.from('initial').to('failure')
+      expect(task.reload.error_message).to match(/unapplied advanced search migrations/)
+    end
+
     it 'errors when there is not enough space' do
       allow(Gitlab::Elastic::Helper.default).to receive(:index_size_bytes).and_return(100.megabytes)
       allow(Gitlab::Elastic::Helper.default).to receive(:cluster_free_size_bytes).and_return(30.megabytes)
@@ -29,19 +43,26 @@
     it 'triggers reindexing' do
       task = create(:elastic_reindexing_task, state: :indexing_paused)
 
-      allow(Gitlab::Elastic::Helper.default).to receive(:create_empty_index).and_return('new_index_name')
-      allow(Gitlab::Elastic::Helper.default).to receive(:reindex).and_return('task_id')
+      allow(Gitlab::Elastic::Helper.default).to receive(:create_empty_index).and_return('new_index_name' => 'new_index')
+      allow(Gitlab::Elastic::Helper.default).to receive(:create_standalone_indices).and_return('new_issues_name' => 'new_issues')
+      allow(Gitlab::Elastic::Helper.default).to receive(:reindex).with(from: anything, to: 'new_index_name').and_return('task_id_1')
+      allow(Gitlab::Elastic::Helper.default).to receive(:reindex).with(from: anything, to: 'new_issues_name').and_return('task_id_2')
 
       expect { subject.execute }.to change { task.reload.state }.from('indexing_paused').to('reindexing')
 
-      task = task.reload
-      expect(task.index_name_to).to eq('new_index_name')
-      expect(task.elastic_task).to eq('task_id')
+      subtasks = task.subtasks
+      expect(subtasks.count).to eq(2)
+
+      expect(subtasks.first.index_name_to).to eq('new_index_name')
+      expect(subtasks.first.elastic_task).to eq('task_id_1')
+      expect(subtasks.last.index_name_to).to eq('new_issues_name')
+      expect(subtasks.last.elastic_task).to eq('task_id_2')
     end
   end
 
   context 'state: reindexing' do
-    let(:task) { create(:elastic_reindexing_task, state: :reindexing, documents_count: 10) }
+    let(:task) { create(:elastic_reindexing_task, state: :reindexing) }
+    let(:subtask) { create(:elastic_reindexing_subtask, elastic_reindexing_task: task, documents_count: 10) }
     let(:expected_default_settings) do
       {
         refresh_interval: nil,
@@ -57,7 +78,7 @@
 
     context 'errors are raised' do
       before do
-        allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: task.index_name_to).and_return(task.reload.documents_count * 2)
+        allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: subtask.index_name_to).and_return(subtask.reload.documents_count * 2)
       end
 
       it 'errors if documents count is different' do
@@ -82,12 +103,12 @@
 
     context 'task finishes correctly' do
       before do
-        allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: task.index_name_to).and_return(task.reload.documents_count)
+        allow(Gitlab::Elastic::Helper.default).to receive(:documents_count).with(index_name: subtask.index_name_to).and_return(subtask.reload.documents_count)
       end
 
       it 'launches all state steps' do
-        expect(Gitlab::Elastic::Helper.default).to receive(:update_settings).with(index_name: task.index_name_to, settings: expected_default_settings)
-        expect(Gitlab::Elastic::Helper.default).to receive(:switch_alias).with(to: task.index_name_to)
+        expect(Gitlab::Elastic::Helper.default).to receive(:update_settings).with(index_name: subtask.index_name_to, settings: expected_default_settings)
+        expect(Gitlab::Elastic::Helper.default).to receive(:switch_alias).with(to: subtask.index_name_to, from: subtask.index_name_from, alias_name: subtask.alias_name)
         expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false)
 
         expect { subject.execute }.to change { task.reload.state }.from('reindexing').to('success')
diff --git a/ee/spec/services/search/global_service_spec.rb b/ee/spec/services/search/global_service_spec.rb
index af1d265e896be98abfd8992e38c74d648c775b24..41660260fd93fc6fc3016e0f4831792557f78c42 100644
--- a/ee/spec/services/search/global_service_spec.rb
+++ b/ee/spec/services/search/global_service_spec.rb
@@ -112,14 +112,21 @@
       # finished we need a test to verify the old style searches work for
       # instances which haven't finished the migration yet
       context 'when add_new_data_to_issues_documents migration is not finished' do
-        let!(:issue) { create :issue, project: project }
-
         before do
+          allow(Elastic::DataMigrationService).to receive(:migration_has_finished?).and_call_original
           allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
             .with(:add_new_data_to_issues_documents)
             .and_return(false)
+          allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
+            .with(:migrate_issues_to_separate_index)
+            .and_return(false)
         end
 
+        # issue cannot be defined prior to the migration mocks because it
+        # will cause the incorrect value to be passed to `use_separate_indices` when creating
+        # the proxy
+        let!(:issue) { create(:issue, project: project) }
+
         where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do
           permission_table_for_guest_feature_access
         end
@@ -158,6 +165,11 @@
         let(:search_url) { Addressable::Template.new("#{es_host}/{index}/doc/_search{?params*}") }
 
         before do
+          allow(Elastic::DataMigrationService).to receive(:migration_has_finished?).and_call_original
+          allow(Elastic::DataMigrationService).to receive(:migration_has_finished?)
+            .with(:migrate_issues_to_separate_index)
+            .and_return(false)
+
           ensure_elasticsearch_index!
         end
 
diff --git a/ee/spec/support/elastic.rb b/ee/spec/support/elastic.rb
index 9b8a1be5a4de8157ad93e6ddd9b545918ca347ba..a5d540623f8da3fa0263904247bf4c0e34456875 100644
--- a/ee/spec/support/elastic.rb
+++ b/ee/spec/support/elastic.rb
@@ -6,20 +6,23 @@
 
     Elastic::ProcessBookkeepingService.clear_tracking!
 
-    # Delete the migration index and the main ES index
-    helper.delete_index(index_name: helper.migrations_index_name)
-    helper.delete_index
+    # Delete all test indices
+    indices = [helper.target_name, helper.migrations_index_name] + helper.standalone_indices_proxies.map(&:index_name)
+    indices.each do |index_name|
+      helper.delete_index(index_name: index_name)
+    end
 
     helper.create_empty_index(options: { settings: { number_of_replicas: 0 } })
     helper.create_migrations_index
     ::Elastic::DataMigrationService.mark_all_as_completed!
+    helper.create_standalone_indices
     refresh_index!
 
     example.run
 
-    helper.delete_index(index_name: helper.migrations_index_name)
-    helper.delete_index
-
+    indices.each do |index_name|
+      helper.delete_index(index_name: index_name)
+    end
     Elastic::ProcessBookkeepingService.clear_tracking!
   end
 
diff --git a/ee/spec/support/shared_examples/lib/gitlab/elastic/standalone_indices_shared_examples.rb b/ee/spec/support/shared_examples/lib/gitlab/elastic/standalone_indices_shared_examples.rb
new file mode 100644
index 0000000000000000000000000000000000000000..1bf1acc09869c3f9a134ce768addc0fa1d78fb87
--- /dev/null
+++ b/ee/spec/support/shared_examples/lib/gitlab/elastic/standalone_indices_shared_examples.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'deletes all standalone indices' do
+  Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.each do |class_name|
+    describe "#{class_name}" do
+      it 'removes a standalone index' do
+        proxy = ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
+
+        expect { subject }.to change { helper.index_exists?(index_name: proxy.index_name) }.from(true).to(false)
+      end
+    end
+  end
+end
diff --git a/ee/spec/tasks/gitlab/elastic_rake_spec.rb b/ee/spec/tasks/gitlab/elastic_rake_spec.rb
index 15d94308efb3bc8243f57d138ea507a0b5b34ee0..5c15d58da3f22f84e46e6a4a3b09ffdf045d97f1 100644
--- a/ee/spec/tasks/gitlab/elastic_rake_spec.rb
+++ b/ee/spec/tasks/gitlab/elastic_rake_spec.rb
@@ -13,12 +13,68 @@
     before do
       es_helper.delete_index
       es_helper.delete_index(index_name: es_helper.migrations_index_name)
+      es_helper.delete_standalone_indices
     end
 
-    it 'creates an index' do
+    it 'creates the default index' do
       expect { subject }.to change { es_helper.index_exists? }.from(false).to(true)
     end
 
+    context 'when SKIP_ALIAS environment variable is set' do
+      let(:secondary_index_name) { "gitlab-test-#{Time.now.strftime("%Y%m%d-%H%M")}"}
+
+      before do
+        stub_env('SKIP_ALIAS', '1')
+      end
+
+      after do
+        es_helper.delete_index(index_name: secondary_index_name)
+      end
+
+      subject { run_rake_task('gitlab:elastic:create_empty_index', secondary_index_name) }
+
+      it 'does not alias the new index' do
+        expect { subject }.not_to change { es_helper.alias_exists?(name: es_helper.target_name) }
+      end
+
+      it 'does not create the migrations index if it does not exist' do
+        migration_index_name = es_helper.migrations_index_name
+        es_helper.delete_index(index_name: migration_index_name)
+
+        expect { subject }.not_to change { es_helper.index_exists?(index_name: migration_index_name) }
+      end
+
+      it 'creates an index at the specified name' do
+        expect { subject }.to change { es_helper.index_exists?(index_name: secondary_index_name) }.from(false).to(true)
+      end
+
+      Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.each do |class_name|
+        describe "#{class_name}" do
+          it "does not create a standalone index" do
+            proxy = ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
+
+            expect { subject }.not_to change { es_helper.alias_exists?(name: proxy.index_name) }
+          end
+        end
+      end
+    end
+
+    it 'creates the migrations index if it does not exist' do
+      migration_index_name = es_helper.migrations_index_name
+      es_helper.delete_index(index_name: migration_index_name)
+
+      expect { subject }.to change { es_helper.index_exists?(index_name: migration_index_name) }.from(false).to(true)
+    end
+
+    Gitlab::Elastic::Helper::ES_SEPARATE_CLASSES.each do |class_name|
+      describe "#{class_name}" do
+        it "creates a standalone index" do
+          proxy = ::Elastic::Latest::ApplicationClassProxy.new(class_name, use_separate_indices: true)
+          expect { subject }.to change { es_helper.index_exists?(index_name: proxy.index_name) }.from(false).to(true)
+        end
+      end
+    end
+
     it 'marks all migrations as completed' do
       expect(Elastic::DataMigrationService).to receive(:mark_all_as_completed!).and_call_original
       expect(Elastic::MigrationRecord.persisted_versions(completed: true)).to eq([])
@@ -37,6 +93,10 @@
     it 'removes the index' do
       expect { subject }.to change { es_helper.index_exists? }.from(true).to(false)
     end
+
+    it_behaves_like 'deletes all standalone indices' do
+      let(:helper) { es_helper }
+    end
   end
 
   context "with elasticsearch_indexing enabled" do
diff --git a/ee/spec/views/admin/application_settings/_elasticsearch_form.html.haml_spec.rb b/ee/spec/views/admin/application_settings/_elasticsearch_form.html.haml_spec.rb
index e11e8e68a9bb224f9ea8b5479374b32b7dce49bc..ab8a6d7d3531c7e1b4291b52da7e2fbaf02f353c 100644
--- a/ee/spec/views/admin/application_settings/_elasticsearch_form.html.haml_spec.rb
+++ b/ee/spec/views/admin/application_settings/_elasticsearch_form.html.haml_spec.rb
@@ -5,6 +5,8 @@
 RSpec.describe 'admin/application_settings/_elasticsearch_form' do
   let_it_be(:admin) { create(:admin) }
   let(:page) { Capybara::Node::Simple.new(rendered) }
+  let(:pause_indexing) { false }
+  let(:pending_migrations) { false }
 
   before do
     assign(:application_setting, application_setting)
@@ -18,7 +20,8 @@
 
     before do
       allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_indexing?)).and_return(es_indexing)
-      allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_pause_indexing?)).and_return(true)
+      allow(Gitlab::CurrentSettings).to(receive(:elasticsearch_pause_indexing?)).and_return(pause_indexing)
+      allow(Elastic::DataMigrationService).to(receive(:pending_migrations?)).and_return(pending_migrations)
     end
 
     context 'indexing is enabled' do
@@ -36,6 +39,17 @@
         expect(rendered).to have_css('input[id=application_setting_elasticsearch_pause_indexing]')
         expect(rendered).not_to have_css('input[id=application_setting_elasticsearch_pause_indexing][disabled="disabled"]')
       end
+
+      context 'pending migrations' do
+        let(:pending_migrations) { true }
+        let(:pause_indexing) { true }
+
+        it 'renders a disabled pause checkbox' do
+          render
+
+          expect(rendered).to have_css('input[id=application_setting_elasticsearch_pause_indexing][disabled="disabled"]')
+        end
+      end
     end
 
     context 'indexing is disabled' do
@@ -88,7 +102,7 @@
       it 'renders the task' do
         render
 
-        expect(rendered).to include("State: #{task.state}")
+        expect(rendered).to include("Reindexing Status: #{task.state}")
         expect(rendered).not_to include("Task ID:")
         expect(rendered).not_to include("Error:")
         expect(rendered).not_to include("Expected documents:")
@@ -97,28 +111,30 @@
     end
 
     context 'with extended details' do
-      let(:task) { build(:elastic_reindexing_task, state: :reindexing, elastic_task: 'elastic-task-id', error_message: 'error-message', documents_count_target: 5, documents_count: 10) }
+      let!(:task) { create(:elastic_reindexing_task, state: :reindexing, error_message: 'error-message') }
+      let!(:subtask) { create(:elastic_reindexing_subtask, elastic_reindexing_task: task, documents_count_target: 5, documents_count: 10) }
 
       it 'renders the task' do
         render
 
-        expect(rendered).to include("State: #{task.state}")
-        expect(rendered).to include("Task ID: #{task.elastic_task}")
+        expect(rendered).to include("Reindexing Status: #{task.state}")
+        expect(rendered).to include("Task ID: #{subtask.elastic_task}")
         expect(rendered).to include("Error: #{task.error_message}")
-        expect(rendered).to include("Expected documents: #{task.documents_count}")
-        expect(rendered).to include("Documents reindexed: #{task.documents_count_target} (50.0%)")
+        expect(rendered).to include("Expected documents: #{subtask.documents_count}")
+        expect(rendered).to include("Documents reindexed: #{subtask.documents_count_target} (50.0%)")
       end
     end
 
     context 'with extended details, but without documents_count_target' do
-      let(:task) { build(:elastic_reindexing_task, state: :reindexing, elastic_task: 'elastic-task-id', documents_count: 10) }
+      let!(:task) { create(:elastic_reindexing_task, state: :reindexing) }
+      let!(:subtask) { create(:elastic_reindexing_subtask, elastic_reindexing_task: task, documents_count: 10) }
 
       it 'renders the task' do
         render
 
-        expect(rendered).to include("State: #{task.state}")
-        expect(rendered).to include("Task ID: #{task.elastic_task}")
-        expect(rendered).to include("Expected documents: #{task.documents_count}")
+        expect(rendered).to include("Reindexing Status: #{task.state}")
+        expect(rendered).to include("Task ID: #{subtask.elastic_task}")
+        expect(rendered).to include("Expected documents: #{subtask.documents_count}")
         expect(rendered).not_to include("Error:")
         expect(rendered).not_to include("Documents reindexed:")
       end
diff --git a/ee/spec/workers/elastic/migration_worker_spec.rb b/ee/spec/workers/elastic/migration_worker_spec.rb
index 7f9c2ef4b8e94a14950cfe7a4c3688847e840587..3f7337c7d7cad453d2a5536538b9c2f289e95094 100644
--- a/ee/spec/workers/elastic/migration_worker_spec.rb
+++ b/ee/spec/workers/elastic/migration_worker_spec.rb
@@ -44,6 +44,45 @@
         end
       end
 
+      context 'migration is halted' do
+        before do
+          allow(Gitlab::CurrentSettings).to receive(:elasticsearch_pause_indexing?).and_return(true)
+          allow(subject).to receive(:current_migration).and_return(migration)
+          allow(migration).to receive(:pause_indexing?).and_return(true)
+          allow(migration).to receive(:halted?).and_return(true)
+        end
+
+        it 'skips execution' do
+          expect(migration).not_to receive(:migrate)
+
+          subject.perform
+        end
+
+        context 'pause indexing is not allowed' do
+          before do
+            migration.save_state!(pause_indexing: false)
+          end
+
+          it 'does not unpause indexing' do
+            expect(Gitlab::CurrentSettings).not_to receive(:update!)
+
+            subject.perform
+          end
+        end
+
+        context 'pause indexing is allowed' do
+          before do
+            migration.save_state!(pause_indexing: true)
+          end
+
+          it 'unpauses indexing' do
+            expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false)
+
+            subject.perform
+          end
+        end
+      end
+
       context 'migration process' do
         before do
           allow(migration).to receive(:persisted?).and_return(persisted)
@@ -91,6 +130,29 @@
             subject.perform
           end
         end
+
+        context 'indexing pause' do
+          before do
+            allow(migration).to receive(:pause_indexing?).and_return(true)
+          end
+
+          let(:batched) { true }
+
+          where(:persisted, :completed, :expected) do
+            false | false | false
+            true  | false | false
+            true  | true  | true
+          end
+
+          with_them do
+            it 'pauses and unpauses indexing' do
+              expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: true)
+              expect(Gitlab::CurrentSettings).to receive(:update!).with(elasticsearch_pause_indexing: false) if expected
+
+              subject.perform
+            end
+          end
+        end
       end
     end
   end
diff --git a/ee/spec/workers/elastic_delete_project_worker_spec.rb b/ee/spec/workers/elastic_delete_project_worker_spec.rb
index 6285276d57f42e8d4eff7eea5ca5441a222b36fc..d19777988b9ee3dbf9814b57b437011beaae81ba 100644
--- a/ee/spec/workers/elastic_delete_project_worker_spec.rb
+++ b/ee/spec/workers/elastic_delete_project_worker_spec.rb
@@ -2,7 +2,7 @@
 
 require 'spec_helper'
 
-RSpec.describe ElasticDeleteProjectWorker, :elastic do
+RSpec.describe ElasticDeleteProjectWorker, :elastic, :sidekiq_inline do
   subject { described_class.new }
 
   # Create admin user and search globally to avoid dealing with permissions in
@@ -11,6 +11,7 @@
 
   before do
     stub_ee_application_setting(elasticsearch_indexing: true)
+    allow(Elastic::DataMigrationService).to receive(:migration_has_finished?).and_return(migration_has_finished)
   end
 
   # Extracted to a method as the `#elastic_search` methods using it below will
@@ -19,36 +20,51 @@ def search_options
     { options: { current_user: user, project_ids: :any } }
   end
 
-  it 'deletes a project with all nested objects' do
-    project = create :project, :repository
-    issue = create :issue, project: project
-    milestone = create :milestone, project: project
-    note = create :note, project: project
-    merge_request = create :merge_request, target_project: project, source_project: project
-
-    ensure_elasticsearch_index!
-
-    ## All database objects + data from repository. The absolute value does not matter
-    expect(Project.elastic_search('*', **search_options).records).to include(project)
-    expect(Issue.elastic_search('*', **search_options).records).to include(issue)
-    expect(Milestone.elastic_search('*', **search_options).records).to include(milestone)
-    expect(Note.elastic_search('*', **search_options).records).to include(note)
-    expect(MergeRequest.elastic_search('*', **search_options).records).to include(merge_request)
-
-    subject.perform(project.id, project.es_id)
-    ensure_elasticsearch_index!
-
-    expect(Project.elastic_search('*', **search_options).total_count).to be(0)
-    expect(Issue.elastic_search('*', **search_options).total_count).to be(0)
-    expect(Milestone.elastic_search('*', **search_options).total_count).to be(0)
-    expect(Note.elastic_search('*', **search_options).total_count).to be(0)
-    expect(MergeRequest.elastic_search('*', **search_options).total_count).to be(0)
-
-    # verify that entire index is empty
-    # searches use joins on the parent record (project)
-    # and the previous queries will not find data left in the index
-    helper = Gitlab::Elastic::Helper.default
-
-    expect(helper.documents_count).to be(0)
+  shared_examples 'delete project and objects' do
+    it 'deletes a project with all nested objects' do
+      project = create :project, :repository
+      issue = create :issue, project: project
+      milestone = create :milestone, project: project
+      note = create :note, project: project
+      merge_request = create :merge_request, target_project: project, source_project: project
+
+      ensure_elasticsearch_index!
+
+      ## All database objects + data from repository. The absolute value does not matter
+      expect(Project.elastic_search('*', **search_options).records).to include(project)
+      expect(Issue.elastic_search('*', **search_options).records).to include(issue)
+      expect(Milestone.elastic_search('*', **search_options).records).to include(milestone)
+      expect(Note.elastic_search('*', **search_options).records).to include(note)
+      expect(MergeRequest.elastic_search('*', **search_options).records).to include(merge_request)
+
+      subject.perform(project.id, project.es_id)
+
+      ensure_elasticsearch_index!
+
+      expect(Project.elastic_search('*', **search_options).total_count).to be(0)
+      expect(Issue.elastic_search('*', **search_options).total_count).to be(0)
+      expect(Milestone.elastic_search('*', **search_options).total_count).to be(0)
+      expect(Note.elastic_search('*', **search_options).total_count).to be(0)
+      expect(MergeRequest.elastic_search('*', **search_options).total_count).to be(0)
+
+      # verify that entire index is empty
+      # searches use joins on the parent record (project)
+      # and the previous queries will not find data left in the index
+      helper = Gitlab::Elastic::Helper.default
+
+      expect(helper.documents_count).to be(0)
+    end
+  end
+
+  context 'migration has finished' do
+    let(:migration_has_finished) { true }
+
+    include_examples 'delete project and objects'
+  end
+
+  context 'migration has not finished' do
+    let(:migration_has_finished) { false }
+
+    include_examples 'delete project and objects'
   end
 end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index c89412998417bb99fedb7411dd2a60429036d42f..77c9ad3f012b52a1bcf70199b9bf4a90bf623c10 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -10398,6 +10398,9 @@ msgstr ""
 msgid "Elasticsearch HTTP client timeout value in seconds."
 msgstr ""
 
+msgid "Elasticsearch indexing"
+msgstr ""
+
 msgid "Elasticsearch indexing restrictions"
 msgstr ""
 
@@ -20371,6 +20374,9 @@ msgstr ""
 msgid "Pause"
 msgstr ""
 
+msgid "Pause Elasticsearch indexing"
+msgstr ""
+
 msgid "Pause replication"
 msgstr ""
 
@@ -23196,7 +23202,7 @@ msgstr ""
 msgid "Regulate approvals by authors/committers. Affects all projects."
 msgstr ""
 
-msgid "Reindexing status"
+msgid "Reindexing Status: %{status}"
 msgstr ""
 
 msgid "Rejected (closed)"
@@ -26687,9 +26693,6 @@ msgstr ""
 msgid "State your message to activate"
 msgstr ""
 
-msgid "State: %{last_reindexing_task_state}"
-msgstr ""
-
 msgid "Static Application Security Testing (SAST)"
 msgstr ""
 
@@ -28212,6 +28215,9 @@ msgstr ""
 msgid "There are no variables yet."
 msgstr ""
 
+msgid "There are pending advanced search migrations. Indexing must remain paused until the migrations are completed."
+msgstr ""
+
 msgid "There are running deployments on the environment. Please retry later."
 msgstr ""
 
@@ -30138,7 +30144,7 @@ msgstr ""
 msgid "Until"
 msgstr ""
 
-msgid "Unused, previous index '%{index_name}' will be deleted after %{time} automatically."
+msgid "Unused, previous indices: %{index_names} will be deleted after %{time} automatically."
 msgstr ""
 
 msgid "Unverified"