Skip to content
代码片段 群组 项目
未验证 提交 a426524c 编辑于 作者: Terri Chu's avatar Terri Chu 提交者: GitLab
浏览文件

Mark 20230530500000 as obsolete

This migration marks the 20230530500000 MigrateProjectsToSeparateIndex Advanced search migration as obsolete.

This MR will still need changes to remove references to the migration in the code.
At the moment the `gitlab-housekeeper` is not always capable of removing all references, so you must check the
diff and pipeline failures to confirm whether there are any issues.
It is the responsibility of the assignee (picked from ~"group::global search") to push those changes to this branch.

You can read more about the process for marking Advanced search migrations as obsolete in
https://docs.gitlab.com/ee/development/search/advanced_search_migration_styleguide.html#deleting-advanced-search-migrations-in-a-major-version-upgrade.

As part of our process, we want to ensure that all Advanced search migrations have had at least one
[required stop](https://docs.gitlab.com/ee/development/database/required_stops.html)
to process the migration. Therefore, we can mark any Advanced search migration added before the
last required stop as obsolete.

This change was generated by
[gitlab-housekeeper](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-housekeeper)
using the Keeps::MarkOldAdvancedSearchMigrationsAsObsolete keep.

To provide feedback on your experience with `gitlab-housekeeper` please comment in
<https://gitlab.com/gitlab-org/gitlab/-/issues/442003>.

Changelog: other
EE: true
上级 5e078f88
No related branches found
No related tags found
无相关合并请求
显示
137 个添加677 个删除
......@@ -1861,7 +1861,6 @@ Style/InlineDisableAnnotation:
- 'ee/spec/controllers/concerns/gitlab_subscriptions/seat_count_alert_spec.rb'
- 'ee/spec/controllers/concerns/routable_actions_spec.rb'
- 'ee/spec/controllers/projects/settings/merge_requests_controller_spec.rb'
- 'ee/spec/elastic/migrate/20230503064300_backfill_project_permissions_in_blobs_using_permutations_spec.rb'
- 'ee/spec/factories/package_metadata/pm_licenses.rb'
- 'ee/spec/factories/security_scans.rb'
- 'ee/spec/features/dashboards/todos_spec.rb'
......
......@@ -500,7 +500,7 @@ def mirror_hard_failed?
override :use_separate_indices?
def use_separate_indices?
::Elastic::DataMigrationService.migration_has_finished?(:migrate_projects_to_separate_index)
true
end
override :verification_state_table_class
......
......@@ -73,9 +73,6 @@ def estimate_shard_sizes
estimates = {}
klasses = CLASSES_TO_COUNT
unless ::Elastic::DataMigrationService.migration_has_finished?(:migrate_projects_to_separate_index)
klasses -= [Project]
end
unless ::Elastic::DataMigrationService.migration_has_finished?(:create_epic_index) &&
::Elastic::DataMigrationService.migration_has_finished?(:backfill_epics)
......
......@@ -5,6 +5,6 @@ description: Moves project records to a standalone index
group: group::global search
milestone: '16.2'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/122193
obsolete: false
marked_obsolete_by_url:
marked_obsolete_in_milestone:
obsolete: true
marked_obsolete_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/152985
marked_obsolete_in_milestone: '17.1'
......@@ -5,6 +5,6 @@ description: Remove Project documents from main index.
group: group::global search
milestone: '16.3'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/127002
obsolete: false
marked_obsolete_by_url:
marked_obsolete_in_milestone:
obsolete: true
marked_obsolete_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/152985
marked_obsolete_in_milestone: '17.1'
......@@ -135,3 +135,5 @@ def set_migration_state_for_next_slice(slice)
)
end
end
MigrateProjectsToSeparateIndex.prepend ::Elastic::MigrationObsolete
......@@ -62,3 +62,5 @@ def document_type
:project
end
end
RemoveProjectsFromMainIndex.prepend ::Elastic::MigrationObsolete
......@@ -6,7 +6,7 @@ class ApplicationInstanceProxy < Elasticsearch::Model::Proxy::InstanceMethodsPro
include InstanceProxyUtil
def es_parent
"project_#{target.project_id}" unless target.is_a?(Project) || target&.project_id.nil?
"project_#{target.project_id}" unless target&.project_id.nil?
end
def es_type
......
......@@ -73,38 +73,6 @@ def delete_index_for_commits_and_blobs(is_wiki: false)
}
}
)
return if ::Elastic::DataMigrationService.migration_has_finished?(:migrate_projects_to_separate_index)
# This delete_by_query can be removed completely once the blob gets migrated to the separate index
client.delete_by_query(
index: index_name,
routing: es_parent,
conflicts: 'proceed',
body: {
query: {
bool: {
filter: [
{
terms: {
type: types
}
},
{
has_parent: {
parent_type: 'project',
query: {
term: {
id: project_id
}
}
}
}
]
}
}
}
)
end
private
......
......@@ -6,12 +6,9 @@ class ProjectClassProxy < ApplicationClassProxy
extend ::Gitlab::Utils::Override
def elastic_search(query, options: {})
if ::Elastic::DataMigrationService.migration_has_finished?(:migrate_projects_to_separate_index)
options[:project_id_field] = :id
options[:no_join_project] = true
end
options[:in] = %w[name^10 name_with_namespace^2 path_with_namespace path^9 description]
options[:project_id_field] = :id
options[:no_join_project] = true
query_hash = basic_query_hash(options[:in], query, options)
......
......@@ -3,6 +3,8 @@
module Elastic
module Latest
class ProjectInstanceProxy < ApplicationInstanceProxy
extend ::Gitlab::Utils::Override
SCHEMA_VERSION = 24_02
TRACKED_FEATURE_SETTINGS = %w[
......@@ -55,25 +57,11 @@ def as_indexed_json(_options = {})
data.merge!(add_count_fields(target))
unless ::Elastic::DataMigrationService.migration_has_finished?(:migrate_projects_to_separate_index)
# Set it as a parent in our `project => child` JOIN field
data['join_field'] = es_type
TRACKED_FEATURE_SETTINGS.each do |feature|
data[feature] = if target.project_feature.present?
target.project_feature.public_send(feature) # rubocop:disable GitlabSecurity/PublicSend
else
logger.warn(message: 'Project is missing ProjectFeature', id: target.id)
ProjectFeature::PRIVATE
end
end
end
data
end
override :es_parent
def es_parent
return unless ::Elastic::DataMigrationService.migration_has_finished?(:migrate_projects_to_separate_index)
"n_#{target.root_ancestor.id}"
end
......
......@@ -81,10 +81,7 @@ def test_array!(array)
end
end
attr_reader :klass, :db_id, :es_id
# This attribute is nil for some records, e.g., projects
attr_reader :es_parent
attr_reader :klass, :db_id, :es_id, :es_parent
alias_attribute :identifier, :es_id
alias_attribute :routing, :es_parent
......
......@@ -3,6 +3,6 @@
require 'spec_helper'
require File.expand_path('ee/elastic/migrate/20230503064300_backfill_project_permissions_in_blobs_using_permutations.rb') # rubocop disable Layout/LineLength
RSpec.describe BackfillProjectPermissionsInBlobsUsingPermutations, :elastic, feature_category: :global_search do
RSpec.describe BackfillProjectPermissionsInBlobsUsingPermutations, feature_category: :global_search do
it_behaves_like 'a deprecated Advanced Search migration', 20230503064300
end
......@@ -6,7 +6,6 @@
RSpec.describe BackfillProjectPermissionsInBlobs, :elastic_clean, :sidekiq_inline,
feature_category: :global_search do
let(:version) { 20230518064300 }
let(:old_version_without_project_permissions) { 20230321202400 }
let(:helper) { Gitlab::Elastic::Helper.new }
let(:migration) { described_class.new(version) }
......@@ -156,7 +155,7 @@
describe 'integration test', :elastic_clean do
before do
set_elasticsearch_migration_to(old_version_without_project_permissions, including: false)
set_elasticsearch_migration_to(version, including: false)
projects.each do |project|
project.repository.index_commits_and_blobs # ensure objects are indexed
......@@ -208,8 +207,8 @@ def remove_permissions_for_blob(project)
end
def update_by_query(project, source)
Project.__elasticsearch__.client.update_by_query({
index: Project.__elasticsearch__.index_name,
Repository.__elasticsearch__.client.update_by_query({
index: Repository.__elasticsearch__.index_name,
wait_for_completion: true,
refresh: true,
body: {
......
......@@ -3,230 +3,6 @@
require 'spec_helper'
require File.expand_path('ee/elastic/migrate/20230530500000_migrate_projects_to_separate_index.rb')
RSpec.describe MigrateProjectsToSeparateIndex, :elastic_clean, :sidekiq_inline, feature_category: :global_search do
let(:version) { 20230530500000 }
let(:migration) { described_class.new(version) }
let(:index_name) { "#{es_helper.target_name}-projects" }
let_it_be(:helper) { ::Gitlab::Elastic::Helper.default }
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
allow(migration).to receive(:helper).and_return(helper)
end
describe 'migration_options' do
it 'has migration options set', :aggregate_failures do
expect(migration.batched?).to be_truthy
expect(migration.throttle_delay).to eq(1.minute)
expect(migration.pause_indexing?).to be_truthy
expect(migration.space_requirements?).to be_truthy
end
end
describe '.migrate' do
before do
set_elasticsearch_migration_to(:migrate_projects_to_separate_index, including: false)
end
context 'for initial launch' do
before do
es_helper.delete_index(index_name: es_helper.target_index_name(target: index_name))
end
it 'creates an index and sets migration_state' do
expect { migration.migrate }.to change { es_helper.alias_exists?(name: index_name) }.from(false).to(true)
expect(migration.migration_state).to include(slice: 0, max_slices: 5)
end
it 'sets correct number of slices for 1 shard' do
allow(migration).to receive(:get_number_of_shards).and_return(1)
migration.migrate
expect(migration.migration_state).to include(slice: 0, max_slices: 2)
end
end
context 'for batch run' do
it 'sets migration_state task_id' do
allow(migration).to receive(:reindex).and_return('task_id')
migration.set_migration_state(slice: 0, max_slices: 5, retry_attempt: 0)
migration.migrate
expect(migration.migration_state).to include(slice: 0, max_slices: 5, task_id: 'task_id')
end
it 'sets next slice and clears task_id after task check' do
allow(migration).to receive(:reindexing_completed?).and_return(true)
migration.set_migration_state(slice: 0, max_slices: 5, retry_attempt: 0, task_id: 'task_id')
migration.migrate
expect(migration.migration_state).to include(slice: 1, max_slices: 5, task_id: nil)
end
it 'resets retry_attempt clears task_id for the next slice' do
allow(migration).to receive(:reindexing_completed?).and_return(true)
migration.set_migration_state(slice: 0, max_slices: 5, retry_attempt: 5, task_id: 'task_id')
migration.migrate
expect(migration.migration_state).to match(slice: 1, max_slices: 5, retry_attempt: 0, task_id: nil)
end
context 'when reindexing is still in progress' do
before do
allow(migration).to receive(:reindexing_completed?).and_return(false)
end
it 'does nothing' do
migration.set_migration_state(slice: 0, max_slices: 5, retry_attempt: 0, task_id: 'task_id')
migration.migrate
expect(migration).not_to receive(:reindex)
end
end
it 'migrates all projects', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/436579' do
create_list(:project, 3, visibility_level: 0, wiki_access_level: 0)
ensure_elasticsearch_index! # ensure objects are indexed
slices = 2
migration.set_migration_state(slice: 0, max_slices: slices, retry_attempt: 0)
migration.migrate
10.times do
break if migration.completed?
migration.migrate
end
expect(migration.completed?).to be_truthy
expect(es_helper.documents_count(index_name: index_name)).to eq(3)
end
end
context 'for failed run' do
context 'if exception is raised' do
before do
allow(migration).to receive(:reindex).and_raise(StandardError)
end
it 'increases retry_attempt and clears task_id' do
migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 1)
expect { migration.migrate }.to raise_error(StandardError)
expect(migration.migration_state).to match(slice: 0, max_slices: 2, retry_attempt: 2, task_id: nil)
end
it 'fails the migration after too many attempts',
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/436579' do
migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 30)
migration.migrate
expect(migration.migration_state).to match(
slice: 0,
max_slices: 2,
retry_attempt: 30,
halted: true,
failed: true,
halted_indexing_unpaused: false
)
expect(migration).not_to receive(:reindex)
end
end
context 'when elasticsearch failures' do
context 'if total is not equal' do
before do
allow(helper).to receive(:task_status).and_return(
{
"completed" => true,
"response" => {
"total" => 60, "updated" => 0, "created" => 45, "deleted" => 0, "failures" => []
}
}
)
end
it 'raises an error and clears task_id' do
migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 0, task_id: 'task_id')
expect { migration.migrate }.to raise_error(/total is not equal/)
expect(migration.migration_state[:task_id]).to be_nil
end
end
context 'when reindexing fails' do
before do
allow(helper).to receive(:task_status).with(task_id: 'task_id').and_return(
{
"completed" => true,
"response" => {
"total" => 60,
"updated" => 0,
"created" => 0,
"deleted" => 0,
"failures" => [
{ type: "es_rejected_execution_exception" }
]
}
}
)
end
it 'raises an error and clears task_id' do
migration.set_migration_state(slice: 0, max_slices: 2, retry_attempt: 0, task_id: 'task_id')
expect { migration.migrate }.to raise_error(/failed with/)
expect(migration.migration_state[:task_id]).to be_nil
end
end
end
end
end
describe '.completed?' do
subject { migration.completed? }
let(:original_count) { 5 }
before do
allow(helper).to receive(:refresh_index).and_return(true)
allow(migration).to receive(:original_documents_count).and_return(original_count)
allow(migration).to receive(:new_documents_count).and_return(new_count)
end
context 'when counts are equal' do
let(:new_count) { original_count }
it 'returns true' do
is_expected.to be_truthy
end
end
context 'when counts are not equal' do
let(:new_count) { original_count - 1 }
it 'returns true' do
is_expected.to be_falsey
end
end
end
describe 'space_required_bytes' do
subject { migration.space_required_bytes }
before do
allow(helper).to receive(:index_size_bytes).and_return(300)
end
it { is_expected.to eq(3) }
end
RSpec.describe MigrateProjectsToSeparateIndex, feature_category: :global_search do
it_behaves_like 'a deprecated Advanced Search migration', 20230530500000
end
......@@ -4,145 +4,5 @@
require File.expand_path('ee/elastic/migrate/20230726094700_remove_projects_from_main_index.rb')
RSpec.describe RemoveProjectsFromMainIndex, feature_category: :global_search do
let(:version) { 20230726094700 }
let(:migration) { described_class.new(version) }
let(:helper) { Gitlab::Elastic::Helper.new }
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
allow(migration).to receive(:helper).and_return(helper)
end
describe 'migration_options' do
it 'has migration options set', :aggregate_failures do
expect(migration.batched?).to be_truthy
expect(migration).to be_retry_on_failure
expect(migration.batch_size).to eq(2000)
end
end
describe '.migrate', :elastic_clean, :sidekiq_inline do
let(:client) { ::Gitlab::Search::Client.new }
before do
allow(migration).to receive(:client).and_return(client)
allow(migration).to receive(:batch_size).and_return(2)
set_elasticsearch_migration_to(:migrate_projects_to_separate_index, including: false)
create_list(:project, 5)
ensure_elasticsearch_index! # ensure objects are indexed
end
context 'when projects are still present in the index' do
it 'removes projects from the index', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/448563' do
expect(migration.completed?).to be_falsey
migration.migrate
expect(migration.migration_state).to match(documents_remaining: anything, task_id: anything)
# the migration might not complete after the initial task is created
# so make sure it actually completes
10.times do
migration.migrate
break if migration.completed?
sleep 0.01
end
migration.migrate # To set a pristine state
expect(migration.completed?).to be_truthy
expect(migration.migration_state).to match(task_id: nil, documents_remaining: 0)
end
context 'and task in progress' do
before do
allow(migration).to receive(:completed?).and_return(false)
allow(migration).to receive(:client).and_return(client)
allow(helper).to receive(:task_status).and_return('completed' => false)
migration.set_migration_state(task_id: 'task_1')
end
it 'does nothing if task is not completed' do
migration.migrate
expect(client).not_to receive(:delete_by_query)
end
end
end
context 'when migration fails' do
context 'and exception is raised' do
before do
allow(client).to receive(:delete_by_query).and_raise(StandardError)
end
it 'resets task_id' do
migration.set_migration_state(task_id: 'task_1')
expect { migration.migrate }.to raise_error(StandardError)
expect(migration.migration_state).to match(task_id: nil, documents_remaining: anything)
end
end
context 'and es responds with errors' do
before do
allow(client).to receive(:delete_by_query).and_return('task' => 'task_1')
allow(migration).to receive(:get_number_of_shards).and_return(1)
end
context 'when a task throws an error' do
before do
allow(helper).to receive(:task_status).and_return('error' => ['failed'])
migration.migrate
end
it 'resets task_id' do
expect { migration.migrate }.to raise_error(/Failed to delete projects/)
expect(migration.migration_state).to match(task_id: nil, documents_remaining: anything)
end
end
context 'when delete_by_query throws an error' do
before do
allow(client).to receive(:delete_by_query).and_return('failures' => ['failed'])
end
it 'resets task_id' do
expect { migration.migrate }.to raise_error(/Failed to delete projects/)
expect(migration.migration_state).to match(task_id: nil, documents_remaining: anything)
end
end
end
end
context 'when projects are already deleted' do
before do
client.delete_by_query(index: helper.target_name,
body: { query: { bool: { filter: { term: { type: 'project' } } } } })
end
it 'does not execute delete_by_query' do
expect(migration.completed?).to be_truthy
expect(helper.client).not_to receive(:delete_by_query)
migration.migrate
end
end
end
describe '.completed?' do
context 'when original_documents_count is zero' do
before do
allow(migration).to receive(:original_documents_count).and_return 0
end
it 'returns true' do
expect(migration.completed?).to eq true
end
end
context 'when original_documents_count is non zero' do
before do
allow(migration).to receive(:original_documents_count).and_return 1
end
it 'returns false' do
expect(migration.completed?).to eq false
end
end
end
it_behaves_like 'a deprecated Advanced Search migration', 20230726094700
end
......@@ -56,14 +56,6 @@
it 'includes project id' do
expect(subject.es_parent).to eq("project_#{target.project.id}")
end
context 'when target type is in routing excluded list' do
let(:target) { project }
it 'is nil' do
expect(subject.es_parent).to be_nil
end
end
end
describe '#namespace_ancestry' do
......
......@@ -9,198 +9,92 @@
subject(:proxy) { described_class.new(project) }
describe 'when migrate_projects_to_separate_index migration is not completed' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
set_elasticsearch_migration_to(:migrate_projects_to_separate_index, including: false)
ensure_elasticsearch_index! # ensure objects are indexed
end
describe '#as_indexed_json' do
it 'serializes project as hash' do
result = proxy.as_indexed_json.with_indifferent_access
expect(result).to include(
id: project.id,
name: project.name,
path: project.path,
description: project.description,
namespace_id: project.namespace_id,
created_at: project.created_at,
updated_at: project.updated_at,
archived: project.archived,
visibility_level: project.visibility_level,
last_activity_at: project.last_activity_at,
name_with_namespace: project.name_with_namespace,
path_with_namespace: project.path_with_namespace)
described_class::TRACKED_FEATURE_SETTINGS.each do |feature|
expect(result).to include(feature => project.project_feature.public_send(feature)) # rubocop:disable GitlabSecurity/PublicSend
end
end
context 'when project_feature is null' do
before do
allow(project).to receive(:project_feature).and_return(nil)
end
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
ensure_elasticsearch_index! # ensure objects are indexed
allow(::Elastic::DataMigrationService).to receive(:migration_has_finished?).and_return(true)
end
it 'sets all tracked feature access levels to PRIVATE' do
result = proxy.as_indexed_json.with_indifferent_access
describe '#as_indexed_json' do
it 'contains the expected mappings' do
result = proxy.as_indexed_json.with_indifferent_access.keys
project_proxy = Elastic::Latest::ApplicationClassProxy.new(Project, use_separate_indices: true)
# readme_content is not populated by as_indexed_json
expected_keys = project_proxy.mappings.to_hash[:properties].keys.map(&:to_s) - ['readme_content']
Elastic::Latest::ProjectInstanceProxy::TRACKED_FEATURE_SETTINGS.each do |feature|
expect(result).to include(feature => ProjectFeature::PRIVATE) # rubocop:disable GitlabSecurity/PublicSend
end
end
end
expect(result).to match_array(expected_keys)
end
end
describe 'when migrate_projects_to_separate_index migration is completed' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
set_elasticsearch_migration_to(:migrate_projects_to_separate_index, including: true)
ensure_elasticsearch_index! # ensure objects are indexed
it 'serializes project as hash' do
result = proxy.as_indexed_json.with_indifferent_access
expect(result).to include(
id: project.id,
name: project.name,
path: project.path,
description: project.description,
namespace_id: project.namespace_id,
created_at: project.created_at,
updated_at: project.updated_at,
archived: project.archived,
last_activity_at: project.last_activity_at,
name_with_namespace: project.name_with_namespace,
path_with_namespace: project.path_with_namespace,
traversal_ids: project.elastic_namespace_ancestry,
type: 'project',
visibility_level: project.visibility_level,
schema_version: schema_version,
ci_catalog: project.catalog_resource.present?
)
end
describe '#as_indexed_json' do
it 'serializes project as hash' do
context 'when project does not have an owner' do
it 'does not throw an exception' do
allow(project).to receive(:owner).and_return(nil)
result = proxy.as_indexed_json.with_indifferent_access
expect(result).to include(
id: project.id,
name: project.name,
path: project.path,
description: project.description,
namespace_id: project.namespace_id,
created_at: project.created_at,
updated_at: project.updated_at,
archived: project.archived,
last_activity_at: project.last_activity_at,
name_with_namespace: project.name_with_namespace,
path_with_namespace: project.path_with_namespace,
traversal_ids: project.elastic_namespace_ancestry,
type: 'project',
visibility_level: project.visibility_level,
schema_version: schema_version,
ci_catalog: project.catalog_resource.present?
)
expect(result[:owner_id]).to be_nil
end
end
end
describe 'when add_fields_to_projects_index migration is completed' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
set_elasticsearch_migration_to(:add_fields_to_projects_index, including: true)
ensure_elasticsearch_index! # ensure objects are indexed
end
describe '#as_indexed_json' do
it 'serializes project as hash' do
result = proxy.as_indexed_json.with_indifferent_access
expect(result).to include(
id: project.id,
name: project.name,
path: project.path,
description: project.description,
namespace_id: project.namespace_id,
created_at: project.created_at,
updated_at: project.updated_at,
archived: project.archived,
last_activity_at: project.last_activity_at,
name_with_namespace: project.name_with_namespace,
path_with_namespace: project.path_with_namespace,
traversal_ids: project.elastic_namespace_ancestry,
type: 'project',
visibility_level: project.visibility_level,
schema_version: schema_version,
ci_catalog: project.catalog_resource.present?,
mirror: project.mirror?,
forked: project.forked? || false,
owner_id: project.owner.id,
repository_languages: project.repository_languages.map(&:name)
)
context 'when add_fields_to_projects_index migration is not completed' do
before do
set_elasticsearch_migration_to(:add_fields_to_projects_index, including: false)
end
context 'when project does not have an owner' do
it 'does not throw an exception' do
allow(project).to receive(:owner).and_return(nil)
result = proxy.as_indexed_json.with_indifferent_access
it 'does not include the gated fields' do
result = proxy.as_indexed_json.with_indifferent_access
expect(result[:owner_id]).to be_nil
end
expect(result.keys).not_to include(:mirror)
expect(result.keys).not_to include(:forked)
expect(result.keys).not_to include(:owner_id)
expect(result.keys).not_to include(:repository_languages)
end
end
end
describe 'when add_count_fields_to_projects migration is completed' do
before do
stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
set_elasticsearch_migration_to(:add_count_fields_to_projects, including: true)
ensure_elasticsearch_index! # ensure objects are indexed
end
describe '#as_indexed_json' do
it 'serializes project as hash' do
result = proxy.as_indexed_json.with_indifferent_access
expect(result).to include(
id: project.id,
name: project.name,
path: project.path,
description: project.description,
namespace_id: project.namespace_id,
created_at: project.created_at,
updated_at: project.updated_at,
archived: project.archived,
last_activity_at: project.last_activity_at,
name_with_namespace: project.name_with_namespace,
path_with_namespace: project.path_with_namespace,
traversal_ids: project.elastic_namespace_ancestry,
type: 'project',
visibility_level: project.visibility_level,
schema_version: schema_version,
ci_catalog: project.catalog_resource.present?,
mirror: project.mirror?,
forked: project.forked? || false,
owner_id: project.owner.id,
repository_languages: project.repository_languages.map(&:name),
star_count: project.star_count,
last_repository_updated_date: project.last_repository_updated_at
)
context 'when add_count_fields_to_projects migration is not completed' do
before do
set_elasticsearch_migration_to(:add_count_fields_to_projects, including: false)
end
it 'contains the expected mappings' do
result = proxy.as_indexed_json.with_indifferent_access.keys
project_proxy = Elastic::Latest::ApplicationClassProxy.new(Project, use_separate_indices: true)
# readme_content is not populated by as_indexed_json
expected_keys = project_proxy.mappings.to_hash[:properties].keys.map(&:to_s) - ['readme_content']
it 'does not include the gated fields' do
result = proxy.as_indexed_json.with_indifferent_access
expect(result).to match_array(expected_keys)
expect(result.keys).not_to include(:star_count)
expect(result.keys).not_to include(:last_repository_updated_date)
end
end
end
describe '#es_parent' do
let_it_be(:group) { create(:group) }
let_it_be(:target) { create(:project, group: group) }
subject(:es_parent) { described_class.new(target).es_parent }
it 'is the root namespace id' do
expect(es_parent).to eq("n_#{group.id}")
end
describe '#es_parent' do
let_it_be(:group) { create(:group) }
let_it_be(:target) { create(:project, group: group) }
context 'if migration is not finished' do
before do
set_elasticsearch_migration_to :migrate_projects_to_separate_index, including: false
end
subject(:es_parent) { described_class.new(target).es_parent }
it 'is nil' do
expect(es_parent).to be_nil
end
end
it 'is the root namespace id' do
expect(es_parent).to eq("n_#{group.id}")
end
end
end
......@@ -11,7 +11,7 @@
let(:issue_as_ref) { described_class.new(*issue_as_array) }
let(:issue_as_str) { issue_as_array.join(' ') }
let(:project_as_array) { [Project, project.id, project.es_id] }
let(:project_as_array) { [Project, project.id, project.es_id, project.es_parent] }
let(:project_as_ref) { described_class.new(*project_as_array) }
let(:project_as_str) { project_as_array.join(delimiter) }
let(:delimiter) { ' ' }
......
......@@ -195,76 +195,65 @@
describe '.as_indexed_json' do
let_it_be(:project) { create(:project) }
context 'when the migrate_projects_to_separate_index migration has not finished' do
before do
ensure_elasticsearch_index!
end
it 'returns json with all needed elements' do
expected_hash = project.attributes.extract!(
'id',
'name',
'path',
'description',
'namespace_id',
'created_at',
'archived',
'updated_at',
'visibility_level',
'last_activity_at',
'mirror',
'star_count'
).merge({
'ci_catalog' => project.catalog_resource.present?,
'type' => project.es_type,
'schema_version' => schema_version,
'traversal_ids' => project.elastic_namespace_ancestry,
'name_with_namespace' => project.full_name,
'path_with_namespace' => project.full_path,
'forked' => false,
'owner_id' => project.owner.id,
'repository_languages' => project.repository_languages.map(&:name),
'last_repository_updated_date' => project.last_repository_updated_at
})
expect(project.__elasticsearch__.as_indexed_json).to eq(expected_hash)
end
context 'when add_count_fields_to_projects is not finished' do
before do
set_elasticsearch_migration_to(:migrate_projects_to_separate_index, including: false)
ensure_elasticsearch_index!
set_elasticsearch_migration_to(:add_count_fields_to_projects, including: false)
end
it 'returns json with all needed elements' do
expected_hash = project.attributes.extract!(
'id',
'name',
'path',
'description',
'namespace_id',
'created_at',
'archived',
'updated_at',
'visibility_level',
'last_activity_at'
).merge({
'ci_catalog' => project.catalog_resource.present?,
'join_field' => project.es_type,
'type' => project.es_type,
'schema_version' => schema_version,
'traversal_ids' => project.elastic_namespace_ancestry,
'name_with_namespace' => project.full_name,
'path_with_namespace' => project.full_path
})
expected_hash.merge!(
project.project_feature.attributes.extract!(
'issues_access_level',
'merge_requests_access_level',
'snippets_access_level',
'wiki_access_level',
'repository_access_level'
)
)
expect(project.__elasticsearch__.as_indexed_json).to eq(expected_hash)
it 'does not include the ci_catalog field' do
as_indexed_json = project.__elasticsearch__.as_indexed_json
expect(as_indexed_json).not_to have_key('star_count')
expect(as_indexed_json).not_to have_key('last_repository_updated_date')
end
end
context 'when the migrate_projects_to_separate_index migration has finished' do
context 'when add_fields_to_projects_index is not finished' do
before do
set_elasticsearch_migration_to(:migrate_projects_to_separate_index, including: true)
ensure_elasticsearch_index!
set_elasticsearch_migration_to(:add_fields_to_projects_index, including: false)
end
it 'returns json with all needed elements' do
expected_hash = project.attributes.extract!(
'id',
'name',
'path',
'description',
'namespace_id',
'created_at',
'archived',
'updated_at',
'visibility_level',
'last_activity_at'
).merge({
'ci_catalog' => project.catalog_resource.present?,
'type' => project.es_type,
'schema_version' => schema_version,
'traversal_ids' => project.elastic_namespace_ancestry,
'name_with_namespace' => project.full_name,
'path_with_namespace' => project.full_path
})
expect(project.__elasticsearch__.as_indexed_json).to eq(expected_hash)
it 'does not include the ci_catalog field' do
as_indexed_json = project.__elasticsearch__.as_indexed_json
expect(as_indexed_json).not_to have_key('mirror')
expect(as_indexed_json).not_to have_key('forked')
expect(as_indexed_json).not_to have_key('owner_id')
expect(as_indexed_json).not_to have_key('repository_languages')
end
end
end
......
0% 加载中 .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册