Unverified commit 3e801a4b, authored by Igor Drozdov, committed by GitLab

Explicitly state returning columns for p_ci_builds

This Rails PR assigns auto-populated columns on insert:
https://github.com/rails/rails/pull/48241

However, a column is only identified as auto-populated if its
default value uses the nextval function.

The id column of the p_ci_builds table is auto-populated via a
trigger, so Rails does not identify it as an auto-populated column.

Let's explicitly return ['id'] from the function that is expected
to return the list of auto-populated columns.
Parent 913fe45a
Showing 135 additions and 37 deletions
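For context, a minimal sketch of the distinction Rails keys on (the table, sequence, function, and trigger names below are illustrative, not the real GitLab DDL): a column whose default calls nextval is detected and returned from INSERT automatically, while a trigger-assigned id is invisible to that check.

class IllustrateTriggerAssignedId < ActiveRecord::Migration[7.0]
  def up
    # Detected by Rails: id gets DEFAULT nextval('..._id_seq'), so it is
    # listed as an auto-populated column and returned from INSERT.
    create_table :_example_with_sequence

    # Not detected: id has no default; a BEFORE INSERT trigger fills it in,
    # which Rails' column inspection cannot see.
    execute(<<~SQL)
      CREATE TABLE _example_with_trigger (id bigint NOT NULL, name text);
      CREATE SEQUENCE _example_with_trigger_id_seq OWNED BY _example_with_trigger.id;

      CREATE FUNCTION _example_assign_id() RETURNS trigger AS $$
      BEGIN
        NEW.id := nextval('_example_with_trigger_id_seq');
        RETURN NEW;
      END;
      $$ LANGUAGE plpgsql;

      CREATE TRIGGER _example_assign_id_trigger
        BEFORE INSERT ON _example_with_trigger
        FOR EACH ROW EXECUTE FUNCTION _example_assign_id();
    SQL
  end
end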
......@@ -147,9 +147,11 @@ def _bulk_insert_all!(items, on_duplicate:, returns:, unique_by:, validate:, bat
returns
end
+ composite_primary_key = ::Gitlab.next_rails? && composite_primary_key?
# Handle insertions for tables with a composite primary key
primary_keys = connection.schema_cache.primary_keys(table_name)
- if unique_by.blank? && primary_key != primary_keys
+ if unique_by.blank? && (composite_primary_key || primary_key != primary_keys)
unique_by = primary_keys
end
......
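A hedged illustration of why the hunk above adds the explicit check on the Rails 7.1 path behind Gitlab.next_rails?: once a model declares a composite primary key, primary_key itself reports the full key, so the old `primary_key != primary_keys` comparison stops firing on its own. The class below is illustrative, reusing the test table from the specs further down.

# Rails 7.1 composite primary key model (illustrative sketch).
class ExampleCompositeItem < ActiveRecord::Base
  self.table_name = :_test_bulk_insert_items_with_composite_pk
  self.primary_key = [:instance_id, :name]
end

ExampleCompositeItem.composite_primary_key? # => true on Rails 7.1
# primary_key and schema_cache.primary_keys both report the composite key here,
# so only the composite_primary_key? check keeps unique_by falling back to the
# full primary key for the generated insert_all.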
......@@ -18,5 +18,20 @@ def partitioned_by(partitioning_key, strategy:, **kwargs)
@partitioning_strategy = strategy_class.new(self, partitioning_key, **kwargs)
end
# This Rails PR assigns auto-populated columns: https://github.com/rails/rails/pull/48241
# However, a column is only identified as auto-populated if its default value uses the nextval function.
# The id column of partitioned tables is auto-populated via a trigger,
# so Rails does not identify it as an auto-populated column.
# Let's explicitly return [primary_key] from the function that is expected to return the list of auto-populated columns.
#
# Can be removed in Rails 7.2 because it's handled there:
#
# https://github.com/rails/rails/blob/v7.2.0.rc1/activerecord/lib/active_record/model_schema.rb#L444
def _returning_columns_for_insert
auto_populated_columns = []
auto_populated_columns = super if Gitlab.next_rails?
auto_populated_columns.empty? ? Array(primary_key) : auto_populated_columns
end
end
end
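A rough usage sketch of what the override buys, assuming a partitioned model such as Ci::Build:

# The partitioned model now reports its primary key as a returning column,
# so generated INSERT statements include RETURNING "id" and the
# trigger-assigned value is read back into the new record.
Ci::Build._returning_columns_for_insert # => ["id"]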
......@@ -23,8 +23,16 @@ def migrate_status
def status_with_milestones
# rubocop:disable Database/MultipleDatabases -- From Rails base code which doesn't follow our style guide
- versions = ActiveRecord::SchemaMigration.all_versions.map(&:to_i)
- ActiveRecord::Base.connection.migration_context.migrations.sort_by(&:version).map do |m|
+ conn = ActiveRecord::Base.connection
+ versions =
+   if ::Gitlab.next_rails?
+     conn.schema_migration.versions.map(&:to_i)
+   else
+     ActiveRecord::SchemaMigration.all_versions.map(&:to_i)
+   end
+ conn.migration_context.migrations.sort_by(&:version).map do |m|
[
(versions.include?(m.version.to_i) ? 'up' : 'down'),
m.version.to_s,
......
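A hedged sketch of the API difference the new branch accounts for (Rails 7.0 versus the Rails 7.1 path behind Gitlab.next_rails?):

conn = ActiveRecord::Base.connection

# Rails 7.0: SchemaMigration is an ActiveRecord model with a class-level reader.
ActiveRecord::SchemaMigration.all_versions # => ["20230101010101", ...]

# Rails 7.1: each connection carries its own SchemaMigration object,
# and the reader is #versions.
conn.schema_migration.versions             # => ["20230101010101", ...]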
......@@ -13,6 +13,12 @@ class TrackingBase < ApplicationRecord
connects_to database: { writing: :geo, reading: :geo }
end
def self.get_primary_key(*)
return 'id' unless ::Gitlab::Geo.geo_database_configured?
super
end
def self.connected?
return false unless ::Gitlab::Geo.geo_database_configured?
......
......@@ -6,7 +6,7 @@ def up
t.datetime_with_timezone "created_at"
t.datetime_with_timezone "retry_at"
t.integer "bytes", limit: 8
t.integer "artifact_id", unique: true
t.integer "artifact_id", index: { unique: true }
t.integer "retry_count"
t.boolean "success"
t.string "sha256" # rubocop:disable Migration/PreventStrings
......
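The migration tweak above swaps a column option Rails does not recognise for the supported way of declaring a unique index inside a create_table block; newer Rails is stricter about unknown column options, which is presumably why this old Geo migration needed the fix. A minimal sketch (table name illustrative):

create_table :_example_file_registry, force: true do |t|
  # `unique: true` is not a column option; `index: { unique: true }`
  # actually creates the unique index on artifact_id.
  t.integer :artifact_id, index: { unique: true }
end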
......@@ -10,10 +10,10 @@
let(:security_findings) { table(:security_findings) }
let(:security_scans) { table(:security_scans) }
let(:vulnerability_scanners) { table(:vulnerability_scanners) }
- let(:ci_builds) { table(:p_ci_builds, database: :ci) }
let(:ci_pipelines) { table(:ci_pipelines, database: :ci) }
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
+ let(:ci_builds) { partitioned_table(:p_ci_builds, database: :ci) }
let(:scanner) { vulnerability_scanners.create!(project_id: project.id, name: 'Foo', external_id: 'foo') }
let(:namespace) { namespaces.create!(name: 'test', path: 'test', type: 'Group') }
......
......@@ -12,10 +12,10 @@
let(:security_findings) { table(:security_findings) }
let(:security_scans) { table(:security_scans) }
let(:vulnerability_scanners) { table(:vulnerability_scanners) }
- let(:ci_builds) { table(:p_ci_builds, database: :ci) }
let(:ci_pipelines) { table(:ci_pipelines, database: :ci) }
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
+ let(:ci_builds) { partitioned_table(:p_ci_builds, database: :ci) }
let(:scanner) { vulnerability_scanners.create!(project_id: project.id, name: 'Foo', external_id: 'foo') }
let(:namespace) { namespaces.create!(name: 'test', path: 'test', type: 'Group') }
......
......@@ -46,7 +46,12 @@
result = described_class.delete_by_project_id(project.id)
expect(result).to be(1)
- expect { dashboard_project.reload }.to raise_exception(ActiveRecord::UnknownPrimaryKey)
+ if ::Gitlab.next_rails?
+   expect { dashboard_project.reload }.to raise_exception(ActiveRecord::RecordNotFound)
+ else
+   expect { dashboard_project.reload }.to raise_exception(ActiveRecord::UnknownPrimaryKey)
+ end
end
context 'when there is no record with the given project ID' do
......
......@@ -18,28 +18,28 @@ def clear_data_source_cache!(connection, table_name)
end
def primary_keys(connection, table_name)
- super(connection, underlying_table(table_name))
+ super(connection, underlying_table(connection, table_name))
end
def columns(connection, table_name)
- super(connection, underlying_table(table_name))
+ super(connection, underlying_table(connection, table_name))
end
def columns_hash(connection, table_name)
- super(connection, underlying_table(table_name))
+ super(connection, underlying_table(connection, table_name))
end
def indexes(connection, table_name)
- super(connection, underlying_table(table_name))
+ super(connection, underlying_table(connection, table_name))
end
private
- def underlying_table(table_name)
-   renamed_tables_cache.fetch(table_name, table_name)
+ def underlying_table(connection, table_name)
+   renamed_tables_cache(connection).fetch(table_name, table_name)
end
- def renamed_tables_cache
+ def renamed_tables_cache(connection)
@renamed_tables ||= Gitlab::Database::TABLES_TO_BE_RENAMED.select do |old_name, _new_name|
connection.view_exists?(old_name)
end
......
......@@ -17,7 +17,13 @@ def schema_directory
end
def versions_to_create
- versions_from_database = @connection.schema_migration.all_versions
+ versions_from_database =
+   if ::Gitlab.next_rails?
+     @connection.schema_migration.versions
+   else
+     @connection.schema_migration.all_versions
+   end
versions_from_migration_files = @connection.migration_context.migrations.map { |m| m.version.to_s }
versions_from_database & versions_from_migration_files
......
......@@ -129,7 +129,10 @@ namespace :gitlab do
def insert_db_identifier(db_config)
ActiveRecord::Base.establish_connection(db_config) # rubocop: disable Database/EstablishConnection
- if ActiveRecord::InternalMetadata.table_exists?
+ if ::Gitlab.next_rails?
+   internal_metadata = ActiveRecord::Base.connection.internal_metadata # rubocop: disable Database/MultipleDatabases
+   internal_metadata[DB_CONFIG_NAME_KEY] = db_config.name if internal_metadata.table_exists?
+ elsif ActiveRecord::InternalMetadata.table_exists?
ts = Time.zone.now
ActiveRecord::InternalMetadata.upsert(
......@@ -151,8 +154,15 @@ namespace :gitlab do
def get_db_identifier(db_config)
ActiveRecord::Base.establish_connection(db_config) # rubocop: disable Database/EstablishConnection
+ internal_metadata =
+   if ::Gitlab.next_rails?
+     ActiveRecord::Base.connection.internal_metadata # rubocop: disable Database/MultipleDatabases
+   else
+     ActiveRecord::InternalMetadata
+   end
# rubocop:disable Database/MultipleDatabases
- if ActiveRecord::InternalMetadata.table_exists?
+ if internal_metadata.table_exists?
ActiveRecord::Base.connection.select_one(
DB_IDENTIFIER_WITH_DB_CONFIG_NAME_SQL, nil, [DB_CONFIG_NAME_KEY])
else
......
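A hedged sketch of the InternalMetadata difference both rake-task branches bridge (the key name below is illustrative, standing in for DB_CONFIG_NAME_KEY):

conn = ActiveRecord::Base.connection

# Rails 7.0: InternalMetadata is an ActiveRecord model.
ActiveRecord::InternalMetadata.table_exists?

# Rails 7.1: it is a per-connection object with a hash-like API.
conn.internal_metadata.table_exists?
conn.internal_metadata['database_config_name'] = 'main'
conn.internal_metadata['database_config_name'] # => "main"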
# frozen_string_literal: true
require 'spec_helper'
require 'rails/generators/testing/behaviour'
require 'rails/generators/testing/assertions'
if ::Gitlab.next_rails?
require 'rails/generators/testing/behavior'
else
require 'rails/generators/testing/behaviour'
end
RSpec.describe BatchedBackgroundMigration::BatchedBackgroundMigrationGenerator, feature_category: :database do
include Rails::Generators::Testing::Behaviour
include Rails::Generators::Testing::Assertions
......
......@@ -4,7 +4,8 @@
RSpec.describe Gitlab::BackgroundMigration::BackfillUpstreamPipelinePartitionIdOnPCiBuilds, feature_category: :continuous_integration do
let(:pipelines_table) { table(:ci_pipelines, database: :ci) { |t| t.primary_key = :id } }
- let(:jobs_table) { table(:p_ci_builds, database: :ci) { |t| t.primary_key = :id } }
+ let(:jobs_table) { partitioned_table(:p_ci_builds, database: :ci) }
let!(:pipeline_1) { pipelines_table.create!(partition_id: 100, project_id: 1) }
let!(:pipeline_2) { pipelines_table.create!(partition_id: 100, project_id: 1) }
......
......@@ -67,6 +67,9 @@
before do
allow(connection).to receive_message_chain(:schema_migration, :all_versions).and_return(migrated_versions)
# Can be removed once Gitlab.next_rails? becomes the default
allow(connection).to receive_message_chain(:schema_migration, :versions).and_return(migrated_versions)
migrations_struct = Struct.new(:version)
migrations = file_versions.map { |version| migrations_struct.new(version) }
allow(connection).to receive_message_chain(:migration_context, :migrations).and_return(migrations)
......
......@@ -100,7 +100,7 @@
queuing_entry runtime_metadata trace_metadata
dast_site_profile dast_scanner_profile stage_id dast_site_profiles_build
dast_scanner_profiles_build auto_canceled_by_partition_id execution_config_id execution_config
- build_source].freeze
+ build_source id_value].freeze
end
before_all do
......
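The extra id_value entry appears to account for a Rails 7.1 addition: records expose the raw value of their id column through #id_value (useful when #id reports a composite key), so the accessor now shows up alongside the other attributes this spec lists. A hedged sketch:

build = Ci::Build.last
build.id       # the primary key as Rails reports it
build.id_value # Rails 7.1: the raw value of the id column itself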
......@@ -23,11 +23,11 @@
end
create_table :_test_bulk_insert_items_with_composite_pk, id: false, force: true do |t|
- t.integer :id, null: true
+ t.integer :instance_id, null: true
t.string :name, null: true
end
execute("ALTER TABLE _test_bulk_insert_items_with_composite_pk ADD PRIMARY KEY (id,name);")
execute("ALTER TABLE _test_bulk_insert_items_with_composite_pk ADD PRIMARY KEY (instance_id,name);")
end
end
......@@ -255,12 +255,14 @@ def self.invalid_list(count)
end
end
- let(:new_object) { bulk_insert_items_with_composite_pk_class.new(id: 1, name: 'composite') }
+ let(:new_object) { bulk_insert_items_with_composite_pk_class.new(instance_id: 1, name: 'composite') }
it 'successfully inserts an item' do
expect(ActiveRecord::InsertAll).to receive(:new)
.with(
- bulk_insert_items_with_composite_pk_class.insert_all_proxy_class, [new_object.as_json], on_duplicate: :raise, returning: false, unique_by: %w[id name]
+ bulk_insert_items_with_composite_pk_class.insert_all_proxy_class,
+ [new_object.as_json],
+ on_duplicate: :raise, returning: false, unique_by: %w[instance_id name]
).and_call_original
expect { bulk_insert_items_with_composite_pk_class.bulk_insert!([new_object]) }.to(
......
......@@ -3,17 +3,19 @@
require 'spec_helper'
RSpec.describe PartitionedTable do
describe '.partitioned_by' do
subject { my_class.partitioned_by(key, strategy: :monthly) }
subject { my_class.partitioned_by(key, strategy: :monthly) }
let(:key) { :foo }
let(:key) { :foo }
let(:my_class) do
Class.new do
include PartitionedTable
end
let(:my_class) do
Class.new(ActiveRecord::Base) do
self.table_name = :p_ci_builds
include PartitionedTable
end
end
describe '.partitioned_by' do
context 'with keyword arguments passed to the strategy' do
subject { my_class.partitioned_by(key, strategy: :monthly, retain_for: 3.months) }
......@@ -36,4 +38,14 @@
expect(my_class.partitioning_strategy.partitioning_key).to eq(key)
end
end
describe 'self._returning_columns_for_insert' do
it 'identifies the columns that are returned on insert' do
expect(my_class._returning_columns_for_insert).to eq(Array.wrap(my_class.primary_key))
end
it 'allows creating a partitionable record' do
expect { create(:ci_build) }.not_to raise_error
end
end
end
......@@ -26,9 +26,20 @@
describe '#execute' do
subject(:execute) { service.execute }
def versions
schema_table =
if ::Gitlab.next_rails?
connection.schema_migration
else
ActiveRecord::SchemaMigration
end
schema_table.where(version: version).count
end
it 'marks the migration as successful' do
expect { execute }
- .to change { ActiveRecord::SchemaMigration.where(version: version).count }
+ .to change { versions }
.by(1)
is_expected.to be_success
......@@ -42,7 +53,7 @@
it 'does not insert records' do
expect { execute }
- .not_to change { ActiveRecord::SchemaMigration.where(version: version).count }
+ .not_to change { versions }
end
end
......@@ -56,7 +67,7 @@
it 'does not insert records' do
expect { execute }
- .not_to change { ActiveRecord::SchemaMigration.where(version: version).count }
+ .not_to change { versions }
end
end
......
......@@ -51,8 +51,13 @@
FOR VALUES IN (101);
SQL
table("_test_target_table").create!(id: 1, parent_id: deleted_id, partition_id: 100)
table("_test_target_table").create!(id: 2, parent_id: deleted_id, partition_id: 101)
if ::Gitlab.next_rails?
table("_test_target_table").create!(id: [1, 100], parent_id: deleted_id)
table("_test_target_table").create!(id: [2, 101], parent_id: deleted_id)
else
table("_test_target_table").create!(id: 1, parent_id: deleted_id, partition_id: 100)
table("_test_target_table").create!(id: 2, parent_id: deleted_id, partition_id: 101)
end
end
describe 'query generation' do
......
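A hedged note on the Gitlab.next_rails? branch above: once the temporary table's primary key is treated as the composite (id, partition_id), assigning id takes the whole key tuple, so the two creates still pin the rows to partitions 100 and 101.

# Sketch of the composite-key assignment the Rails 7.1 branch relies on,
# mirroring the spec's temporary table.
record = table("_test_target_table").create!(id: [2, 101], parent_id: deleted_id)
record.id # => [2, 101], i.e. id = 2 in the partition where partition_id = 101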
......@@ -25,9 +25,16 @@ def duplicate_indexes
end
def self.btree_index_struct(index)
+ columns =
+   if ::Gitlab.next_rails?
+     Array.wrap(index.columns) + Array.wrap(index.include)
+   else
+     Array.wrap(index.columns)
+   end
BTREE_INDEX_STRUCT.new(
  index.name,
- Array.wrap(index.columns).map do |column|
+ columns.map do |column|
# https://apidock.com/rails/ActiveRecord/ConnectionAdapters/PostgreSQL/SchemaStatements/indexes
# asc is the default order
column_order = index.orders.is_a?(Symbol) ? index.orders : (index.orders[column] || :asc)
......
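A hedged sketch of what the Rails 7.1 branch folds in: PostgreSQL index definitions there expose their covering (INCLUDE) columns via #include, so adding them to the struct keeps the duplicate-index comparison aware of them. Table and column names are illustrative.

index = ActiveRecord::Base.connection.indexes(:_example_users).first
index.columns # => ["email"]
index.include # => ["name"] for an index created with include: [:name] on Rails 7.1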