# frozen_string_literal: true require 'spec_helper' RSpec.describe Gitlab::Database::MigrationHelpers do include Database::TableSchemaHelpers include Database::TriggerHelpers let(:model) do ActiveRecord::Migration.new.extend(described_class) end before do allow(model).to receive(:puts) end describe 'overridden dynamic model helpers' do let(:test_table) { '_test_batching_table' } before do model.connection.execute(<<~SQL) CREATE TABLE #{test_table} ( id integer NOT NULL PRIMARY KEY, name text NOT NULL ); INSERT INTO #{test_table} (id, name) VALUES (1, 'bob'), (2, 'mary'), (3, 'amy'); SQL end describe '#define_batchable_model' do it 'defines a batchable model with the migration connection' do expect(model.define_batchable_model(test_table).count).to eq(3) end end describe '#each_batch' do before do allow(model).to receive(:transaction_open?).and_return(false) end it 'calls each_batch with the migration connection' do each_batch_name = ->(&block) do model.each_batch(test_table, of: 2) do |batch| block.call(batch.pluck(:name)) end end expect { |b| each_batch_name.call(&b) }.to yield_successive_args(%w[bob mary], %w[amy]) end end describe '#each_batch_range' do before do allow(model).to receive(:transaction_open?).and_return(false) end it 'calls each_batch with the migration connection' do expect { |b| model.each_batch_range(test_table, of: 2, &b) }.to yield_successive_args([1, 2], [3, 3]) end end end describe '#remove_timestamps' do it 'can remove the default timestamps' do Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name| expect(model).to receive(:remove_column).with(:foo, column_name) end model.remove_timestamps(:foo) end it 'can remove custom timestamps' do expect(model).to receive(:remove_column).with(:foo, :bar) model.remove_timestamps(:foo, columns: [:bar]) end end describe '#add_timestamps_with_timezone' do it 'adds "created_at" and "updated_at" fields with the "datetime_with_timezone" data type' do Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name| expect(model).to receive(:add_column) .with(:foo, column_name, :datetime_with_timezone, { default: nil, null: false }) end model.add_timestamps_with_timezone(:foo) end it 'can disable the NOT NULL constraint' do Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name| expect(model).to receive(:add_column) .with(:foo, column_name, :datetime_with_timezone, { default: nil, null: true }) end model.add_timestamps_with_timezone(:foo, null: true) end it 'can add just one column' do expect(model).to receive(:add_column).with(:foo, :created_at, :datetime_with_timezone, anything) expect(model).not_to receive(:add_column).with(:foo, :updated_at, :datetime_with_timezone, anything) model.add_timestamps_with_timezone(:foo, columns: [:created_at]) end it 'can add choice of acceptable columns' do expect(model).to receive(:add_column).with(:foo, :created_at, :datetime_with_timezone, anything) expect(model).to receive(:add_column).with(:foo, :deleted_at, :datetime_with_timezone, anything) expect(model).to receive(:add_column).with(:foo, :processed_at, :datetime_with_timezone, anything) expect(model).not_to receive(:add_column).with(:foo, :updated_at, :datetime_with_timezone, anything) model.add_timestamps_with_timezone(:foo, columns: [:created_at, :deleted_at, :processed_at]) end it 'cannot add unacceptable column names' do expect do model.add_timestamps_with_timezone(:foo, columns: [:bar]) end.to raise_error %r/Illegal timestamp column name/ end end describe '#create_table_with_constraints' do let(:table_name) { :test_table } let(:column_attributes) do [ { name: 'id', sql_type: 'bigint', null: false, default: nil }, { name: 'created_at', sql_type: 'timestamp with time zone', null: false, default: nil }, { name: 'updated_at', sql_type: 'timestamp with time zone', null: false, default: nil }, { name: 'some_id', sql_type: 'integer', null: false, default: nil }, { name: 'active', sql_type: 'boolean', null: false, default: 'true' }, { name: 'name', sql_type: 'text', null: true, default: nil } ] end before do allow(model).to receive(:transaction_open?).and_return(true) end context 'when no check constraints are defined' do it 'creates the table as expected' do model.create_table_with_constraints table_name do |t| t.timestamps_with_timezone t.integer :some_id, null: false t.boolean :active, null: false, default: true t.text :name end expect_table_columns_to_match(column_attributes, table_name) end end context 'when check constraints are defined' do context 'when the text_limit is explicity named' do it 'creates the table as expected' do model.create_table_with_constraints table_name do |t| t.timestamps_with_timezone t.integer :some_id, null: false t.boolean :active, null: false, default: true t.text :name t.text_limit :name, 255, name: 'check_name_length' t.check_constraint :some_id_is_positive, 'some_id > 0' end expect_table_columns_to_match(column_attributes, table_name) expect_check_constraint(table_name, 'check_name_length', 'char_length(name) <= 255') expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0') end end context 'when the text_limit is not named' do it 'creates the table as expected, naming the text limit' do model.create_table_with_constraints table_name do |t| t.timestamps_with_timezone t.integer :some_id, null: false t.boolean :active, null: false, default: true t.text :name t.text_limit :name, 255 t.check_constraint :some_id_is_positive, 'some_id > 0' end expect_table_columns_to_match(column_attributes, table_name) expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255') expect_check_constraint(table_name, 'some_id_is_positive', 'some_id > 0') end end it 'runs the change within a with_lock_retries' do expect(model).to receive(:with_lock_retries).ordered.and_yield expect(model).to receive(:create_table).ordered.and_call_original expect(model).to receive(:execute).with(<<~SQL).ordered ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255) SQL model.create_table_with_constraints table_name do |t| t.text :name t.text_limit :name, 255 end end context 'when with_lock_retries re-runs the block' do it 'only creates constraint for unique definitions' do expected_sql = <<~SQL ALTER TABLE "#{table_name}"\nADD CONSTRAINT "check_cda6f69506" CHECK (char_length("name") <= 255) SQL expect(model).to receive(:create_table).twice.and_call_original expect(model).to receive(:execute).with(expected_sql).and_raise(ActiveRecord::LockWaitTimeout) expect(model).to receive(:execute).with(expected_sql).and_call_original model.create_table_with_constraints table_name do |t| t.timestamps_with_timezone t.integer :some_id, null: false t.boolean :active, null: false, default: true t.text :name t.text_limit :name, 255 end expect_table_columns_to_match(column_attributes, table_name) expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 255') end end context 'when constraints are given invalid names' do let(:expected_max_length) { described_class::MAX_IDENTIFIER_NAME_LENGTH } let(:expected_error_message) { "The maximum allowed constraint name is #{expected_max_length} characters" } context 'when the explicit text limit name is not valid' do it 'raises an error' do too_long_length = expected_max_length + 1 expect do model.create_table_with_constraints table_name do |t| t.timestamps_with_timezone t.integer :some_id, null: false t.boolean :active, null: false, default: true t.text :name t.text_limit :name, 255, name: ('a' * too_long_length) t.check_constraint :some_id_is_positive, 'some_id > 0' end end.to raise_error(expected_error_message) end end context 'when a check constraint name is not valid' do it 'raises an error' do too_long_length = expected_max_length + 1 expect do model.create_table_with_constraints table_name do |t| t.timestamps_with_timezone t.integer :some_id, null: false t.boolean :active, null: false, default: true t.text :name t.text_limit :name, 255 t.check_constraint ('a' * too_long_length), 'some_id > 0' end end.to raise_error(expected_error_message) end end end end end describe '#add_concurrent_index' do context 'outside a transaction' do before do allow(model).to receive(:transaction_open?).and_return(false) allow(model).to receive(:disable_statement_timeout).and_call_original end it 'creates the index concurrently' do expect(model).to receive(:add_index) .with(:users, :foo, algorithm: :concurrently) model.add_concurrent_index(:users, :foo) end it 'creates unique index concurrently' do expect(model).to receive(:add_index) .with(:users, :foo, { algorithm: :concurrently, unique: true }) model.add_concurrent_index(:users, :foo, unique: true) end context 'when the index exists and is valid' do before do model.add_index :users, :id, unique: true end it 'does leaves the existing index' do expect(model).to receive(:index_exists?) .with(:users, :id, { algorithm: :concurrently, unique: true }).and_call_original expect(model).not_to receive(:remove_index) expect(model).not_to receive(:add_index) model.add_concurrent_index(:users, :id, unique: true) end end context 'when an invalid copy of the index exists' do before do model.add_index :users, :id, unique: true, name: index_name model.connection.execute(<<~SQL) UPDATE pg_index SET indisvalid = false WHERE indexrelid = '#{index_name}'::regclass SQL end context 'when the default name is used' do let(:index_name) { model.index_name(:users, :id) } it 'drops and recreates the index' do expect(model).to receive(:index_exists?) .with(:users, :id, { algorithm: :concurrently, unique: true }).and_call_original expect(model).to receive(:index_invalid?).with(index_name, schema: nil).and_call_original expect(model).to receive(:remove_concurrent_index_by_name).with(:users, index_name) expect(model).to receive(:add_index) .with(:users, :id, { algorithm: :concurrently, unique: true }) model.add_concurrent_index(:users, :id, unique: true) end end context 'when a custom name is used' do let(:index_name) { 'my_test_index' } it 'drops and recreates the index' do expect(model).to receive(:index_exists?) .with(:users, :id, { algorithm: :concurrently, unique: true, name: index_name }).and_call_original expect(model).to receive(:index_invalid?).with(index_name, schema: nil).and_call_original expect(model).to receive(:remove_concurrent_index_by_name).with(:users, index_name) expect(model).to receive(:add_index) .with(:users, :id, { algorithm: :concurrently, unique: true, name: index_name }) model.add_concurrent_index(:users, :id, unique: true, name: index_name) end end context 'when a qualified table name is used' do let(:other_schema) { 'foo_schema' } let(:index_name) { 'my_test_index' } let(:table_name) { "#{other_schema}.users" } before do model.connection.execute(<<~SQL) CREATE SCHEMA #{other_schema}; ALTER TABLE users SET SCHEMA #{other_schema}; SQL end it 'drops and recreates the index' do expect(model).to receive(:index_exists?) .with(table_name, :id, { algorithm: :concurrently, unique: true, name: index_name }).and_call_original expect(model).to receive(:index_invalid?).with(index_name, schema: other_schema).and_call_original expect(model).to receive(:remove_concurrent_index_by_name).with(table_name, index_name) expect(model).to receive(:add_index) .with(table_name, :id, { algorithm: :concurrently, unique: true, name: index_name }) model.add_concurrent_index(table_name, :id, unique: true, name: index_name) end end end it 'unprepares the async index creation' do expect(model).to receive(:add_index) .with(:users, :foo, algorithm: :concurrently) expect(model).to receive(:unprepare_async_index) .with(:users, :foo, algorithm: :concurrently) model.add_concurrent_index(:users, :foo) end context 'when targeting a partition table' do let(:schema) { 'public' } let(:name) { '_test_partition_01' } let(:identifier) { "#{schema}.#{name}" } before do model.execute(<<~SQL) CREATE TABLE public._test_partitioned_table ( id serial NOT NULL, partition_id serial NOT NULL, PRIMARY KEY (id, partition_id) ) PARTITION BY LIST(partition_id); CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table FOR VALUES IN (1); SQL end context 'when allow_partition is true' do it 'creates the index concurrently' do expect(model).to receive(:add_index).with(:_test_partition_01, :foo, algorithm: :concurrently) model.add_concurrent_index(:_test_partition_01, :foo, allow_partition: true) end end context 'when allow_partition is not provided' do it 'raises ArgumentError' do expect { model.add_concurrent_index(:_test_partition_01, :foo) } .to raise_error(ArgumentError, /use add_concurrent_partitioned_index/) end end end end context 'inside a transaction' do it 'raises RuntimeError' do expect(model).to receive(:transaction_open?).and_return(true) expect { model.add_concurrent_index(:users, :foo) } .to raise_error(RuntimeError) end end end describe '#remove_concurrent_index' do context 'outside a transaction' do before do allow(model).to receive(:transaction_open?).and_return(false) allow(model).to receive(:index_exists?).and_return(true) allow(model).to receive(:disable_statement_timeout).and_call_original end describe 'by column name' do it 'removes the index concurrently' do expect(model).to receive(:remove_index) .with(:users, { algorithm: :concurrently, column: :foo }) model.remove_concurrent_index(:users, :foo) end it 'does nothing if the index does not exist' do expect(model).to receive(:index_exists?) .with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(false) expect(model).not_to receive(:remove_index) model.remove_concurrent_index(:users, :foo, unique: true) end it 'unprepares the async index creation' do expect(model).to receive(:remove_index) .with(:users, { algorithm: :concurrently, column: :foo }) expect(model).to receive(:unprepare_async_index) .with(:users, :foo, { algorithm: :concurrently }) model.remove_concurrent_index(:users, :foo) end context 'when targeting a partition table' do let(:schema) { 'public' } let(:partition_table_name) { '_test_partition_01' } let(:identifier) { "#{schema}.#{partition_table_name}" } let(:index_name) { '_test_partitioned_index' } let(:partition_index_name) { '_test_partition_01_partition_id_idx' } let(:column_name) { 'partition_id' } before do model.execute(<<~SQL) CREATE TABLE public._test_partitioned_table ( id serial NOT NULL, partition_id serial NOT NULL, PRIMARY KEY (id, partition_id) ) PARTITION BY LIST(partition_id); CREATE INDEX #{index_name} ON public._test_partitioned_table(#{column_name}); CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table FOR VALUES IN (1); SQL end context 'when dropping an index on the partition table' do it 'raises ArgumentError' do expect { model.remove_concurrent_index(partition_table_name, column_name) } .to raise_error(ArgumentError, /use remove_concurrent_partitioned_index_by_name/) end end end describe 'by index name' do before do allow(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(true) end it 'removes the index concurrently by index name' do expect(model).to receive(:remove_index) .with(:users, { algorithm: :concurrently, name: "index_x_by_y" }) model.remove_concurrent_index_by_name(:users, "index_x_by_y") end it 'does nothing if the index does not exist' do expect(model).to receive(:index_exists_by_name?).with(:users, "index_x_by_y").and_return(false) expect(model).not_to receive(:remove_index) model.remove_concurrent_index_by_name(:users, "index_x_by_y") end it 'removes the index with keyword arguments' do expect(model).to receive(:remove_index) .with(:users, { algorithm: :concurrently, name: "index_x_by_y" }) model.remove_concurrent_index_by_name(:users, name: "index_x_by_y") end it 'raises an error if the index is blank' do expect do model.remove_concurrent_index_by_name(:users, wrong_key: "index_x_by_y") end.to raise_error 'remove_concurrent_index_by_name must get an index name as the second argument' end it 'unprepares the async index creation' do expect(model).to receive(:remove_index) .with(:users, { algorithm: :concurrently, name: "index_x_by_y" }) expect(model).to receive(:unprepare_async_index_by_name) .with(:users, "index_x_by_y", { algorithm: :concurrently }) model.remove_concurrent_index_by_name(:users, "index_x_by_y") end context 'when targeting a partition table' do let(:schema) { 'public' } let(:partition_table_name) { '_test_partition_01' } let(:identifier) { "#{schema}.#{partition_table_name}" } let(:index_name) { '_test_partitioned_index' } let(:partition_index_name) { '_test_partition_01_partition_id_idx' } before do model.execute(<<~SQL) CREATE TABLE public._test_partitioned_table ( id serial NOT NULL, partition_id serial NOT NULL, PRIMARY KEY (id, partition_id) ) PARTITION BY LIST(partition_id); CREATE INDEX #{index_name} ON public._test_partitioned_table(partition_id); CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table FOR VALUES IN (1); SQL end context 'when dropping an index on the partition table' do it 'raises ArgumentError' do expect { model.remove_concurrent_index_by_name(partition_table_name, partition_index_name) } .to raise_error(ArgumentError, /use remove_concurrent_partitioned_index_by_name/) end end end end end end context 'inside a transaction' do it 'raises RuntimeError' do expect(model).to receive(:transaction_open?).and_return(true) expect { model.remove_concurrent_index(:users, :foo) } .to raise_error(RuntimeError) end end end describe '#remove_foreign_key_if_exists' do context 'when the foreign key does not exist' do before do allow(model).to receive(:foreign_key_exists?).and_return(false) end it 'does nothing' do expect(model).not_to receive(:remove_foreign_key) model.remove_foreign_key_if_exists(:projects, :users, column: :user_id) end end context 'when the foreign key exists' do before do allow(model).to receive(:foreign_key_exists?).and_return(true) end it 'removes the foreign key' do expect(model).to receive(:remove_foreign_key).with(:projects, :users, { column: :user_id }) model.remove_foreign_key_if_exists(:projects, :users, column: :user_id) end context 'when the target table is not given' do it 'passes the options as the second parameter' do expect(model).to receive(:remove_foreign_key).with(:projects, { column: :user_id }) model.remove_foreign_key_if_exists(:projects, column: :user_id) end end context 'when the reverse_lock_order option is given' do it 'requests for lock before removing the foreign key' do expect(model).to receive(:transaction_open?).and_return(true) expect(model).to receive(:execute).with(/LOCK TABLE users, projects/) expect(model).not_to receive(:remove_foreign_key).with(:projects, :users) model.remove_foreign_key_if_exists(:projects, :users, column: :user_id, reverse_lock_order: true) end context 'when not inside a transaction' do it 'does not lock' do expect(model).to receive(:transaction_open?).and_return(false) expect(model).not_to receive(:execute).with(/LOCK TABLE users, projects/) expect(model).to receive(:remove_foreign_key).with(:projects, :users, { column: :user_id }) model.remove_foreign_key_if_exists(:projects, :users, column: :user_id, reverse_lock_order: true) end end end end end describe '#add_concurrent_foreign_key' do before do allow(model).to receive(:foreign_key_exists?).and_return(false) end context 'inside a transaction' do it 'raises an error' do expect(model).to receive(:transaction_open?).and_return(true) expect do model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end.to raise_error(RuntimeError) end end context 'outside a transaction' do before do allow(model).to receive(:transaction_open?).and_return(false) end context 'target column' do it 'defaults to (id) when no custom target column is provided' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with(/REFERENCES users \(id\)/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end it 'references the custom taget column when provided' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with(/REFERENCES users \(id_convert_to_bigint\)/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, target_column: :id_convert_to_bigint) end end context 'ON DELETE statements' do context 'on_delete: :nullify' do it 'appends ON DELETE SET NULL statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with(/ON DELETE SET NULL/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, on_delete: :nullify) end end context 'on_delete: :cascade' do it 'appends ON DELETE CASCADE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with(/ON DELETE CASCADE/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, on_delete: :cascade) end end context 'on_delete: nil' do it 'appends no ON DELETE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).not_to receive(:execute).with(/ON DELETE/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, on_delete: nil) end end end context 'ON UPDATE statements' do context 'on_update: :nullify' do it 'appends ON UPDATE SET NULL statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with(/ON UPDATE SET NULL/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, on_update: :nullify) end end context 'on_update: :cascade' do it 'appends ON UPDATE CASCADE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with(/ON UPDATE CASCADE/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, on_update: :cascade) end end context 'on_update: nil' do it 'appends no ON UPDATE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).not_to receive(:execute).with(/ON UPDATE/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, on_update: nil) end end context 'when on_update is not provided' do it 'appends no ON UPDATE statement' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).not_to receive(:execute).with(/ON UPDATE/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end end end context 'when no custom key name is supplied' do it 'creates a concurrent foreign key and validates it' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end it 'does not create a foreign key if it exists already' do name = model.concurrent_foreign_key_name(:projects, :user_id) expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id, on_update: nil, on_delete: :cascade, name: name, primary_key: :id).and_return(true) expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/) expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id) end end context 'when a custom key name is supplied' do context 'for creating a new foreign key for a column that does not presently exist' do it 'creates a new foreign key' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo) end end context 'for creating a duplicate foreign key for a column that presently exists' do context 'when the supplied key name is the same as the existing foreign key name' do it 'does not create a new foreign key' do expect(model).to receive(:foreign_key_exists?).with(:projects, :users, name: :foo, primary_key: :id, on_update: nil, on_delete: :cascade, column: :user_id).and_return(true) expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/) expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo) end end context 'when the supplied key name is different from the existing foreign key name' do it 'creates a new foreign key' do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/NOT VALID/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+bar/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :bar) end end end end describe 'validate option' do let(:args) { [:projects, :users] } let(:options) { { column: :user_id, on_delete: nil } } context 'when validate is supplied with a falsey value' do it_behaves_like 'skips validation', validate: false it_behaves_like 'skips validation', validate: nil end context 'when validate is supplied with a truthy value' do it_behaves_like 'performs validation', validate: true it_behaves_like 'performs validation', validate: :whatever end context 'when validate is not supplied' do it_behaves_like 'performs validation', {} end end context 'when the reverse_lock_order flag is set' do it 'explicitly locks the tables in target-source order', :aggregate_failures do expect(model).to receive(:with_lock_retries).and_call_original expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) expect(model).to receive(:execute).with('LOCK TABLE users, projects IN SHARE ROW EXCLUSIVE MODE') expect(model).to receive(:execute).with(/REFERENCES users \(id\)/) model.add_concurrent_foreign_key(:projects, :users, column: :user_id, reverse_lock_order: true) end end context 'when creating foreign key for a group of columns' do it 'references the custom target columns when provided', :aggregate_failures do expect(model).to receive(:with_lock_retries).and_yield expect(model).to receive(:execute).with( "ALTER TABLE projects\n" \ "ADD CONSTRAINT fk_multiple_columns\n" \ "FOREIGN KEY \(partition_number, user_id\)\n" \ "REFERENCES users \(partition_number, id\)\n" \ "ON UPDATE CASCADE\n" \ "ON DELETE CASCADE\n" \ "NOT VALID;\n" ) model.add_concurrent_foreign_key( :projects, :users, column: [:partition_number, :user_id], target_column: [:partition_number, :id], validate: false, name: :fk_multiple_columns, on_update: :cascade ) end context 'when foreign key is already defined' do before do expect(model).to receive(:foreign_key_exists?).with( :projects, :users, { column: [:partition_number, :user_id], name: :fk_multiple_columns, on_update: :cascade, on_delete: :cascade, primary_key: [:partition_number, :id] } ).and_return(true) end it 'does not create foreign key', :aggregate_failures do expect(model).not_to receive(:with_lock_retries).and_yield expect(model).not_to receive(:execute).with(/FOREIGN KEY/) model.add_concurrent_foreign_key( :projects, :users, column: [:partition_number, :user_id], target_column: [:partition_number, :id], on_update: :cascade, validate: false, name: :fk_multiple_columns ) end end end end end describe '#validate_foreign_key' do context 'when name is provided' do it 'does not infer the foreign key constraint name' do expect(model).to receive(:foreign_key_exists?).with(:projects, name: :foo).and_return(true) aggregate_failures do expect(model).not_to receive(:concurrent_foreign_key_name) expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) end model.validate_foreign_key(:projects, :user_id, name: :foo) end end context 'when name is not provided' do it 'infers the foreign key constraint name' do expect(model).to receive(:foreign_key_exists?).with(:projects, name: anything).and_return(true) aggregate_failures do expect(model).to receive(:concurrent_foreign_key_name) expect(model).to receive(:disable_statement_timeout).and_call_original expect(model).to receive(:statement_timeout_disabled?).and_return(false) expect(model).to receive(:execute).with(/SET statement_timeout TO/) expect(model).to receive(:execute).ordered.with(/ALTER TABLE projects VALIDATE CONSTRAINT/) expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/) end model.validate_foreign_key(:projects, :user_id) end context 'when the inferred foreign key constraint does not exist' do it 'raises an error' do expect(model).to receive(:foreign_key_exists?).and_return(false) error_message = /Could not find foreign key "fk_name" on table "projects"/ expect { model.validate_foreign_key(:projects, :user_id, name: :fk_name) }.to raise_error(error_message) end end end end describe '#concurrent_foreign_key_name' do it 'returns the name for a foreign key' do name = model.concurrent_foreign_key_name(:this_is_a_very_long_table_name, :with_a_very_long_column_name) expect(name).to be_an_instance_of(String) expect(name.length).to eq(13) end context 'when using multiple columns' do it 'returns the name of the foreign key', :aggregate_failures do result = model.concurrent_foreign_key_name(:table_name, [:partition_number, :id]) expect(result).to be_an_instance_of(String) expect(result.length).to eq(13) end end end describe '#foreign_key_exists?' do before do model.connection.execute(<<~SQL) create table referenced ( id bigserial primary key not null ); create table referencing ( id bigserial primary key not null, non_standard_id bigint not null, constraint fk_referenced foreign key (non_standard_id) references referenced(id) on delete cascade ); SQL end shared_examples_for 'foreign key checks' do it 'finds existing foreign keys by column' do expect(model.foreign_key_exists?(:referencing, target_table, column: :non_standard_id)).to be_truthy end it 'finds existing foreign keys by name' do expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced)).to be_truthy end it 'finds existing foreign_keys by name and column' do expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id)).to be_truthy end it 'finds existing foreign_keys by name, column and on_delete' do expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :cascade)).to be_truthy end it 'finds existing foreign keys by target table only' do expect(model.foreign_key_exists?(:referencing, target_table)).to be_truthy end it 'compares by column name if given' do expect(model.foreign_key_exists?(:referencing, target_table, column: :user_id)).to be_falsey end it 'compares by target column name if given' do expect(model.foreign_key_exists?(:referencing, target_table, primary_key: :user_id)).to be_falsey expect(model.foreign_key_exists?(:referencing, target_table, primary_key: :id)).to be_truthy end it 'compares by foreign key name if given' do expect(model.foreign_key_exists?(:referencing, target_table, name: :non_existent_foreign_key_name)).to be_falsey end it 'compares by foreign key name and column if given' do expect(model.foreign_key_exists?(:referencing, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey end it 'compares by foreign key name, column and on_delete if given' do expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :nullify)).to be_falsey end end context 'without specifying a target table' do let(:target_table) { nil } it_behaves_like 'foreign key checks' end context 'specifying a target table' do let(:target_table) { :referenced } it_behaves_like 'foreign key checks' end it 'compares by target table if no column given' do expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey end it 'raises an error if an invalid on_delete is specified' do # The correct on_delete key is "nullify" expect { model.foreign_key_exists?(:referenced, on_delete: :set_null) }.to raise_error(ArgumentError) end context 'with foreign key using multiple columns' do before do model.connection.execute(<<~SQL) create table p_referenced ( id bigserial not null, partition_number bigint not null default 100, primary key (partition_number, id) ); create table p_referencing ( id bigserial primary key not null, partition_number bigint not null, constraint fk_partitioning foreign key (partition_number, id) references p_referenced(partition_number, id) on delete cascade ); SQL end it 'finds existing foreign keys by columns' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, column: [:partition_number, :id])).to be_truthy end it 'finds existing foreign keys by name' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning)).to be_truthy end it 'finds existing foreign_keys by name and column' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id])).to be_truthy end it 'finds existing foreign_keys by name, column and on_delete' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy end it 'finds existing foreign keys by target table only' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced)).to be_truthy end it 'compares by column name if given' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, column: :id)).to be_falsey end it 'compares by target column name if given' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, primary_key: :user_id)).to be_falsey expect(model.foreign_key_exists?(:p_referencing, :p_referenced, primary_key: [:partition_number, :id])).to be_truthy end it 'compares by foreign key name if given' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :non_existent_foreign_key_name)).to be_falsey end it 'compares by foreign key name and column if given' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey end it 'compares by foreign key name, column and on_delete if given' do expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey end end end describe '#true_value' do it 'returns the appropriate value' do expect(model.true_value).to eq("'t'") end end describe '#false_value' do it 'returns the appropriate value' do expect(model.false_value).to eq("'f'") end end describe '#update_column_in_batches' do context 'when running outside of a transaction' do before do expect(model).to receive(:transaction_open?).and_return(false) create_list(:project, 5) end it 'updates all the rows in a table' do model.update_column_in_batches(:projects, :description_html, 'foo') expect(Project.where(description_html: 'foo').count).to eq(5) end it 'updates boolean values correctly' do model.update_column_in_batches(:projects, :archived, true) expect(Project.where(archived: true).count).to eq(5) end context 'when a block is supplied' do it 'yields an Arel table and query object to the supplied block' do first_id = Project.first.id model.update_column_in_batches(:projects, :archived, true) do |t, query| query.where(t[:id].eq(first_id)) end expect(Project.where(archived: true).count).to eq(1) end end context 'when the value is Arel.sql (Arel::Nodes::SqlLiteral)' do it 'updates the value as a SQL expression' do model.update_column_in_batches(:projects, :star_count, Arel.sql('1+1')) expect(Project.sum(:star_count)).to eq(2 * Project.count) end end context 'when the table is write-locked' do let(:test_table) { '_test_table' } let(:lock_writes_manager) do Gitlab::Database::LockWritesManager.new( table_name: test_table, connection: model.connection, database_name: 'main', with_retries: false ) end before do model.connection.execute(<<~SQL) CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0); INSERT INTO #{test_table} (id, value) VALUES (1, 1), (2, 2), (3, 3) SQL lock_writes_manager.lock_writes end it 'disables the write-lock trigger function' do expect do model.update_column_in_batches(test_table, :value, Arel.sql('1+1'), disable_lock_writes: true) end.not_to raise_error end it 'raises an error if it does not disable the trigger function' do expect do model.update_column_in_batches(test_table, :value, Arel.sql('1+1'), disable_lock_writes: false) end.to raise_error(ActiveRecord::StatementInvalid, /Table: "#{test_table}" is write protected/) end end end context 'when running inside the transaction' do it 'raises RuntimeError' do expect(model).to receive(:transaction_open?).and_return(true) expect do model.update_column_in_batches(:projects, :star_count, Arel.sql('1+1')) end.to raise_error(RuntimeError) end end end describe '#rename_column_concurrently' do context 'in a transaction' do it 'raises RuntimeError' do allow(model).to receive(:transaction_open?).and_return(true) expect { model.rename_column_concurrently(:users, :old, :new) } .to raise_error(RuntimeError) end end context 'outside a transaction' do let(:old_column) do double(:column, type: :integer, limit: 8, default: 0, null: false, precision: 5, scale: 1) end let(:trigger_name) { model.rename_trigger_name(:users, :old, :new) } before do allow(model).to receive(:transaction_open?).and_return(false) end context 'when the column to rename exists' do before do allow(model).to receive(:column_for).and_return(old_column) end it 'renames a column concurrently' do expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:install_rename_triggers) .with(:users, :old, :new) expect(model).to receive(:add_column) .with(:users, :new, :integer, limit: old_column.limit, precision: old_column.precision, scale: old_column.scale) expect(model).to receive(:change_column_default) .with(:users, :new, old_column.default) expect(model).to receive(:update_column_in_batches) expect(model).to receive(:add_not_null_constraint).with(:users, :new) expect(model).to receive(:copy_indexes).with(:users, :old, :new) expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new) expect(model).to receive(:copy_check_constraints).with(:users, :old, :new) model.rename_column_concurrently(:users, :old, :new) end context 'with existing records and type casting' do let(:trigger_name) { model.rename_trigger_name(:users, :id, :new) } let(:user) { create(:user) } let(:copy_trigger) { double('copy trigger') } let(:connection) { ActiveRecord::Migration.connection } before do expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table) .with(:users, connection: connection).and_return(copy_trigger) end it 'copies the value to the new column using the type_cast_function', :aggregate_failures do expect(model).to receive(:copy_indexes).with(:users, :id, :new) expect(model).to receive(:add_not_null_constraint).with(:users, :new) expect(model).to receive(:execute).with("SELECT set_config('lock_writes.users', 'false', true)") expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}") expect(copy_trigger).to receive(:create).with(:id, :new, trigger_name: nil) model.rename_column_concurrently(:users, :id, :new, type_cast_function: 'cast_to_jsonb_with_default') end end it 'passes the batch_column_name' do expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true) expect(model).to receive(:check_trigger_permissions!).and_return(true) expect(model).to receive(:create_column_from).with( :users, :old, :new, type: nil, batch_column_name: :other_batch_column, type_cast_function: nil ).and_return(true) expect(model).to receive(:install_rename_triggers).and_return(true) model.rename_column_concurrently(:users, :old, :new, batch_column_name: :other_batch_column) end it 'passes the type_cast_function' do expect(model).to receive(:create_column_from).with( :users, :old, :new, type: nil, batch_column_name: :id, type_cast_function: 'JSON' ).and_return(true) model.rename_column_concurrently(:users, :old, :new, type_cast_function: 'JSON') end it 'raises an error with invalid batch_column_name' do expect do model.rename_column_concurrently(:users, :old, :new, batch_column_name: :invalid) end.to raise_error(RuntimeError, /Column invalid does not exist on users/) end context 'when default is false' do let(:old_column) do double(:column, type: :boolean, limit: nil, default: false, null: false, precision: nil, scale: nil) end it 'copies the default to the new column' do expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield expect(model).to receive(:change_column_default) .with(:users, :new, old_column.default) expect(model).to receive(:copy_check_constraints) .with(:users, :old, :new) model.rename_column_concurrently(:users, :old, :new) end end end context 'when the table in the other database is write-locked' do let(:test_table) { '_test_table' } let(:lock_writes_manager) do Gitlab::Database::LockWritesManager.new( table_name: test_table, connection: model.connection, database_name: 'main', with_retries: false ) end before do model.connection.execute(<<~SQL) CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0); INSERT INTO #{test_table} (id, value) VALUES (1, 1), (2, 2), (3, 3) SQL lock_writes_manager.lock_writes end it 'does not raise an error when renaming the column' do expect do model.rename_column_concurrently(test_table, :value, :new_value) end.not_to raise_error end end context 'when the column to be renamed does not exist' do before do allow(model).to receive(:columns).and_return([]) end it 'raises an error with appropriate message' do expect(model).to receive(:check_trigger_permissions!).with(:users) error_message = /Could not find column "missing_column" on table "users"/ expect { model.rename_column_concurrently(:users, :missing_column, :new) }.to raise_error(error_message) end end end end describe '#undo_rename_column_concurrently' do it 'reverses the operations of rename_column_concurrently' do expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:remove_rename_triggers) .with(:users, /trigger_.{12}/) expect(model).to receive(:remove_column).with(:users, :new) model.undo_rename_column_concurrently(:users, :old, :new) end end describe '#cleanup_concurrent_column_rename' do it 'cleans up the renaming procedure' do expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:remove_rename_triggers) .with(:users, /trigger_.{12}/) expect(model).to receive(:remove_column).with(:users, :old) model.cleanup_concurrent_column_rename(:users, :old, :new) end end describe '#undo_cleanup_concurrent_column_rename' do context 'in a transaction' do it 'raises RuntimeError' do allow(model).to receive(:transaction_open?).and_return(true) expect { model.undo_cleanup_concurrent_column_rename(:users, :old, :new) } .to raise_error(RuntimeError) end end context 'outside a transaction' do let(:new_column) do double(:column, type: :integer, limit: 8, default: 0, null: false, precision: 5, scale: 1) end let(:trigger_name) { model.rename_trigger_name(:users, :old, :new) } before do allow(model).to receive(:transaction_open?).and_return(false) allow(model).to receive(:column_for).and_return(new_column) end it 'reverses the operations of cleanup_concurrent_column_rename' do expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:install_rename_triggers) .with(:users, :old, :new) expect(model).to receive(:add_column) .with(:users, :old, :integer, limit: new_column.limit, precision: new_column.precision, scale: new_column.scale) expect(model).to receive(:change_column_default) .with(:users, :old, new_column.default) expect(model).to receive(:update_column_in_batches) expect(model).to receive(:add_not_null_constraint).with(:users, :old) expect(model).to receive(:copy_indexes).with(:users, :new, :old) expect(model).to receive(:copy_foreign_keys).with(:users, :new, :old) expect(model).to receive(:copy_check_constraints).with(:users, :new, :old) model.undo_cleanup_concurrent_column_rename(:users, :old, :new) end it 'passes the batch_column_name' do expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true) expect(model).to receive(:check_trigger_permissions!).and_return(true) expect(model).to receive(:create_column_from).with( :users, :new, :old, type: nil, batch_column_name: :other_batch_column ).and_return(true) expect(model).to receive(:install_rename_triggers).and_return(true) model.undo_cleanup_concurrent_column_rename(:users, :old, :new, batch_column_name: :other_batch_column) end it 'raises an error with invalid batch_column_name' do expect do model.undo_cleanup_concurrent_column_rename(:users, :old, :new, batch_column_name: :invalid) end.to raise_error(RuntimeError, /Column invalid does not exist on users/) end context 'when default is false' do let(:new_column) do double(:column, type: :boolean, limit: nil, default: false, null: false, precision: nil, scale: nil) end it 'copies the default to the old column' do expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield expect(model).to receive(:change_column_default) .with(:users, :old, new_column.default) expect(model).to receive(:copy_check_constraints) .with(:users, :new, :old) model.undo_cleanup_concurrent_column_rename(:users, :old, :new) end end end end describe '#change_column_type_concurrently' do it 'changes the column type' do expect(model).to receive(:rename_column_concurrently) .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: nil, batch_column_name: :id) model.change_column_type_concurrently('users', 'username', :text) end it 'passed the batch column name' do expect(model).to receive(:rename_column_concurrently) .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: nil, batch_column_name: :user_id) model.change_column_type_concurrently('users', 'username', :text, batch_column_name: :user_id) end context 'with type cast' do it 'changes the column type with casting the value to the new type' do expect(model).to receive(:rename_column_concurrently) .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: 'JSON', batch_column_name: :id) model.change_column_type_concurrently('users', 'username', :text, type_cast_function: 'JSON') end end end describe '#undo_change_column_type_concurrently' do it 'reverses the operations of change_column_type_concurrently' do expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:remove_rename_triggers) .with(:users, /trigger_.{12}/) expect(model).to receive(:remove_column).with(:users, "old_for_type_change") model.undo_change_column_type_concurrently(:users, :old) end end describe '#cleanup_concurrent_column_type_change' do it 'cleans up the type changing procedure' do expect(model).to receive(:cleanup_concurrent_column_rename) .with('users', 'username', 'username_for_type_change') expect(model).to receive(:rename_column) .with('users', 'username_for_type_change', 'username') model.cleanup_concurrent_column_type_change('users', 'username') end end describe '#undo_cleanup_concurrent_column_type_change' do context 'in a transaction' do it 'raises RuntimeError' do allow(model).to receive(:transaction_open?).and_return(true) expect { model.undo_cleanup_concurrent_column_type_change(:users, :old, :new) } .to raise_error(RuntimeError) end end context 'outside a transaction' do let(:temp_column) { "old_for_type_change" } let(:temp_undo_cleanup_column) do identifier = "users_old_for_type_change" hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10) "tmp_undo_cleanup_column_#{hashed_identifier}" end let(:trigger_name) { model.rename_trigger_name(:users, :old, :old_for_type_change) } before do allow(model).to receive(:transaction_open?).and_return(false) end it 'reverses the operations of cleanup_concurrent_column_type_change' do expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!) expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:create_column_from).with( :users, :old, temp_undo_cleanup_column, type: :string, batch_column_name: :id, type_cast_function: nil, limit: nil ).and_return(true) expect(model).to receive(:rename_column) .with(:users, :old, temp_column) expect(model).to receive(:rename_column) .with(:users, temp_undo_cleanup_column, :old) expect(model).to receive(:install_rename_triggers) .with(:users, :old, 'old_for_type_change') model.undo_cleanup_concurrent_column_type_change(:users, :old, :string) end it 'passes the type_cast_function, batch_column_name and limit' do expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_ddl_mode!) expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true) expect(model).to receive(:check_trigger_permissions!).with(:users) expect(model).to receive(:create_column_from).with( :users, :old, temp_undo_cleanup_column, type: :string, batch_column_name: :other_batch_column, type_cast_function: :custom_type_cast_function, limit: 8 ).and_return(true) expect(model).to receive(:rename_column) .with(:users, :old, temp_column) expect(model).to receive(:rename_column) .with(:users, temp_undo_cleanup_column, :old) expect(model).to receive(:install_rename_triggers) .with(:users, :old, 'old_for_type_change') model.undo_cleanup_concurrent_column_type_change( :users, :old, :string, type_cast_function: :custom_type_cast_function, batch_column_name: :other_batch_column, limit: 8 ) end it 'raises an error with invalid batch_column_name' do expect do model.undo_cleanup_concurrent_column_type_change(:users, :old, :new, batch_column_name: :invalid) end.to raise_error(RuntimeError, /Column invalid does not exist on users/) end end end describe '#install_rename_triggers' do let(:connection) { ActiveRecord::Migration.connection } it 'installs the triggers' do copy_trigger = double('copy trigger') expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table) .with(:users, connection: connection).and_return(copy_trigger) expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo') model.install_rename_triggers(:users, :old, :new, trigger_name: 'foo') end end describe '#remove_rename_triggers' do let(:connection) { ActiveRecord::Migration.connection } it 'removes the function and trigger' do copy_trigger = double('copy trigger') expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table) .with('bar', connection: connection).and_return(copy_trigger) expect(copy_trigger).to receive(:drop).with('foo') model.remove_rename_triggers('bar', 'foo') end end describe '#rename_trigger_name' do it 'returns a String' do expect(model.rename_trigger_name(:users, :foo, :bar)) .to match(/trigger_.{12}/) end end describe '#indexes_for' do it 'returns the indexes for a column' do idx1 = double(:idx, columns: %w(project_id)) idx2 = double(:idx, columns: %w(user_id)) allow(model).to receive(:indexes).with('table').and_return([idx1, idx2]) expect(model.indexes_for('table', :user_id)).to eq([idx2]) end end describe '#foreign_keys_for' do it 'returns the foreign keys for a column' do fk1 = double(:fk, column: 'project_id') fk2 = double(:fk, column: 'user_id') allow(model).to receive(:foreign_keys).with('table').and_return([fk1, fk2]) expect(model.foreign_keys_for('table', :user_id)).to eq([fk2]) end end describe '#copy_indexes' do context 'using a regular index using a single column' do it 'copies the index' do index = double(:index, columns: %w(project_id), name: 'index_on_issues_project_id', using: nil, where: nil, opclasses: {}, unique: false, lengths: [], orders: []) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id), { unique: false, name: 'index_on_issues_gl_project_id', length: [], order: [] }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end context 'using a regular index with multiple columns' do it 'copies the index' do index = double(:index, columns: %w(project_id foobar), name: 'index_on_issues_project_id_foobar', using: nil, where: nil, opclasses: {}, unique: false, lengths: [], orders: []) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id foobar), { unique: false, name: 'index_on_issues_gl_project_id_foobar', length: [], order: [] }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end context 'using an index with a WHERE clause' do it 'copies the index' do index = double(:index, columns: %w(project_id), name: 'index_on_issues_project_id', using: nil, where: 'foo', opclasses: {}, unique: false, lengths: [], orders: []) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id), { unique: false, name: 'index_on_issues_gl_project_id', length: [], order: [], where: 'foo' }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end context 'using an index with a USING clause' do it 'copies the index' do index = double(:index, columns: %w(project_id), name: 'index_on_issues_project_id', where: nil, using: 'foo', opclasses: {}, unique: false, lengths: [], orders: []) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id), { unique: false, name: 'index_on_issues_gl_project_id', length: [], order: [], using: 'foo' }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end context 'using an index with custom operator classes' do it 'copies the index' do index = double(:index, columns: %w(project_id), name: 'index_on_issues_project_id', using: nil, where: nil, opclasses: { 'project_id' => 'bar' }, unique: false, lengths: [], orders: []) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id), { unique: false, name: 'index_on_issues_gl_project_id', length: [], order: [], opclass: { 'gl_project_id' => 'bar' } }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end context 'using an index with multiple columns and custom operator classes' do it 'copies the index' do index = double(:index, { columns: %w(project_id foobar), name: 'index_on_issues_project_id_foobar', using: :gin, where: nil, opclasses: { 'project_id' => 'bar', 'foobar' => :gin_trgm_ops }, unique: false, lengths: [], orders: [] }) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id foobar), { unique: false, name: 'index_on_issues_gl_project_id_foobar', length: [], order: [], opclass: { 'gl_project_id' => 'bar', 'foobar' => :gin_trgm_ops }, using: :gin }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end context 'using an index with multiple columns and a custom operator class on the non affected column' do it 'copies the index' do index = double(:index, { columns: %w(project_id foobar), name: 'index_on_issues_project_id_foobar', using: :gin, where: nil, opclasses: { 'foobar' => :gin_trgm_ops }, unique: false, lengths: [], orders: [] }) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect(model).to receive(:add_concurrent_index) .with(:issues, %w(gl_project_id foobar), { unique: false, name: 'index_on_issues_gl_project_id_foobar', length: [], order: [], opclass: { 'foobar' => :gin_trgm_ops }, using: :gin }) model.copy_indexes(:issues, :project_id, :gl_project_id) end end describe 'using an index of which the name does not contain the source column' do it 'raises RuntimeError' do index = double(:index, columns: %w(project_id), name: 'index_foobar_index', using: nil, where: nil, opclasses: {}, unique: false, lengths: [], orders: []) allow(model).to receive(:indexes_for).with(:issues, 'project_id') .and_return([index]) expect { model.copy_indexes(:issues, :project_id, :gl_project_id) } .to raise_error(RuntimeError) end end end describe '#copy_foreign_keys' do it 'copies foreign keys from one column to another' do fk = double(:fk, from_table: 'issues', to_table: 'projects', on_delete: :cascade) allow(model).to receive(:foreign_keys_for).with(:issues, :project_id) .and_return([fk]) expect(model).to receive(:add_concurrent_foreign_key) .with('issues', 'projects', column: :gl_project_id, on_delete: :cascade) model.copy_foreign_keys(:issues, :project_id, :gl_project_id) end end describe '#column_for' do it 'returns a column object for an existing column' do column = model.column_for(:users, :id) expect(column.name).to eq('id') end it 'raises an error when a column does not exist' do error_message = /Could not find column "kittens" on table "users"/ expect { model.column_for(:users, :kittens) }.to raise_error(error_message) end end describe '#replace_sql' do it 'builds the sql with correct functions' do expect(model.replace_sql(Arel::Table.new(:users)[:first_name], "Alice", "Eve").to_s) .to include('regexp_replace') end describe 'results' do let!(:user) { create(:user, name: 'Kathy Alice Aliceson') } it 'replaces the correct part of the string' do allow(model).to receive(:transaction_open?).and_return(false) query = model.replace_sql(Arel::Table.new(:users)[:name], 'Alice', 'Eve') model.update_column_in_batches(:users, :name, query) expect(user.reload.name).to eq('Kathy Eve Aliceson') end end end describe '#check_trigger_permissions!' do it 'does nothing when the user has the correct permissions' do expect { model.check_trigger_permissions!('users') } .not_to raise_error end it 'raises RuntimeError when the user does not have the correct permissions' do allow(Gitlab::Database::Grant).to receive(:create_and_execute_trigger?) .with('kittens') .and_return(false) expect { model.check_trigger_permissions!('kittens') } .to raise_error(RuntimeError, /Your database user is not allowed/) end end describe '#convert_to_bigint_column' do it 'returns the name of the temporary column used to convert to bigint' do expect(model.convert_to_bigint_column(:id)).to eq('id_convert_to_bigint') end end describe '#convert_to_type_column' do it 'returns the name of the temporary column used to convert to bigint' do expect(model.convert_to_type_column(:id, :int, :bigint)).to eq('id_convert_int_to_bigint') end it 'returns the name of the temporary column used to convert to uuid' do expect(model.convert_to_type_column(:uuid, :string, :uuid)).to eq('uuid_convert_string_to_uuid') end end describe '#create_temporary_columns_and_triggers' do let(:table) { :test_table } let(:column) { :id } let(:mappings) do { id: { from_type: :int, to_type: :bigint } } end let(:old_bigint_column_naming) { false } subject do model.create_temporary_columns_and_triggers( table, mappings, old_bigint_column_naming: old_bigint_column_naming ) end before do model.create_table table, id: false do |t| t.integer :id, primary_key: true t.integer :non_nullable_column, null: false t.integer :nullable_column t.timestamps end end context 'when no mappings are provided' do let(:mappings) { nil } it 'raises an error' do expect { subject }.to raise_error("No mappings for column conversion provided") end end context 'when any of the mappings does not have the required keys' do let(:mappings) do { id: { from_type: :int } } end it 'raises an error' do expect { subject }.to raise_error("Some mappings don't have required keys provided") end end context 'when the target table does not exist' do it 'raises an error' do expect { model.create_temporary_columns_and_triggers(:non_existent_table, mappings) }.to raise_error("Table non_existent_table does not exist") end end context 'when the column to migrate does not exist' do let(:missing_column) { :test } let(:mappings) do { missing_column => { from_type: :int, to_type: :bigint } } end it 'raises an error' do expect { subject }.to raise_error("Column #{missing_column} does not exist on #{table}") end end context 'when old_bigint_column_naming is true' do let(:old_bigint_column_naming) { true } it 'calls convert_to_bigint_column' do expect(model).to receive(:convert_to_bigint_column).with(:id).and_return("id_convert_to_bigint") subject end end context 'when old_bigint_column_naming is false' do it 'calls convert_to_type_column' do expect(model).to receive(:convert_to_type_column).with(:id, :int, :bigint).and_return("id_convert_to_bigint") subject end end end describe '#initialize_conversion_of_integer_to_bigint' do let(:table) { :test_table } let(:column) { :id } let(:tmp_column) { model.convert_to_bigint_column(column) } before do model.create_table table, id: false do |t| t.integer :id, primary_key: true t.integer :non_nullable_column, null: false t.integer :nullable_column t.timestamps end end context 'when the target table does not exist' do it 'raises an error' do expect { model.initialize_conversion_of_integer_to_bigint(:this_table_is_not_real, column) } .to raise_error('Table this_table_is_not_real does not exist') end end context 'when the primary key does not exist' do it 'raises an error' do expect { model.initialize_conversion_of_integer_to_bigint(table, column, primary_key: :foobar) } .to raise_error("Column foobar does not exist on #{table}") end end context 'when the column to migrate does not exist' do it 'raises an error' do expect { model.initialize_conversion_of_integer_to_bigint(table, :this_column_is_not_real) } .to raise_error(ArgumentError, "Column this_column_is_not_real does not exist on #{table}") end end context 'when the column to convert is the primary key' do it 'creates a not-null bigint column and installs triggers' do expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false) expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column]) model.initialize_conversion_of_integer_to_bigint(table, column) end end context 'when the column to convert is not the primary key, but non-nullable' do let(:column) { :non_nullable_column } it 'creates a not-null bigint column and installs triggers' do expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false) expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column]) model.initialize_conversion_of_integer_to_bigint(table, column) end end context 'when the column to convert is not the primary key, but nullable' do let(:column) { :nullable_column } it 'creates a nullable bigint column and installs triggers' do expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: nil) expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column]) model.initialize_conversion_of_integer_to_bigint(table, column) end end context 'when multiple columns are given' do it 'creates the correct columns and installs the trigger' do columns_to_convert = %i[id non_nullable_column nullable_column] temporary_columns = columns_to_convert.map { |column| model.convert_to_bigint_column(column) } expect(model).to receive(:add_column).with(table, temporary_columns[0], :bigint, default: 0, null: false) expect(model).to receive(:add_column).with(table, temporary_columns[1], :bigint, default: 0, null: false) expect(model).to receive(:add_column).with(table, temporary_columns[2], :bigint, default: nil) expect(model).to receive(:install_rename_triggers).with(table, columns_to_convert, temporary_columns) model.initialize_conversion_of_integer_to_bigint(table, columns_to_convert) end end end describe '#restore_conversion_of_integer_to_bigint' do let(:table) { :test_table } let(:column) { :id } let(:tmp_column) { model.convert_to_bigint_column(column) } before do model.create_table table, id: false do |t| t.bigint :id, primary_key: true t.bigint :build_id, null: false t.timestamps end end context 'when the target table does not exist' do it 'raises an error' do expect { model.restore_conversion_of_integer_to_bigint(:this_table_is_not_real, column) } .to raise_error('Table this_table_is_not_real does not exist') end end context 'when the column to migrate does not exist' do it 'raises an error' do expect { model.restore_conversion_of_integer_to_bigint(table, :this_column_is_not_real) } .to raise_error(ArgumentError, "Column this_column_is_not_real does not exist on #{table}") end end context 'when a single column is given' do let(:column_to_convert) { 'id' } let(:temporary_column) { model.convert_to_bigint_column(column_to_convert) } it 'creates the correct columns and installs the trigger' do expect(model).to receive(:add_column).with(table, temporary_column, :int, default: 0, null: false) expect(model).to receive(:install_rename_triggers).with(table, [column_to_convert], [temporary_column]) model.restore_conversion_of_integer_to_bigint(table, column_to_convert) end end context 'when multiple columns are given' do let(:columns_to_convert) { %i[id build_id] } let(:temporary_columns) { columns_to_convert.map { |column| model.convert_to_bigint_column(column) } } it 'creates the correct columns and installs the trigger' do expect(model).to receive(:add_column).with(table, temporary_columns[0], :int, default: 0, null: false) expect(model).to receive(:add_column).with(table, temporary_columns[1], :int, default: 0, null: false) expect(model).to receive(:install_rename_triggers).with(table, columns_to_convert, temporary_columns) model.restore_conversion_of_integer_to_bigint(table, columns_to_convert) end end end describe '#revert_initialize_conversion_of_integer_to_bigint' do let(:table) { :test_table } before do model.create_table table, id: false do |t| t.integer :id, primary_key: true t.integer :other_id end model.initialize_conversion_of_integer_to_bigint(table, columns) end context 'when single column is given' do let(:columns) { :id } it 'removes column, trigger, and function' do temporary_column = model.convert_to_bigint_column(columns) trigger_name = model.rename_trigger_name(table, :id, temporary_column) model.revert_initialize_conversion_of_integer_to_bigint(table, columns) expect(model.column_exists?(table, temporary_column)).to eq(false) expect_trigger_not_to_exist(table, trigger_name) expect_function_not_to_exist(trigger_name) end end context 'when multiple columns are given' do let(:columns) { [:id, :other_id] } it 'removes column, trigger, and function' do temporary_columns = columns.map { |column| model.convert_to_bigint_column(column) } trigger_name = model.rename_trigger_name(table, columns, temporary_columns) model.revert_initialize_conversion_of_integer_to_bigint(table, columns) temporary_columns.each do |column| expect(model.column_exists?(table, column)).to eq(false) end expect_trigger_not_to_exist(table, trigger_name) expect_function_not_to_exist(trigger_name) end end end describe '#backfill_conversion_of_integer_to_bigint' do let(:table) { :_test_backfill_table } let(:column) { :id } let(:tmp_column) { model.convert_to_bigint_column(column) } before do model.create_table table, id: false do |t| t.integer :id, primary_key: true t.text :message, null: false t.integer :other_id t.timestamps end allow(model).to receive(:transaction_open?).and_return(false) end context 'when the target table does not exist' do it 'raises an error' do expect { model.backfill_conversion_of_integer_to_bigint(:this_table_is_not_real, column) } .to raise_error('Table this_table_is_not_real does not exist') end end context 'when the primary key does not exist' do it 'raises an error' do expect { model.backfill_conversion_of_integer_to_bigint(table, column, primary_key: :foobar) } .to raise_error("Column foobar does not exist on #{table}") end end context 'when the column to convert does not exist' do let(:column) { :foobar } it 'raises an error' do expect { model.backfill_conversion_of_integer_to_bigint(table, column) } .to raise_error(ArgumentError, "Column #{column} does not exist on #{table}") end end context 'when the temporary column does not exist' do it 'raises an error' do expect { model.backfill_conversion_of_integer_to_bigint(table, column) } .to raise_error(ArgumentError, "Column #{tmp_column} does not exist on #{table}") end end context 'when the conversion is properly initialized' do let(:model_class) do Class.new(ActiveRecord::Base) do self.table_name = :_test_backfill_table end end let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.with_status(:active) } before do model.initialize_conversion_of_integer_to_bigint(table, columns) model_class.create!(message: 'hello') model_class.create!(message: 'so long') end context 'when a single column is being converted' do let(:columns) { column } it 'creates the batched migration tracking record' do last_record = model_class.create!(message: 'goodbye') expect do model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1) end.to change { migration_relation.count }.by(1) expect(migration_relation.last).to have_attributes( job_class_name: 'CopyColumnUsingBackgroundMigrationJob', table_name: table.to_s, column_name: column.to_s, min_value: 1, max_value: last_record.id, interval: 120, batch_size: 2, sub_batch_size: 1, job_arguments: [[column.to_s], [model.convert_to_bigint_column(column)]] ) end end context 'when multiple columns are being converted' do let(:other_column) { :other_id } let(:other_tmp_column) { model.convert_to_bigint_column(other_column) } let(:columns) { [column, other_column] } it 'creates the batched migration tracking record' do last_record = model_class.create!(message: 'goodbye', other_id: 50) expect do model.backfill_conversion_of_integer_to_bigint(table, columns, batch_size: 2, sub_batch_size: 1) end.to change { migration_relation.count }.by(1) expect(migration_relation.last).to have_attributes( job_class_name: 'CopyColumnUsingBackgroundMigrationJob', table_name: table.to_s, column_name: column.to_s, min_value: 1, max_value: last_record.id, interval: 120, batch_size: 2, sub_batch_size: 1, job_arguments: [[column.to_s, other_column.to_s], [tmp_column, other_tmp_column]] ) end end end end describe '#revert_backfill_conversion_of_integer_to_bigint' do let(:table) { :_test_backfill_table } let(:primary_key) { :id } before do model.create_table table, id: false do |t| t.integer primary_key, primary_key: true t.text :message, null: false t.integer :other_id t.timestamps end allow(model).to receive(:transaction_open?).and_return(false) model.initialize_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key) model.backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key) end context 'when a single column is being converted' do let(:columns) { :id } it 'deletes the batched migration tracking record' do expect do model.revert_backfill_conversion_of_integer_to_bigint(table, columns) end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1) end end context 'when a multiple columns are being converted' do let(:columns) { [:id, :other_id] } it 'deletes the batched migration tracking record' do expect do model.revert_backfill_conversion_of_integer_to_bigint(table, columns) end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1) end end context 'when primary key column has custom name' do let(:primary_key) { :other_pk } let(:columns) { :other_id } it 'deletes the batched migration tracking record' do expect do model.revert_backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key) end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1) end end end describe '#index_exists_by_name?' do it 'returns true if an index exists' do ActiveRecord::Migration.connection.execute( 'CREATE INDEX test_index_for_index_exists ON projects (path);' ) expect(model.index_exists_by_name?(:projects, 'test_index_for_index_exists')) .to be_truthy end it 'returns false if the index does not exist' do expect(model.index_exists_by_name?(:projects, 'this_does_not_exist')) .to be_falsy end context 'when an index with a function exists' do before do ActiveRecord::Migration.connection.execute( 'CREATE INDEX test_index ON projects (LOWER(path));' ) end it 'returns true if an index exists' do expect(model.index_exists_by_name?(:projects, 'test_index')) .to be_truthy end end context 'when an index exists for a table with the same name in another schema' do before do ActiveRecord::Migration.connection.execute( 'CREATE SCHEMA new_test_schema' ) ActiveRecord::Migration.connection.execute( 'CREATE TABLE new_test_schema.projects (id integer, name character varying)' ) ActiveRecord::Migration.connection.execute( 'CREATE INDEX test_index_on_name ON new_test_schema.projects (LOWER(name));' ) end it 'returns false if the index does not exist in the current schema' do expect(model.index_exists_by_name?(:projects, 'test_index_on_name')) .to be_falsy end end end describe '#create_or_update_plan_limit' do before do stub_const('Plan', Class.new(ActiveRecord::Base)) stub_const('PlanLimits', Class.new(ActiveRecord::Base)) Plan.class_eval do self.table_name = 'plans' end PlanLimits.class_eval do self.table_name = 'plan_limits' end end it 'properly escapes names' do expect(model).to receive(:execute).with <<~SQL INSERT INTO plan_limits (plan_id, "project_hooks") SELECT id, '10' FROM plans WHERE name = 'free' LIMIT 1 ON CONFLICT (plan_id) DO UPDATE SET "project_hooks" = EXCLUDED."project_hooks"; SQL model.create_or_update_plan_limit('project_hooks', 'free', 10) end context 'when plan does not exist' do it 'does not create any plan limits' do expect { model.create_or_update_plan_limit('project_hooks', 'plan_name', 10) } .not_to change { PlanLimits.count } end end context 'when plan does exist' do let!(:plan) { Plan.create!(name: 'plan_name') } context 'when limit does not exist' do it 'inserts a new plan limits' do expect { model.create_or_update_plan_limit('project_hooks', 'plan_name', 10) } .to change { PlanLimits.count }.by(1) expect(PlanLimits.pluck(:project_hooks)).to contain_exactly(10) end end context 'when limit does exist' do let!(:plan_limit) { PlanLimits.create!(plan_id: plan.id) } it 'updates an existing plan limits' do expect { model.create_or_update_plan_limit('project_hooks', 'plan_name', 999) } .not_to change { PlanLimits.count } expect(plan_limit.reload.project_hooks).to eq(999) end end end end describe '#backfill_iids' do include MigrationsHelpers let_it_be(:issue_base_type_enum) { 0 } let_it_be(:issue_type) { table(:work_item_types).find_by(base_type: issue_base_type_enum) } let(:issue_class) do Class.new(ActiveRecord::Base) do include AtomicInternalId self.table_name = 'issues' self.inheritance_column = :_type_disabled belongs_to :project, class_name: "::Project", inverse_of: nil has_internal_id :iid, scope: :project, init: ->(s, _scope) { s&.project&.issues&.maximum(:iid) }, presence: false before_validation -> { self.work_item_type_id = ::WorkItems::Type.default_issue_type.id } end end let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:issues) { table(:issues) } def setup namespace = namespaces.create!(name: 'foo', path: 'foo', type: Namespaces::UserNamespace.sti_name) project_namespace = namespaces.create!(name: 'project-foo', path: 'project-foo', type: 'Project', parent_id: namespace.id, visibility_level: 20) projects.create!(namespace_id: namespace.id, project_namespace_id: project_namespace.id) end it 'generates iids properly for models created after the migration' do project = setup model.backfill_iids('issues') issue = issue_class.create!(project_id: project.id, namespace_id: project.project_namespace_id) expect(issue.iid).to eq(1) end it 'generates iids properly for models created after the migration when iids are backfilled' do project = setup issue_a = issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: issue_type.id) model.backfill_iids('issues') issue_b = issue_class.create!(project_id: project.id, namespace_id: project.project_namespace_id) expect(issue_a.reload.iid).to eq(1) expect(issue_b.iid).to eq(2) end it 'generates iids properly for models created after the migration across multiple projects' do project_a = setup project_b = setup issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id) issues.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id) issues.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id) model.backfill_iids('issues') issue_a = issue_class.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id) issue_b = issue_class.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id) expect(issue_a.iid).to eq(2) expect(issue_b.iid).to eq(3) end context 'when the first model is created for a project after the migration' do it 'generates an iid' do project_a = setup project_b = setup issue_a = issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id) model.backfill_iids('issues') issue_b = issue_class.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id) expect(issue_a.reload.iid).to eq(1) expect(issue_b.reload.iid).to eq(1) end end context 'when a row already has an iid set in the database' do it 'backfills iids' do project = setup issue_a = issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: issue_type.id, iid: 1) issue_b = issues.create!(project_id: project.id, namespace_id: project.project_namespace_id, work_item_type_id: issue_type.id, iid: 2) model.backfill_iids('issues') expect(issue_a.reload.iid).to eq(1) expect(issue_b.reload.iid).to eq(2) end it 'backfills for multiple projects' do project_a = setup project_b = setup issue_a = issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id, iid: 1) issue_b = issues.create!(project_id: project_b.id, namespace_id: project_b.project_namespace_id, work_item_type_id: issue_type.id, iid: 1) issue_c = issues.create!(project_id: project_a.id, namespace_id: project_a.project_namespace_id, work_item_type_id: issue_type.id, iid: 2) model.backfill_iids('issues') expect(issue_a.reload.iid).to eq(1) expect(issue_b.reload.iid).to eq(1) expect(issue_c.reload.iid).to eq(2) end end end describe '#add_primary_key_using_index' do it "executes the statement to add the primary key" do expect(model).to receive(:execute).with /ALTER TABLE "test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/ model.add_primary_key_using_index(:test_table, :old_name, :new_name) end end context 'when changing the primary key of a given table' do before do model.create_table(:test_table, primary_key: :id) do |t| t.integer :partition_number, default: 1 end model.add_index(:test_table, :id, unique: true, name: :old_index_name) model.add_index(:test_table, [:id, :partition_number], unique: true, name: :new_index_name) end describe '#swap_primary_key' do it 'executes statements to swap primary key', :aggregate_failures do expect(model).to receive(:with_lock_retries).with(raise_on_exhaustion: true).ordered.and_yield expect(model).to receive(:execute).with(/ALTER TABLE "test_table" DROP CONSTRAINT "test_table_pkey" CASCADE/).and_call_original expect(model).to receive(:execute).with(/ALTER TABLE "test_table" ADD CONSTRAINT "test_table_pkey" PRIMARY KEY USING INDEX "new_index_name"/).and_call_original model.swap_primary_key(:test_table, :test_table_pkey, :new_index_name) end context 'when new index does not exist' do before do model.remove_index(:test_table, column: [:id, :partition_number]) end it 'raises ActiveRecord::StatementInvalid' do expect do model.swap_primary_key(:test_table, :test_table_pkey, :new_index_name) end.to raise_error(ActiveRecord::StatementInvalid) end end end describe '#unswap_primary_key' do it 'executes statements to unswap primary key' do expect(model).to receive(:with_lock_retries).with(raise_on_exhaustion: true).ordered.and_yield expect(model).to receive(:execute).with(/ALTER TABLE "test_table" DROP CONSTRAINT "test_table_pkey" CASCADE/).ordered.and_call_original expect(model).to receive(:execute).with(/ALTER TABLE "test_table" ADD CONSTRAINT "test_table_pkey" PRIMARY KEY USING INDEX "old_index_name"/).ordered.and_call_original model.unswap_primary_key(:test_table, :test_table_pkey, :old_index_name) end end end describe '#drop_sequence' do it "executes the statement to drop the sequence" do expect(model).to receive(:execute).with /ALTER TABLE "test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "test_table_id_seq"/ model.drop_sequence(:test_table, :test_column, :test_table_id_seq) end end describe '#add_sequence' do it "executes the statement to add the sequence" do expect(model).to receive(:execute).with "CREATE SEQUENCE \"test_table_id_seq\" START 1;\nALTER TABLE \"test_table\" ALTER COLUMN \"test_column\" SET DEFAULT nextval(\'test_table_id_seq\')\n" model.add_sequence(:test_table, :test_column, :test_table_id_seq, 1) end end describe "#partition?" do subject { model.partition?(table_name) } let(:table_name) { 'ci_builds_metadata' } context "when a partition table exist" do context 'when the view postgres_partitions exists' do it 'calls the view', :aggregate_failures do expect(Gitlab::Database::PostgresPartition).to receive(:partition_exists?).with(table_name).and_call_original expect(subject).to be_truthy end end context 'when the view postgres_partitions does not exist' do before do allow(model).to receive(:view_exists?).and_return(false) end it 'does not call the view', :aggregate_failures do expect(Gitlab::Database::PostgresPartition).to receive(:legacy_partition_exists?).with(table_name).and_call_original expect(subject).to be_truthy end end end context "when a partition table does not exist" do let(:table_name) { 'partition_does_not_exist' } it { is_expected.to be_falsey } end end end