author    | GitLab Bot <gitlab-bot@gitlab.com> | 2021-09-20 13:18:24 +0000
committer | GitLab Bot <gitlab-bot@gitlab.com> | 2021-09-20 13:18:24 +0000
commit    | 0653e08efd039a5905f3fa4f6e9cef9f5d2f799c (patch)
tree      | 4dcc884cf6d81db44adae4aa99f8ec1233a41f55 /db/post_migrate
parent    | 744144d28e3e7fddc117924fef88de5d9674fe4c (diff)
download  | gitlab-ce-0653e08efd039a5905f3fa4f6e9cef9f5d2f799c.tar.gz
Add latest changes from gitlab-org/gitlab@14-3-stable-ee (tag: v14.3.0-rc42)
Diffstat (limited to 'db/post_migrate')
44 files changed, 1741 insertions, 1 deletion
diff --git a/db/post_migrate/20201106134950_deduplicate_epic_iids.rb b/db/post_migrate/20201106134950_deduplicate_epic_iids.rb
index bc7daf9329d..8fddc81057b 100644
--- a/db/post_migrate/20201106134950_deduplicate_epic_iids.rb
+++ b/db/post_migrate/20201106134950_deduplicate_epic_iids.rb
@@ -85,7 +85,7 @@ class DeduplicateEpicIids < ActiveRecord::Migration[6.0]
     instance = subject.is_a?(::Class) ? nil : subject
 
-    subject.transaction(requires_new: true) do
+    subject.transaction(requires_new: true) do # rubocop:disable Performance/ActiveRecordSubtransactions
       InternalId.create!(
         **scope,
         usage: usage_value,
diff --git a/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb b/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb
new file mode 100644
index 00000000000..b99a61e8e63
--- /dev/null
+++ b/db/post_migrate/20210622045705_finalize_events_bigint_conversion.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+class FinalizeEventsBigintConversion < ActiveRecord::Migration[6.1]
+  include Gitlab::Database::MigrationHelpers
+
+  disable_ddl_transaction!
+
+  TABLE_NAME = 'events'
+
+  def up
+    ensure_batched_background_migration_is_finished(
+      job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+      table_name: TABLE_NAME,
+      column_name: 'id',
+      job_arguments: [["id"], ["id_convert_to_bigint"]]
+    )
+
+    swap
+  end
+
+  def down
+    swap
+  end
+
+  private
+
+  def swap
+    # This is to replace the existing "events_pkey" PRIMARY KEY, btree (id)
+    add_concurrent_index TABLE_NAME, :id_convert_to_bigint, unique: true, name: 'index_events_on_id_convert_to_bigint'
+    # This is to replace the existing "index_events_on_project_id_and_id" btree (project_id, id)
+    add_concurrent_index TABLE_NAME, [:project_id, :id_convert_to_bigint], name: 'index_events_on_project_id_and_id_convert_to_bigint'
+    # This is to replace the existing "index_events_on_project_id_and_id_desc_on_merged_action" btree (project_id, id DESC) WHERE action = 7
+    add_concurrent_index TABLE_NAME, [:project_id, :id_convert_to_bigint], order: { id_convert_to_bigint: :desc },
+      where: "action = 7", name: 'index_events_on_project_id_and_id_bigint_desc_on_merged_action'
+
+    # Add a FK on `push_event_payloads(event_id)` to `id_convert_to_bigint`, the old FK (fk_36c74129da)
+    # will be removed when events_pkey constraint is dropped.
+    fk_event_id = concurrent_foreign_key_name(:push_event_payloads, :event_id)
+    fk_event_id_tmp = "#{fk_event_id}_tmp"
+    add_concurrent_foreign_key :push_event_payloads, TABLE_NAME,
+      column: :event_id, target_column: :id_convert_to_bigint,
+      name: fk_event_id_tmp, on_delete: :cascade, reverse_lock_order: true
+
+    with_lock_retries(raise_on_exhaustion: true) do
+      # We'll need ACCESS EXCLUSIVE lock on the related tables,
+      # lets make sure it can be acquired from the start.
+      # Lock order should be
+      #   1. events
+      #   2. push_event_payloads
+      # in order to match the order in EventCreateService#create_push_event,
+      # and avoid deadlocks.
+      execute "LOCK TABLE #{TABLE_NAME}, push_event_payloads IN ACCESS EXCLUSIVE MODE"
+
+      # Swap column names
+      temp_name = 'id_tmp'
+      execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:id)} TO #{quote_column_name(temp_name)}"
+      execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:id_convert_to_bigint)} TO #{quote_column_name(:id)}"
+      execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(temp_name)} TO #{quote_column_name(:id_convert_to_bigint)}"
+
+      # We need to update the trigger function in order to make PostgreSQL to
+      # regenerate the execution plan for it. This is to avoid type mismatch errors like
+      # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
+      function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint)
+      execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
+
+      # Swap defaults
+      execute "ALTER SEQUENCE events_id_seq OWNED BY #{TABLE_NAME}.id"
+      change_column_default TABLE_NAME, :id, -> { "nextval('events_id_seq'::regclass)" }
+      change_column_default TABLE_NAME, :id_convert_to_bigint, 0
+
+      # Swap PK constraint
+      execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT events_pkey CASCADE" # this will drop fk_36c74129da
+      rename_index TABLE_NAME, 'index_events_on_id_convert_to_bigint', 'events_pkey'
+      execute "ALTER TABLE #{TABLE_NAME} ADD CONSTRAINT events_pkey PRIMARY KEY USING INDEX events_pkey"
+
+      # Rename the rest of the indexes (we already hold an exclusive lock, so no need to use DROP INDEX CONCURRENTLY here)
+      execute 'DROP INDEX index_events_on_project_id_and_id'
+      rename_index TABLE_NAME, 'index_events_on_project_id_and_id_convert_to_bigint', 'index_events_on_project_id_and_id'
+      execute 'DROP INDEX index_events_on_project_id_and_id_desc_on_merged_action'
+      rename_index TABLE_NAME, 'index_events_on_project_id_and_id_bigint_desc_on_merged_action', 'index_events_on_project_id_and_id_desc_on_merged_action'
+
+      # Change the name of the temporary FK
+      rename_constraint(:push_event_payloads, fk_event_id_tmp, fk_event_id)
+    end
+  end
+end
diff --git a/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb b/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb
new file mode 100644
index 00000000000..a8a9fe037ec
--- /dev/null
+++ b/db/post_migrate/20210701141346_finalize_ci_builds_stage_id_bigint_conversion.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+class FinalizeCiBuildsStageIdBigintConversion < ActiveRecord::Migration[6.1]
+  include Gitlab::Database::MigrationHelpers
+
+  disable_ddl_transaction!
+
+  TABLE_NAME = 'ci_builds'
+
+  def up
+    ensure_batched_background_migration_is_finished(
+      job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+      table_name: TABLE_NAME,
+      column_name: 'id',
+      job_arguments: [%w[id stage_id], %w[id_convert_to_bigint stage_id_convert_to_bigint]]
+    )
+
+    swap_columns
+  end
+
+  def down
+    swap_columns
+  end
+
+  private
+
+  def swap_columns
+    # Create a copy of the original column's index on the new column
+    add_concurrent_index TABLE_NAME, :stage_id_convert_to_bigint, name: :index_ci_builds_on_converted_stage_id # rubocop:disable Migration/PreventIndexCreation
+
+    # Create a copy of the original column's FK on the new column
+    add_concurrent_foreign_key TABLE_NAME, :ci_stages, column: :stage_id_convert_to_bigint, on_delete: :cascade,
+      reverse_lock_order: true
+
+    with_lock_retries(raise_on_exhaustion: true) do
+      quoted_table_name = quote_table_name(TABLE_NAME)
+      quoted_referenced_table_name = quote_table_name(:ci_stages)
+
+      # Acquire locks up-front, not just to the build table but the FK's referenced table
+      execute "LOCK TABLE #{quoted_referenced_table_name}, #{quoted_table_name} IN ACCESS EXCLUSIVE MODE"
+
+      # Swap the column names of the two columns
+      temporary_name = 'stage_id_tmp'
+      execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(:stage_id)} TO #{quote_column_name(temporary_name)}"
+      execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(:stage_id_convert_to_bigint)} TO #{quote_column_name(:stage_id)}"
+      execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(temporary_name)} TO #{quote_column_name(:stage_id_convert_to_bigint)}"
+
+      # Reset the function so PG drops the plan cache for the incorrect integer type
+      function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME)
+        .name([:id, :stage_id], [:id_convert_to_bigint, :stage_id_convert_to_bigint])
+      execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
+
+      # Remove the original column index, and rename the new column index to the original name
+      execute 'DROP INDEX index_ci_builds_on_stage_id'
+      rename_index TABLE_NAME, :index_ci_builds_on_converted_stage_id, :index_ci_builds_on_stage_id
+
+      # Remove the original column foreign key, and rename the new column foreign key to the original name
+      remove_foreign_key TABLE_NAME, name: concurrent_foreign_key_name(TABLE_NAME, :stage_id)
+      rename_constraint(
+        TABLE_NAME,
+        concurrent_foreign_key_name(TABLE_NAME, :stage_id_convert_to_bigint),
+        concurrent_foreign_key_name(TABLE_NAME, :stage_id))
+    end
+  end
+end
diff --git a/db/post_migrate/20210706112800_remove_cloud_license_enabled_from_application_settings.rb b/db/post_migrate/20210706112800_remove_cloud_license_enabled_from_application_settings.rb
new file mode 100644
index 00000000000..7224e84c1b3
--- /dev/null
+++ b/db/post_migrate/20210706112800_remove_cloud_license_enabled_from_application_settings.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class RemoveCloudLicenseEnabledFromApplicationSettings < ActiveRecord::Migration[6.1]
+  include Gitlab::Database::MigrationHelpers
+
+  disable_ddl_transaction!
+
+  def up
+    with_lock_retries do
+      remove_column :application_settings, :cloud_license_enabled
+    end
+  end
+
+  def down
+    with_lock_retries do
+      add_column :application_settings, :cloud_license_enabled, :boolean, null: false, default: false
+    end
+  end
+end
diff --git a/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb b/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb
new file mode 100644
index 00000000000..f75df04ba48
--- /dev/null
+++ b/db/post_migrate/20210708011426_finalize_ci_builds_metadata_bigint_conversion.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+class FinalizeCiBuildsMetadataBigintConversion < Gitlab::Database::Migration[1.0]
+  disable_ddl_transaction!
+
+  TABLE_NAME = 'ci_builds_metadata'
+
+  def up
+    ensure_batched_background_migration_is_finished(
+      job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+      table_name: TABLE_NAME,
+      column_name: 'id',
+      job_arguments: [["id"], ["id_convert_to_bigint"]]
+    )
+
+    ensure_batched_background_migration_is_finished(
+      job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+      table_name: TABLE_NAME,
+      column_name: 'id',
+      job_arguments: [["build_id"], ["build_id_convert_to_bigint"]]
+    )
+
+    swap
+  end
+
+  def down
+    swap
+  end
+
+  private
+
+  def swap
+    # Indexes were pre-created on gitlab.com to avoid slowing down deployments
+    #
+    # rubocop:disable Migration/PreventIndexCreation
+    add_concurrent_index TABLE_NAME, :id_convert_to_bigint, unique: true, name: 'index_ci_builds_metadata_on_id_convert_to_bigint'
+    add_concurrent_index TABLE_NAME, :build_id_convert_to_bigint, where: 'has_exposed_artifacts IS TRUE', name: 'index_ci_builds_metadata_on_build_id_int8_and_exposed_artifacts'
+    create_covering_index TABLE_NAME, 'index_ci_builds_metadata_on_build_id_int8_where_interruptible'
+    add_concurrent_index TABLE_NAME, :build_id_convert_to_bigint, unique: true, name: 'index_ci_builds_metadata_on_build_id_convert_to_bigint'
+    # rubocop:enable Migration/PreventIndexCreation
+
+    add_concurrent_foreign_key TABLE_NAME, :ci_builds, column: :build_id_convert_to_bigint, on_delete: :cascade,
+      reverse_lock_order: true
+
+    with_lock_retries(raise_on_exhaustion: true) do
+      execute "LOCK TABLE ci_builds, #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
+
+      # rubocop:disable Migration/WithLockRetriesDisallowedMethod
+      swap_column :id
+      swap_column :build_id
+      # rubocop:enable Migration/WithLockRetriesDisallowedMethod
+
+      # We need to update the trigger function in order to make PostgreSQL to
+      # regenerate the execution plan for it. This is to avoid type mismatch errors like
+      # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)"
+      execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:id, :id_convert_to_bigint))} RESET ALL"
+      execute "ALTER FUNCTION #{quote_table_name(Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(:build_id, :build_id_convert_to_bigint))} RESET ALL"
+
+      # Swap defaults for PK
+      execute "ALTER SEQUENCE ci_builds_metadata_id_seq OWNED BY #{TABLE_NAME}.id"
+      change_column_default TABLE_NAME, :id, -> { "nextval('ci_builds_metadata_id_seq'::regclass)" }
+      change_column_default TABLE_NAME, :id_convert_to_bigint, 0
+
+      # Swap defaults for FK
+      change_column_default TABLE_NAME, :build_id, nil
+      change_column_default TABLE_NAME, :build_id_convert_to_bigint, 0
+
+      # Swap PK constraint
+      execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT ci_builds_metadata_pkey CASCADE"
+      rename_index TABLE_NAME, 'index_ci_builds_metadata_on_id_convert_to_bigint', 'ci_builds_metadata_pkey'
+      execute "ALTER TABLE #{TABLE_NAME} ADD CONSTRAINT ci_builds_metadata_pkey PRIMARY KEY USING INDEX ci_builds_metadata_pkey"
+
+      # Rename the rest of the indexes (we already hold an exclusive lock, so no need to use DROP INDEX CONCURRENTLY here)
+      # rubocop:disable Migration/WithLockRetriesDisallowedMethod
+      swap_index 'index_ci_builds_metadata_on_build_id', 'index_ci_builds_metadata_on_build_id_convert_to_bigint'
+      swap_index 'index_ci_builds_metadata_on_build_id_and_has_exposed_artifacts', 'index_ci_builds_metadata_on_build_id_int8_and_exposed_artifacts'
+      swap_index 'index_ci_builds_metadata_on_build_id_and_id_and_interruptible', 'index_ci_builds_metadata_on_build_id_int8_where_interruptible'
+      # rubocop:enable Migration/WithLockRetriesDisallowedMethod
+
+      # Swap FK constraint
+      remove_foreign_key TABLE_NAME, name: concurrent_foreign_key_name(TABLE_NAME, :build_id)
+      rename_constraint(
+        TABLE_NAME,
+        concurrent_foreign_key_name(TABLE_NAME, :build_id_convert_to_bigint),
+        concurrent_foreign_key_name(TABLE_NAME, :build_id)
+      )
+    end
+  end
+
+  def swap_index(old, new)
+    execute "DROP INDEX #{old}"
+    rename_index TABLE_NAME, new, old
+  end
+
+  def swap_column(name)
+    temp_name = "#{name}_tmp"
+    execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(name)} TO #{quote_column_name(temp_name)}"
+    execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:"#{name}_convert_to_bigint")} TO #{quote_column_name(name)}"
+    execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(temp_name)} TO #{quote_column_name(:"#{name}_convert_to_bigint")}"
+  end
+
+  def create_covering_index(table, name)
+    return if index_exists_by_name?(table, name)
+
+    disable_statement_timeout do
+      execute <<~SQL
+        CREATE INDEX CONCURRENTLY #{name}
+        ON #{table} (build_id_convert_to_bigint) INCLUDE (id_convert_to_bigint)
+        WHERE interruptible = true
+      SQL
+    end
+  end
+end
diff --git a/db/post_migrate/20210721122840_remove_seat_link_enabled_from_application_settings.rb b/db/post_migrate/20210721122840_remove_seat_link_enabled_from_application_settings.rb
new file mode 100644
index 00000000000..20118dbbac8
--- /dev/null
+++ b/db/post_migrate/20210721122840_remove_seat_link_enabled_from_application_settings.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class RemoveSeatLinkEnabledFromApplicationSettings < ActiveRecord::Migration[6.1]
+  def up
+    remove_column :application_settings, :seat_link_enabled
+  end
+
+  def down
+    add_column :application_settings, :seat_link_enabled, :boolean, null: false, default: true
+  end
+end
diff --git a/db/post_migrate/20210730104800_schedule_extract_project_topics_into_separate_table.rb b/db/post_migrate/20210730104800_schedule_extract_project_topics_into_separate_table.rb
new file mode 100644
index 00000000000..3102561a129
--- /dev/null
+++ b/db/post_migrate/20210730104800_schedule_extract_project_topics_into_separate_table.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+class ScheduleExtractProjectTopicsIntoSeparateTable < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  DOWNTIME = false
+  BATCH_SIZE = 1_000
+  DELAY_INTERVAL = 2.minutes
+  MIGRATION = 'ExtractProjectTopicsIntoSeparateTable'
+  INDEX_NAME = 'tmp_index_taggings_on_id_where_taggable_type_project'
+  INDEX_CONDITION = "taggable_type = 'Project'"
+
+  disable_ddl_transaction!
+
+  class Tagging < ActiveRecord::Base
+    include ::EachBatch
+
+    self.table_name = 'taggings'
+  end
+
+  def up
+    # this index is used in 20210730104800_schedule_extract_project_topics_into_separate_table
+    add_concurrent_index :taggings, :id, where: INDEX_CONDITION, name: INDEX_NAME # rubocop:disable Migration/PreventIndexCreation
+
+    queue_background_migration_jobs_by_range_at_intervals(
+      Tagging.where(taggable_type: 'Project'),
+      MIGRATION,
+      DELAY_INTERVAL,
+      batch_size: BATCH_SIZE,
+      track_jobs: true
+    )
+  end
+
+  def down
+    remove_concurrent_index_by_name :taggings, INDEX_NAME
+  end
+end
diff --git a/db/post_migrate/20210731132939_backfill_stage_event_hash.rb b/db/post_migrate/20210731132939_backfill_stage_event_hash.rb
new file mode 100644
index 00000000000..2c4dc904387
--- /dev/null
+++ b/db/post_migrate/20210731132939_backfill_stage_event_hash.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+class BackfillStageEventHash < ActiveRecord::Migration[6.1]
+  include Gitlab::Database::MigrationHelpers
+
+  disable_ddl_transaction!
+ + BATCH_SIZE = 100 + EVENT_ID_IDENTIFIER_MAPPING = { + 1 => :issue_created, + 2 => :issue_first_mentioned_in_commit, + 3 => :issue_closed, + 4 => :issue_first_added_to_board, + 5 => :issue_first_associated_with_milestone, + 7 => :issue_last_edited, + 8 => :issue_label_added, + 9 => :issue_label_removed, + 10 => :issue_deployed_to_production, + 100 => :merge_request_created, + 101 => :merge_request_first_deployed_to_production, + 102 => :merge_request_last_build_finished, + 103 => :merge_request_last_build_started, + 104 => :merge_request_merged, + 105 => :merge_request_closed, + 106 => :merge_request_last_edited, + 107 => :merge_request_label_added, + 108 => :merge_request_label_removed, + 109 => :merge_request_first_commit_at, + 1000 => :code_stage_start, + 1001 => :issue_stage_end, + 1002 => :plan_stage_start + }.freeze + + LABEL_BASED_EVENTS = Set.new([8, 9, 107, 108]).freeze + + class GroupStage < ActiveRecord::Base + include EachBatch + + self.table_name = 'analytics_cycle_analytics_group_stages' + end + + class ProjectStage < ActiveRecord::Base + include EachBatch + + self.table_name = 'analytics_cycle_analytics_project_stages' + end + + class StageEventHash < ActiveRecord::Base + self.table_name = 'analytics_cycle_analytics_stage_event_hashes' + end + + def up + GroupStage.reset_column_information + ProjectStage.reset_column_information + StageEventHash.reset_column_information + + update_stage_table(GroupStage) + update_stage_table(ProjectStage) + + add_not_null_constraint :analytics_cycle_analytics_group_stages, :stage_event_hash_id + add_not_null_constraint :analytics_cycle_analytics_project_stages, :stage_event_hash_id + end + + def down + remove_not_null_constraint :analytics_cycle_analytics_group_stages, :stage_event_hash_id + remove_not_null_constraint :analytics_cycle_analytics_project_stages, :stage_event_hash_id + end + + private + + def update_stage_table(klass) + klass.each_batch(of: BATCH_SIZE) do |relation| + klass.transaction do + records = relation.where(stage_event_hash_id: nil).lock!.to_a # prevent concurrent modification (unlikely to happen) + records = delete_invalid_records(records) + next if records.empty? + + hashes_by_stage = records.to_h { |stage| [stage, calculate_stage_events_hash(stage)] } + hashes = hashes_by_stage.values.uniq + + StageEventHash.insert_all(hashes.map { |hash| { hash_sha256: hash } }) + + stage_event_hashes_by_hash = StageEventHash.where(hash_sha256: hashes).index_by(&:hash_sha256) + records.each do |stage| + stage.update!(stage_event_hash_id: stage_event_hashes_by_hash[hashes_by_stage[stage]].id) + end + end + end + end + + def calculate_stage_events_hash(stage) + start_event_hash = calculate_event_hash(stage.start_event_identifier, stage.start_event_label_id) + end_event_hash = calculate_event_hash(stage.end_event_identifier, stage.end_event_label_id) + + Digest::SHA256.hexdigest("#{start_event_hash}-#{end_event_hash}") + end + + def calculate_event_hash(event_identifier, label_id = nil) + str = EVENT_ID_IDENTIFIER_MAPPING.fetch(event_identifier).to_s + str << "-#{label_id}" if LABEL_BASED_EVENTS.include?(event_identifier) + + Digest::SHA256.hexdigest(str) + end + + # Invalid records are safe to delete, since they are not working properly anyway + def delete_invalid_records(records) + to_be_deleted = records.select do |record| + EVENT_ID_IDENTIFIER_MAPPING[record.start_event_identifier].nil? || + EVENT_ID_IDENTIFIER_MAPPING[record.end_event_identifier].nil? 
+ end + + to_be_deleted.each(&:delete) + records - to_be_deleted + end +end diff --git a/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb b/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb new file mode 100644 index 00000000000..beb15e77878 --- /dev/null +++ b/db/post_migrate/20210806131706_finalize_taggins_bigint_conversion.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +class FinalizeTagginsBigintConversion < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + TABLE_NAME = 'taggings' + + def up + ensure_batched_background_migration_is_finished( + job_class_name: 'CopyColumnUsingBackgroundMigrationJob', + table_name: TABLE_NAME, + column_name: 'id', + job_arguments: [%w[id taggable_id], %w[id_convert_to_bigint taggable_id_convert_to_bigint]] + ) + + swap + end + + def down + swap + end + + private + + def swap + # rubocop:disable Migration/PreventIndexCreation + add_concurrent_index TABLE_NAME, :id_convert_to_bigint, unique: true, name: 'index_taggings_on_id_convert_to_bigint' + + # This is to replace the existing "index_taggings_on_taggable_id_and_taggable_type" btree (taggable_id, taggable_type) + add_concurrent_index TABLE_NAME, [:taggable_id_convert_to_bigint, :taggable_type], name: 'i_taggings_on_taggable_id_convert_to_bigint_and_taggable_type' + + # This is to replace the existing "index_taggings_on_taggable_id_and_taggable_type_and_context" btree (taggable_id, taggable_type, context) + add_concurrent_index TABLE_NAME, [:taggable_id_convert_to_bigint, :taggable_type, :context], name: 'i_taggings_on_taggable_bigint_and_taggable_type_and_context' + + # This is to replace the existing "taggings_idx" btree (tag_id, taggable_id, taggable_type, context, tagger_id, tagger_type) + add_concurrent_index TABLE_NAME, [:tag_id, :taggable_id_convert_to_bigint, :taggable_type, :context, :tagger_id, :tagger_type], unique: true, name: 'taggings_idx_tmp' + + # This is to replace the existing "tmp_index_taggings_on_id_where_taggable_type_project" btree (id) WHERE taggable_type::text = 'Project'::text + add_concurrent_index TABLE_NAME, :id_convert_to_bigint, where: "taggable_type = 'Project'", name: 'tmp_index_taggings_on_id_bigint_where_taggable_type_project' + # rubocop:enable Migration/PreventIndexCreation + + with_lock_retries(raise_on_exhaustion: true) do + # We'll need ACCESS EXCLUSIVE lock on the related tables, + # lets make sure it can be acquired from the start + execute "LOCK TABLE #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE" + + # Swap column names + temp_name = 'taggable_id_tmp' + execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:taggable_id)} TO #{quote_column_name(temp_name)}" + execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:taggable_id_convert_to_bigint)} TO #{quote_column_name(:taggable_id)}" + execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(temp_name)} TO #{quote_column_name(:taggable_id_convert_to_bigint)}" + + temp_name = 'id_tmp' + execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:id)} TO #{quote_column_name(temp_name)}" + execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(:id_convert_to_bigint)} TO #{quote_column_name(:id)}" + execute "ALTER TABLE #{quote_table_name(TABLE_NAME)} RENAME COLUMN #{quote_column_name(temp_name)} TO #{quote_column_name(:id_convert_to_bigint)}" + + # We need to update the 
trigger function in order to make PostgreSQL to + # regenerate the execution plan for it. This is to avoid type mismatch errors like + # "type of parameter 15 (bigint) does not match that when preparing the plan (integer)" + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name([:id, :taggable_id], [:id_convert_to_bigint, :taggable_id_convert_to_bigint]) + execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" + + # Swap defaults + execute "ALTER SEQUENCE taggings_id_seq OWNED BY #{TABLE_NAME}.id" + change_column_default TABLE_NAME, :id, -> { "nextval('taggings_id_seq'::regclass)" } + change_column_default TABLE_NAME, :id_convert_to_bigint, 0 + + # Swap PK constraint + execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT taggings_pkey CASCADE" + rename_index TABLE_NAME, 'index_taggings_on_id_convert_to_bigint', 'taggings_pkey' + execute "ALTER TABLE #{TABLE_NAME} ADD CONSTRAINT taggings_pkey PRIMARY KEY USING INDEX taggings_pkey" + + # Rename the index on the `bigint` column to match the new column name + # (we already hold an exclusive lock, so no need to use DROP INDEX CONCURRENTLY here) + execute 'DROP INDEX index_taggings_on_taggable_id_and_taggable_type' + rename_index TABLE_NAME, 'i_taggings_on_taggable_id_convert_to_bigint_and_taggable_type', 'index_taggings_on_taggable_id_and_taggable_type' + execute 'DROP INDEX index_taggings_on_taggable_id_and_taggable_type_and_context' + rename_index TABLE_NAME, 'i_taggings_on_taggable_bigint_and_taggable_type_and_context', 'index_taggings_on_taggable_id_and_taggable_type_and_context' + execute 'DROP INDEX taggings_idx' + rename_index TABLE_NAME, 'taggings_idx_tmp', 'taggings_idx' + execute 'DROP INDEX tmp_index_taggings_on_id_where_taggable_type_project' + rename_index TABLE_NAME, 'tmp_index_taggings_on_id_bigint_where_taggable_type_project', 'tmp_index_taggings_on_id_where_taggable_type_project' + end + end +end diff --git a/db/post_migrate/20210813151908_replace_external_wiki_triggers.rb b/db/post_migrate/20210813151908_replace_external_wiki_triggers.rb new file mode 100644 index 00000000000..d11baae42e2 --- /dev/null +++ b/db/post_migrate/20210813151908_replace_external_wiki_triggers.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +class ReplaceExternalWikiTriggers < ActiveRecord::Migration[6.1] + include Gitlab::Database::SchemaHelpers + + def up + replace_triggers('type_new', 'Integrations::ExternalWiki') + + # we need an extra trigger to handle when type_new is updated by the + # `integrations_set_type_new` trigger. + # This can be removed when this trigger has been removed. 
+ execute(<<~SQL.squish) + CREATE TRIGGER #{trigger_name(:type_new_updated)} + AFTER UPDATE OF type_new ON integrations FOR EACH ROW + WHEN ((new.type_new)::text = 'Integrations::ExternalWiki'::text AND new.project_id IS NOT NULL) + EXECUTE FUNCTION set_has_external_wiki(); + SQL + end + + def down + execute("DROP TRIGGER IF EXISTS #{trigger_name(:type_new_updated)} ON integrations;") + replace_triggers('type', 'ExternalWikiService') + end + + private + + def replace_triggers(column_name, value) + triggers(column_name, value).each do |event, condition| + trigger = trigger_name(event) + + # create duplicate trigger, using the defined condition + execute(<<~SQL.squish) + CREATE TRIGGER #{trigger}_new AFTER #{event.upcase} ON integrations FOR EACH ROW + WHEN (#{condition}) + EXECUTE FUNCTION set_has_external_wiki(); + SQL + + # Swap the triggers in place, so that the new trigger has the canonical name + execute("ALTER TRIGGER #{trigger} ON integrations RENAME TO #{trigger}_old;") + execute("ALTER TRIGGER #{trigger}_new ON integrations RENAME TO #{trigger};") + + # remove the old, now redundant trigger + execute("DROP TRIGGER IF EXISTS #{trigger}_old ON integrations;") + end + end + + def trigger_name(event) + "trigger_has_external_wiki_on_#{event}" + end + + def triggers(column_name, value) + { + delete: "#{matches_value('old', column_name, value)} AND #{project_not_null('old')}", + insert: "(new.active = true) AND #{matches_value('new', column_name, value)} AND #{project_not_null('new')}", + update: "#{matches_value('new', column_name, value)} AND (old.active <> new.active) AND #{project_not_null('new')}" + } + end + + def project_not_null(row) + "(#{row}.project_id IS NOT NULL)" + end + + def matches_value(row, column_name, value) + "((#{row}.#{column_name})::text = '#{value}'::text)" + end +end diff --git a/db/post_migrate/20210817024335_prepare_indexes_for_events_bigint_conversion.rb b/db/post_migrate/20210817024335_prepare_indexes_for_events_bigint_conversion.rb new file mode 100644 index 00000000000..1d102d6216c --- /dev/null +++ b/db/post_migrate/20210817024335_prepare_indexes_for_events_bigint_conversion.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +class PrepareIndexesForEventsBigintConversion < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + TABLE_NAME = 'events' + + def up + prepare_async_index TABLE_NAME, :id_convert_to_bigint, unique: true, + name: :index_events_on_id_convert_to_bigint + + prepare_async_index TABLE_NAME, [:project_id, :id_convert_to_bigint], + name: :index_events_on_project_id_and_id_convert_to_bigint + + prepare_async_index TABLE_NAME, [:project_id, :id_convert_to_bigint], order: { id_convert_to_bigint: :desc }, + where: 'action = 7', name: :index_events_on_project_id_and_id_bigint_desc_on_merged_action + end + + def down + unprepare_async_index_by_name TABLE_NAME, :index_events_on_id_convert_to_bigint + unprepare_async_index_by_name TABLE_NAME, :index_events_on_project_id_and_id_convert_to_bigint + unprepare_async_index_by_name TABLE_NAME, :index_events_on_project_id_and_id_bigint_desc_on_merged_action + end +end diff --git a/db/post_migrate/20210818185845_backfill_projects_with_coverage.rb b/db/post_migrate/20210818185845_backfill_projects_with_coverage.rb new file mode 100644 index 00000000000..003b7536767 --- /dev/null +++ b/db/post_migrate/20210818185845_backfill_projects_with_coverage.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +class BackfillProjectsWithCoverage < ActiveRecord::Migration[6.1] + include 
Gitlab::Database::MigrationHelpers + + MIGRATION = 'BackfillProjectsWithCoverage' + DELAY_INTERVAL = 2.minutes + BATCH_SIZE = 10_000 + SUB_BATCH_SIZE = 1_000 + + disable_ddl_transaction! + + class CiDailyBuildGroupReportResult < ActiveRecord::Base + include EachBatch + + self.table_name = 'ci_daily_build_group_report_results' + end + + def up + queue_background_migration_jobs_by_range_at_intervals( + CiDailyBuildGroupReportResult, + MIGRATION, + DELAY_INTERVAL, + batch_size: BATCH_SIZE, + other_job_arguments: [SUB_BATCH_SIZE] + ) + end + + def down + # noop + end +end diff --git a/db/post_migrate/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session.rb b/db/post_migrate/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session.rb new file mode 100644 index 00000000000..bbcee556020 --- /dev/null +++ b/db/post_migrate/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +class DropTemporaryColumnsAndTriggersForCiBuildsRunnerSession < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + TABLE = 'ci_builds_runner_session' + TEMPORARY_COLUMN = 'build_id_convert_to_bigint' + MAIN_COLUMN = 'build_id' + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + with_lock_retries do + cleanup_conversion_of_integer_to_bigint(TABLE, MAIN_COLUMN) + end + end + + def down + check_trigger_permissions!(TABLE) + + with_lock_retries do + add_column(TABLE, TEMPORARY_COLUMN, :int, default: 0, null: false) + install_rename_triggers(TABLE, MAIN_COLUMN, TEMPORARY_COLUMN) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod +end diff --git a/db/post_migrate/20210823113259_steal_merge_request_diff_commit_users_migration.rb b/db/post_migrate/20210823113259_steal_merge_request_diff_commit_users_migration.rb new file mode 100644 index 00000000000..7e893d62c28 --- /dev/null +++ b/db/post_migrate/20210823113259_steal_merge_request_diff_commit_users_migration.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +# See https://docs.gitlab.com/ee/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class StealMergeRequestDiffCommitUsersMigration < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + def up + job = Gitlab::Database::BackgroundMigrationJob + .for_migration_class('MigrateMergeRequestDiffCommitUsers') + .pending + .last + + return unless job + + # We schedule in one hour so we don't end up running the migrations while a + # deployment is still wrapping up. Not that that really matters, but it + # prevents from too much happening during a deployment window. + migrate_in(1.hour, 'StealMigrateMergeRequestDiffCommitUsers', job.arguments) + end + + def down + # no-op + end +end diff --git a/db/post_migrate/20210823132600_remove_duplicate_dast_site_tokens.rb b/db/post_migrate/20210823132600_remove_duplicate_dast_site_tokens.rb new file mode 100644 index 00000000000..35cf3b55200 --- /dev/null +++ b/db/post_migrate/20210823132600_remove_duplicate_dast_site_tokens.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +class RemoveDuplicateDastSiteTokens < ActiveRecord::Migration[6.1] + disable_ddl_transaction! 
+ + class DastSiteToken < ApplicationRecord + self.table_name = 'dast_site_tokens' + self.inheritance_column = :_type_disabled + + scope :duplicates, -> do + all_duplicates = select(:project_id, :url) + .distinct + .group(:project_id, :url) + .having('count(*) > 1') + .pluck('array_agg(id) as ids') + + duplicate_ids = extract_duplicate_ids(all_duplicates) + + where(id: duplicate_ids) + end + + def self.extract_duplicate_ids(duplicates) + duplicates.flat_map { |ids| ids.first(ids.size - 1) } + end + end + + def up + DastSiteToken.duplicates.delete_all + end + + def down + end +end diff --git a/db/post_migrate/20210823142036_drop_temporary_trigger_for_ci_job_artifacts.rb b/db/post_migrate/20210823142036_drop_temporary_trigger_for_ci_job_artifacts.rb new file mode 100644 index 00000000000..42b25a192d5 --- /dev/null +++ b/db/post_migrate/20210823142036_drop_temporary_trigger_for_ci_job_artifacts.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +class DropTemporaryTriggerForCiJobArtifacts < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + TABLE = 'ci_job_artifacts' + TEMPORARY_COLUMNS = %w(id_convert_to_bigint job_id_convert_to_bigint) + MAIN_COLUMNS = %w(id job_id) + TRIGGER = 'trigger_be1804f21693' + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + check_trigger_permissions!(TABLE) + + with_lock_retries do + remove_rename_triggers(TABLE, TRIGGER) + end + end + + def down + check_trigger_permissions!(TABLE) + + with_lock_retries do + install_rename_triggers(TABLE, MAIN_COLUMNS, TEMPORARY_COLUMNS, trigger_name: TRIGGER) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod +end diff --git a/db/post_migrate/20210823193234_remove_allow_editing_commit_messages_from_project_settings.rb b/db/post_migrate/20210823193234_remove_allow_editing_commit_messages_from_project_settings.rb new file mode 100644 index 00000000000..638361d0e83 --- /dev/null +++ b/db/post_migrate/20210823193234_remove_allow_editing_commit_messages_from_project_settings.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class RemoveAllowEditingCommitMessagesFromProjectSettings < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + def up + return unless column_exists?(:project_settings, :allow_editing_commit_messages) + + with_lock_retries do + remove_column :project_settings, :allow_editing_commit_messages + end + end + + def down + with_lock_retries do + add_column :project_settings, :allow_editing_commit_messages, :boolean, default: false, null: false + end + end +end diff --git a/db/post_migrate/20210824102624_add_project_namespace_index_to_project.rb b/db/post_migrate/20210824102624_add_project_namespace_index_to_project.rb new file mode 100644 index 00000000000..d88a31fca7d --- /dev/null +++ b/db/post_migrate/20210824102624_add_project_namespace_index_to_project.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class AddProjectNamespaceIndexToProject < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! 
+ + INDEX_NAME = 'index_projects_on_project_namespace_id' + + def up + add_concurrent_index :projects, :project_namespace_id, name: INDEX_NAME, unique: true + end + + def down + remove_concurrent_index_by_name :projects, INDEX_NAME + end +end diff --git a/db/post_migrate/20210824102750_add_project_namespace_foreign_key_to_project.rb b/db/post_migrate/20210824102750_add_project_namespace_foreign_key_to_project.rb new file mode 100644 index 00000000000..5cb42fd61a7 --- /dev/null +++ b/db/post_migrate/20210824102750_add_project_namespace_foreign_key_to_project.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class AddProjectNamespaceForeignKeyToProject < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + TARGET_COLUMN = :project_namespace_id + + def up + add_concurrent_foreign_key :projects, :namespaces, column: TARGET_COLUMN, on_delete: :cascade + end + + def down + with_lock_retries do + remove_foreign_key_if_exists(:projects, column: TARGET_COLUMN) + end + end +end diff --git a/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb b/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb new file mode 100644 index 00000000000..0a0fda7e870 --- /dev/null +++ b/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +class PrepareCiBuildsMetadataAndCiBuildAsyncIndexes < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + def up + prepare_async_index :ci_builds_metadata, :id_convert_to_bigint, unique: true, + name: :index_ci_builds_metadata_on_id_convert_to_bigint + + prepare_async_index :ci_builds_metadata, :build_id_convert_to_bigint, unique: true, + name: :index_ci_builds_metadata_on_build_id_convert_to_bigint + + prepare_async_index :ci_builds_metadata, :build_id_convert_to_bigint, where: 'has_exposed_artifacts IS TRUE', + name: :index_ci_builds_metadata_on_build_id_int8_and_exposed_artifacts + + prepare_async_index_from_sql(:ci_builds_metadata, :index_ci_builds_metadata_on_build_id_int8_where_interruptible, <<~SQL.squish) + CREATE INDEX CONCURRENTLY "index_ci_builds_metadata_on_build_id_int8_where_interruptible" + ON "ci_builds_metadata" ("build_id_convert_to_bigint") INCLUDE ("id_convert_to_bigint") + WHERE interruptible = true + SQL + + prepare_async_index :ci_builds, :id_convert_to_bigint, unique: true, + name: :index_ci_builds_on_converted_id + end + + def down + unprepare_async_index_by_name :ci_builds, :index_ci_builds_on_converted_id + + unprepare_async_index_by_name :ci_builds_metadata, :index_ci_builds_metadata_on_build_id_int8_where_interruptible + + unprepare_async_index_by_name :ci_builds_metadata, :index_ci_builds_metadata_on_build_id_int8_and_exposed_artifacts + + unprepare_async_index_by_name :ci_builds_metadata, :index_ci_builds_metadata_on_build_id_convert_to_bigint + + unprepare_async_index_by_name :ci_builds_metadata, :index_ci_builds_metadata_on_id_convert_to_bigint + end + + private + + def prepare_async_index_from_sql(table_name, index_name, definition) + return unless async_index_creation_available? 
+ + return if index_name_exists?(table_name, index_name) + + async_index = Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.safe_find_or_create_by!(name: index_name) do |rec| + rec.table_name = table_name + rec.definition = definition + end + + Gitlab::AppLogger.info( + message: 'Prepared index for async creation', + table_name: async_index.table_name, + index_name: async_index.name) + end +end diff --git a/db/post_migrate/20210825150212_cleanup_remaining_orphan_invites.rb b/db/post_migrate/20210825150212_cleanup_remaining_orphan_invites.rb new file mode 100644 index 00000000000..d892e6897af --- /dev/null +++ b/db/post_migrate/20210825150212_cleanup_remaining_orphan_invites.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +class CleanupRemainingOrphanInvites < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + TMP_INDEX_NAME = 'tmp_idx_members_with_orphaned_invites' + + QUERY_CONDITION = "invite_token IS NOT NULL AND user_id IS NOT NULL" + + def up + membership = define_batchable_model('members') + + add_concurrent_index :members, :id, where: QUERY_CONDITION, name: TMP_INDEX_NAME + + membership.where(QUERY_CONDITION).pluck(:id).each_slice(10) do |group| + membership.where(id: group).where(QUERY_CONDITION).update_all(invite_token: nil) + end + + remove_concurrent_index_by_name :members, TMP_INDEX_NAME + end + + def down + remove_concurrent_index_by_name :members, TMP_INDEX_NAME if index_exists_by_name?(:members, TMP_INDEX_NAME) + end +end diff --git a/db/post_migrate/20210825182303_remove_duplicate_dast_site_tokens_with_same_token.rb b/db/post_migrate/20210825182303_remove_duplicate_dast_site_tokens_with_same_token.rb new file mode 100644 index 00000000000..4d8e18ba8ed --- /dev/null +++ b/db/post_migrate/20210825182303_remove_duplicate_dast_site_tokens_with_same_token.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +class RemoveDuplicateDastSiteTokensWithSameToken < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + INDEX_NAME = 'index_dast_site_token_on_token' + + # rubocop: disable Migration/AddIndex + def up + execute("WITH duplicate_tokens AS( + SELECT id, rank() OVER (PARTITION BY token ORDER BY id) r FROM dast_site_tokens + ) + DELETE FROM dast_site_tokens c USING duplicate_tokens t + WHERE c.id = t.id AND t.r > 1;") + + add_index :dast_site_tokens, :token, name: INDEX_NAME, unique: true + end + + # rubocop: disable Migration/RemoveIndex + def down + remove_index :dast_site_tokens, :token, name: INDEX_NAME + end +end diff --git a/db/post_migrate/20210825193548_add_fk_to_iteration_cadence_id_on_boards.rb b/db/post_migrate/20210825193548_add_fk_to_iteration_cadence_id_on_boards.rb new file mode 100644 index 00000000000..eb879d9bc7a --- /dev/null +++ b/db/post_migrate/20210825193548_add_fk_to_iteration_cadence_id_on_boards.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class AddFkToIterationCadenceIdOnBoards < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! 
+ + INDEX_NAME = 'index_boards_on_iteration_cadence_id' + + def up + add_concurrent_index :boards, :iteration_cadence_id, name: INDEX_NAME + add_concurrent_foreign_key :boards, :iterations_cadences, column: :iteration_cadence_id + end + + def down + with_lock_retries do + remove_foreign_key_if_exists :boards, column: :iteration_cadence_id + end + remove_concurrent_index_by_name :boards, INDEX_NAME + end +end diff --git a/db/post_migrate/20210825193652_backfill_cadence_id_for_boards_scoped_to_iteration.rb b/db/post_migrate/20210825193652_backfill_cadence_id_for_boards_scoped_to_iteration.rb new file mode 100644 index 00000000000..f350fbe3d12 --- /dev/null +++ b/db/post_migrate/20210825193652_backfill_cadence_id_for_boards_scoped_to_iteration.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +class BackfillCadenceIdForBoardsScopedToIteration < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + BATCH_SIZE = 1000 + DELAY = 2.minutes.to_i + MIGRATION = 'BackfillIterationCadenceIdForBoards' + + class MigrationBoard < ApplicationRecord + include EachBatch + + self.table_name = 'boards' + end + + def up + schedule_backfill_group_boards + schedule_backfill_project_boards + end + + def down + MigrationBoard.where.not(iteration_cadence_id: nil).each_batch(of: BATCH_SIZE) do |batch, index| + range = batch.pluck(Arel.sql('MIN(id)'), Arel.sql('MAX(id)')).first + delay = index * DELAY + + migrate_in(delay, MIGRATION, ['none', 'down', *range]) + end + end + + private + + def schedule_backfill_project_boards + MigrationBoard.where(iteration_id: -4).where.not(project_id: nil).where(iteration_cadence_id: nil).each_batch(of: BATCH_SIZE) do |batch, index| + range = batch.pluck(Arel.sql('MIN(id)'), Arel.sql('MAX(id)')).first + delay = index * DELAY + + migrate_in(delay, MIGRATION, ['project', 'up', *range]) + end + end + + def schedule_backfill_group_boards + MigrationBoard.where(iteration_id: -4).where.not(group_id: nil).where(iteration_cadence_id: nil).each_batch(of: BATCH_SIZE) do |batch, index| + range = batch.pluck(Arel.sql('MIN(id)'), Arel.sql('MAX(id)')).first + delay = index * DELAY + + migrate_in(delay, MIGRATION, ['group', 'up', *range]) + end + end +end diff --git a/db/post_migrate/20210826110839_prepare_indexes_for_ci_job_artifacts_expire_at_unlocked.rb b/db/post_migrate/20210826110839_prepare_indexes_for_ci_job_artifacts_expire_at_unlocked.rb new file mode 100644 index 00000000000..e11bb25d83c --- /dev/null +++ b/db/post_migrate/20210826110839_prepare_indexes_for_ci_job_artifacts_expire_at_unlocked.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +class PrepareIndexesForCiJobArtifactsExpireAtUnlocked < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + TABLE_NAME = 'ci_job_artifacts' + INDEX_NAME = 'ci_job_artifacts_expire_at_unlocked_idx' + + def up + prepare_async_index TABLE_NAME, [:expire_at], where: 'locked = 0', name: INDEX_NAME + end + + def down + unprepare_async_index_by_name TABLE_NAME, INDEX_NAME + end +end diff --git a/db/post_migrate/20210826171758_initialize_throttle_unauthenticated_api_columns.rb b/db/post_migrate/20210826171758_initialize_throttle_unauthenticated_api_columns.rb new file mode 100644 index 00000000000..7615931464b --- /dev/null +++ b/db/post_migrate/20210826171758_initialize_throttle_unauthenticated_api_columns.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +# Initialize the new `throttle_unauthenticated_api_*` columns with the current values +# from the `throttle_unauthenticated_*` columns, which will now only 
apply to web requests. +# +# The columns for the unauthenticated web rate limit will be renamed later +# in https://gitlab.com/gitlab-org/gitlab/-/issues/340031. +class InitializeThrottleUnauthenticatedApiColumns < ActiveRecord::Migration[6.1] + class ApplicationSetting < ActiveRecord::Base + self.table_name = :application_settings + end + + def up + ApplicationSetting.update_all(%q{ + throttle_unauthenticated_api_enabled = throttle_unauthenticated_enabled, + throttle_unauthenticated_api_requests_per_period = throttle_unauthenticated_requests_per_period, + throttle_unauthenticated_api_period_in_seconds = throttle_unauthenticated_period_in_seconds + }) + end + + def down + end +end diff --git a/db/post_migrate/20210826193907_add_unique_index_dast_site_token_project_id_and_url.rb b/db/post_migrate/20210826193907_add_unique_index_dast_site_token_project_id_and_url.rb new file mode 100644 index 00000000000..1e65d5647e4 --- /dev/null +++ b/db/post_migrate/20210826193907_add_unique_index_dast_site_token_project_id_and_url.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class AddUniqueIndexDastSiteTokenProjectIdAndUrl < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! + + INDEX_NAME = 'index_dast_site_token_on_project_id_and_url' + + def up + add_concurrent_index :dast_site_tokens, [:project_id, :url], name: INDEX_NAME, unique: true + end + + def down + remove_concurrent_index_by_name :dast_site_tokens, name: INDEX_NAME + end +end diff --git a/db/post_migrate/20210830104800_reschedule_extract_project_topics_into_separate_table.rb b/db/post_migrate/20210830104800_reschedule_extract_project_topics_into_separate_table.rb new file mode 100644 index 00000000000..d6b2db7790f --- /dev/null +++ b/db/post_migrate/20210830104800_reschedule_extract_project_topics_into_separate_table.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class RescheduleExtractProjectTopicsIntoSeparateTable < ActiveRecord::Migration[6.0] + include Gitlab::Database::MigrationHelpers + + MIGRATION = 'ExtractProjectTopicsIntoSeparateTable' + DELAY_INTERVAL = 4.minutes + + disable_ddl_transaction! + + def up + requeue_background_migration_jobs_by_range_at_intervals(MIGRATION, DELAY_INTERVAL) + end + + def down + # no-op + end +end diff --git a/db/post_migrate/20210831123008_drop_temporary_trigger_for_ci_sources_pipelines.rb b/db/post_migrate/20210831123008_drop_temporary_trigger_for_ci_sources_pipelines.rb new file mode 100644 index 00000000000..a3fe0e6bd7a --- /dev/null +++ b/db/post_migrate/20210831123008_drop_temporary_trigger_for_ci_sources_pipelines.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +class DropTemporaryTriggerForCiSourcesPipelines < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! 
+ + TABLE = 'ci_sources_pipelines' + TEMPORARY_COLUMN = 'source_job_id_convert_to_bigint' + MAIN_COLUMN = 'source_job_id' + TRIGGER = 'trigger_8485e97c00e3' + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + check_trigger_permissions!(TABLE) + + with_lock_retries do + remove_rename_triggers(TABLE, TRIGGER) + end + end + + def down + check_trigger_permissions!(TABLE) + + with_lock_retries do + install_rename_triggers(TABLE, MAIN_COLUMN, TEMPORARY_COLUMN, trigger_name: TRIGGER) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod +end diff --git a/db/post_migrate/20210901044202_push_event_payloads_bigint_conversion_remove_triggers.rb b/db/post_migrate/20210901044202_push_event_payloads_bigint_conversion_remove_triggers.rb new file mode 100644 index 00000000000..e11f0d46563 --- /dev/null +++ b/db/post_migrate/20210901044202_push_event_payloads_bigint_conversion_remove_triggers.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +class PushEventPayloadsBigintConversionRemoveTriggers < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + TABLE_NAME = :push_event_payloads + TRIGGER_NAME = 'trigger_07c94931164e' + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + check_trigger_permissions!(TABLE_NAME) + + with_lock_retries do + remove_rename_triggers(TABLE_NAME, TRIGGER_NAME) + end + end + + def down + check_trigger_permissions!(TABLE_NAME) + + with_lock_retries do + install_rename_triggers(TABLE_NAME, :event_id, :event_id_convert_to_bigint, trigger_name: TRIGGER_NAME) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod +end diff --git a/db/post_migrate/20210901044237_events_bigint_conversion_remove_triggers.rb b/db/post_migrate/20210901044237_events_bigint_conversion_remove_triggers.rb new file mode 100644 index 00000000000..bfe3af2efa8 --- /dev/null +++ b/db/post_migrate/20210901044237_events_bigint_conversion_remove_triggers.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +class EventsBigintConversionRemoveTriggers < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + TABLE_NAME = :events + TRIGGER_NAME = :trigger_69523443cc10 + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + check_trigger_permissions!(TABLE_NAME) + + with_lock_retries do + remove_rename_triggers(TABLE_NAME, TRIGGER_NAME) + end + end + + def down + check_trigger_permissions!(TABLE_NAME) + + with_lock_retries do + install_rename_triggers(TABLE_NAME, :id, :id_convert_to_bigint, trigger_name: TRIGGER_NAME) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod +end diff --git a/db/post_migrate/20210901153324_slice_merge_request_diff_commit_migrations.rb b/db/post_migrate/20210901153324_slice_merge_request_diff_commit_migrations.rb new file mode 100644 index 00000000000..8ee7feae1a6 --- /dev/null +++ b/db/post_migrate/20210901153324_slice_merge_request_diff_commit_migrations.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +class SliceMergeRequestDiffCommitMigrations < ActiveRecord::Migration[6.1] + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + BATCH_SIZE = 5_000 + MIGRATION_CLASS = 'MigrateMergeRequestDiffCommitUsers' + STEAL_MIGRATION_CLASS = 'StealMigrateMergeRequestDiffCommitUsers' + + def up + old_jobs = Gitlab::Database::BackgroundMigrationJob + .for_migration_class(MIGRATION_CLASS) + .pending + .to_a + + return if old_jobs.empty? 
+ + transaction do + # This ensures we stop processing the old ranges, as the background + # migrations skip already processed jobs. + Gitlab::Database::BackgroundMigrationJob + .for_migration_class(MIGRATION_CLASS) + .pending + .update_all(status: :succeeded) + + rows = [] + + old_jobs.each do |job| + min, max = job.arguments + + while min < max + rows << { + class_name: MIGRATION_CLASS, + arguments: [min, min + BATCH_SIZE], + created_at: Time.now.utc, + updated_at: Time.now.utc + } + + min += BATCH_SIZE + end + end + + Gitlab::Database::BackgroundMigrationJob.insert_all!(rows) + end + + job = Gitlab::Database::BackgroundMigrationJob + .for_migration_class(MIGRATION_CLASS) + .pending + .first + + migrate_in(1.hour, STEAL_MIGRATION_CLASS, job.arguments) + end + + def down + # no-op + end +end diff --git a/db/post_migrate/20210901184511_prepare_async_indexes_for_ci_builds.rb b/db/post_migrate/20210901184511_prepare_async_indexes_for_ci_builds.rb new file mode 100644 index 00000000000..47795c5d646 --- /dev/null +++ b/db/post_migrate/20210901184511_prepare_async_indexes_for_ci_builds.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +class PrepareAsyncIndexesForCiBuilds < Gitlab::Database::Migration[1.0] + def up + prepare_async_index :ci_builds, :stage_id_convert_to_bigint, name: :index_ci_builds_on_converted_stage_id + + prepare_async_index :ci_builds, [:commit_id, :artifacts_expire_at, :id_convert_to_bigint], + where: "type::text = 'Ci::Build'::text + AND (retried = false OR retried IS NULL) + AND (name::text = ANY (ARRAY['sast'::character varying::text, + 'secret_detection'::character varying::text, + 'dependency_scanning'::character varying::text, + 'container_scanning'::character varying::text, + 'dast'::character varying::text]))", + name: :index_ci_builds_on_commit_id_expire_at_and_converted_id + + prepare_async_index :ci_builds, [:project_id, :id_convert_to_bigint], + name: :index_ci_builds_on_project_and_converted_id + + prepare_async_index :ci_builds, [:runner_id, :id_convert_to_bigint], + order: { id_convert_to_bigint: :desc }, + name: :index_ci_builds_on_runner_id_and_converted_id_desc + + prepare_async_index :ci_builds, [:resource_group_id, :id_convert_to_bigint], + where: 'resource_group_id IS NOT NULL', + name: :index_ci_builds_on_resource_group_and_converted_id + + prepare_async_index :ci_builds, [:name, :id_convert_to_bigint], + where: "(name::text = ANY (ARRAY['container_scanning'::character varying::text, + 'dast'::character varying::text, + 'dependency_scanning'::character varying::text, + 'license_management'::character varying::text, + 'sast'::character varying::text, + 'secret_detection'::character varying::text, + 'coverage_fuzzing'::character varying::text, + 'license_scanning'::character varying::text]) + ) AND type::text = 'Ci::Build'::text", + name: :index_security_ci_builds_on_name_and_converted_id_parser + + prepare_async_index_from_sql(:ci_builds, :index_ci_builds_runner_id_and_converted_id_pending_covering, <<~SQL.squish) + CREATE INDEX CONCURRENTLY index_ci_builds_runner_id_and_converted_id_pending_covering + ON ci_builds (runner_id, id_convert_to_bigint) INCLUDE (project_id) + WHERE status::text = 'pending'::text AND type::text = 'Ci::Build'::text + SQL + end + + def down + unprepare_async_index_by_name :ci_builds, :index_ci_builds_runner_id_and_converted_id_pending_covering + + unprepare_async_index_by_name :ci_builds, :index_security_ci_builds_on_name_and_converted_id_parser + + unprepare_async_index_by_name :ci_builds, 
:index_ci_builds_on_resource_group_and_converted_id + + unprepare_async_index_by_name :ci_builds, :index_ci_builds_on_runner_id_and_converted_id_desc + + unprepare_async_index_by_name :ci_builds, :index_ci_builds_on_project_and_converted_id + + unprepare_async_index_by_name :ci_builds, :index_ci_builds_on_commit_id_expire_at_and_converted_id + + unprepare_async_index_by_name :ci_builds, :index_ci_builds_on_converted_stage_id + end + + private + + def prepare_async_index_from_sql(table_name, index_name, definition) + return unless async_index_creation_available? + + return if index_name_exists?(table_name, index_name) + + async_index = Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.find_or_create_by!(name: index_name) do |rec| + rec.table_name = table_name + rec.definition = definition + end + + Gitlab::AppLogger.info( + message: 'Prepared index for async creation', + table_name: async_index.table_name, + index_name: async_index.name) + end +end diff --git a/db/post_migrate/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs.rb b/db/post_migrate/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs.rb new file mode 100644 index 00000000000..f04c5cd6561 --- /dev/null +++ b/db/post_migrate/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +class DropTemporaryColumnsAndTriggersForCiBuildNeeds < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + TABLE = 'ci_build_needs' + TEMPORARY_COLUMN = 'build_id_convert_to_bigint' + MAIN_COLUMN = 'build_id' + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + with_lock_retries do + cleanup_conversion_of_integer_to_bigint(TABLE, MAIN_COLUMN) + end + end + + def down + check_trigger_permissions!(TABLE) + + with_lock_retries do + add_column(TABLE, TEMPORARY_COLUMN, :int, default: 0, null: false) + install_rename_triggers(TABLE, MAIN_COLUMN, TEMPORARY_COLUMN) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod +end diff --git a/db/post_migrate/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks.rb b/db/post_migrate/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks.rb new file mode 100644 index 00000000000..44bec402cae --- /dev/null +++ b/db/post_migrate/20210906100316_drop_temporary_columns_and_triggers_for_ci_build_trace_chunks.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +class DropTemporaryColumnsAndTriggersForCiBuildTraceChunks < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + TABLE = 'ci_build_trace_chunks' + COLUMN = 'build_id' + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + with_lock_retries do + cleanup_conversion_of_integer_to_bigint(TABLE, COLUMN) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod + + def down + restore_conversion_of_integer_to_bigint(TABLE, COLUMN) + end +end diff --git a/db/post_migrate/20210907021940_cleanup_bigint_conversion_for_ci_stages.rb b/db/post_migrate/20210907021940_cleanup_bigint_conversion_for_ci_stages.rb new file mode 100644 index 00000000000..096b5b3bf42 --- /dev/null +++ b/db/post_migrate/20210907021940_cleanup_bigint_conversion_for_ci_stages.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class CleanupBigintConversionForCiStages < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! 
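+
+  # Drops the temporary `id_convert_to_bigint` column and its copy trigger now that
+  # the `id` conversion is finalized; `down` restores them via
+  # `restore_conversion_of_integer_to_bigint` so the conversion could be redone.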
+ + TABLE = :ci_stages + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + with_lock_retries do + cleanup_conversion_of_integer_to_bigint(TABLE, :id) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod + + def down + restore_conversion_of_integer_to_bigint(TABLE, :id) + end +end diff --git a/db/post_migrate/20210907033745_cleanup_bigint_conversion_for_deployments.rb b/db/post_migrate/20210907033745_cleanup_bigint_conversion_for_deployments.rb new file mode 100644 index 00000000000..2d71c11cfa8 --- /dev/null +++ b/db/post_migrate/20210907033745_cleanup_bigint_conversion_for_deployments.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class CleanupBigintConversionForDeployments < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + TABLE = :deployments + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + with_lock_retries do + cleanup_conversion_of_integer_to_bigint(TABLE, :deployable_id) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod + + def down + restore_conversion_of_integer_to_bigint(TABLE, :deployable_id) + end +end diff --git a/db/post_migrate/20210907041000_cleanup_bigint_conversion_for_geo_job_artifact_deleted_events.rb b/db/post_migrate/20210907041000_cleanup_bigint_conversion_for_geo_job_artifact_deleted_events.rb new file mode 100644 index 00000000000..26f00454029 --- /dev/null +++ b/db/post_migrate/20210907041000_cleanup_bigint_conversion_for_geo_job_artifact_deleted_events.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class CleanupBigintConversionForGeoJobArtifactDeletedEvents < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + TABLE = :geo_job_artifact_deleted_events + + # rubocop:disable Migration/WithLockRetriesDisallowedMethod + def up + with_lock_retries do + cleanup_conversion_of_integer_to_bigint(TABLE, :job_artifact_id) + end + end + # rubocop:enable Migration/WithLockRetriesDisallowedMethod + + def down + restore_conversion_of_integer_to_bigint(TABLE, :job_artifact_id) + end +end diff --git a/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb b/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb new file mode 100644 index 00000000000..872eef5fd31 --- /dev/null +++ b/db/post_migrate/20210907211557_finalize_ci_builds_bigint_conversion.rb @@ -0,0 +1,222 @@ +# frozen_string_literal: true + +class FinalizeCiBuildsBigintConversion < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! 
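+
+  # Finalizes the `ci_builds.id` integer-to-bigint conversion: secondary indexes and
+  # referencing foreign keys are first re-created against `id_convert_to_bigint`, then
+  # the column names, defaults, sequence ownership and primary key are swapped inside
+  # a single lock-retries block.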
+ + TABLE_NAME = 'ci_builds' + PK_INDEX_NAME = 'index_ci_builds_on_converted_id' + + SECONDARY_INDEXES = [ + { + original_name: :index_ci_builds_on_commit_id_artifacts_expired_at_and_id, + temporary_name: :index_ci_builds_on_commit_id_expire_at_and_converted_id, + columns: [:commit_id, :artifacts_expire_at, :id_convert_to_bigint], + options: { + where: "type::text = 'Ci::Build'::text + AND (retried = false OR retried IS NULL) + AND (name::text = ANY (ARRAY['sast'::character varying::text, + 'secret_detection'::character varying::text, + 'dependency_scanning'::character varying::text, + 'container_scanning'::character varying::text, + 'dast'::character varying::text]))" + } + }, + { + original_name: :index_ci_builds_on_project_id_and_id, + temporary_name: :index_ci_builds_on_project_and_converted_id, + columns: [:project_id, :id_convert_to_bigint], + options: {} + }, + { + original_name: :index_ci_builds_on_runner_id_and_id_desc, + temporary_name: :index_ci_builds_on_runner_id_and_converted_id_desc, + columns: [:runner_id, :id_convert_to_bigint], + options: { order: { id_convert_to_bigint: :desc } } + }, + { + original_name: :index_for_resource_group, + temporary_name: :index_ci_builds_on_resource_group_and_converted_id, + columns: [:resource_group_id, :id_convert_to_bigint], + options: { where: 'resource_group_id IS NOT NULL' } + }, + { + original_name: :index_security_ci_builds_on_name_and_id_parser_features, + temporary_name: :index_security_ci_builds_on_name_and_converted_id_parser, + columns: [:name, :id_convert_to_bigint], + options: { + where: "(name::text = ANY (ARRAY['container_scanning'::character varying::text, + 'dast'::character varying::text, + 'dependency_scanning'::character varying::text, + 'license_management'::character varying::text, + 'sast'::character varying::text, + 'secret_detection'::character varying::text, + 'coverage_fuzzing'::character varying::text, + 'license_scanning'::character varying::text]) + ) AND type::text = 'Ci::Build'::text" + } + } + ].freeze + + MANUAL_INDEX_NAMES = { + original_name: :index_ci_builds_runner_id_pending_covering, + temporary_name: :index_ci_builds_runner_id_and_converted_id_pending_covering + }.freeze + + REFERENCING_FOREIGN_KEYS = [ + [:ci_build_needs, :build_id, :cascade, 'fk_rails_'], + [:ci_build_pending_states, :build_id, :cascade, 'fk_rails_'], + [:ci_build_report_results, :build_id, :cascade, 'fk_rails_'], + [:ci_build_trace_chunks, :build_id, :cascade, 'fk_rails_'], + [:ci_build_trace_metadata, :build_id, :cascade, 'fk_rails_'], + [:ci_builds_runner_session, :build_id, :cascade, 'fk_rails_'], + [:ci_builds_metadata, :build_id, :cascade, 'fk_'], + [:ci_job_artifacts, :job_id, :cascade, 'fk_rails_'], + [:ci_job_variables, :job_id, :cascade, 'fk_rails_'], + [:ci_pending_builds, :build_id, :cascade, 'fk_rails_'], + [:ci_resources, :build_id, :nullify, 'fk_'], + [:ci_running_builds, :build_id, :cascade, 'fk_rails_'], + [:ci_sources_pipelines, :source_job_id, :cascade, 'fk_'], + [:ci_unit_test_failures, :build_id, :cascade, 'fk_'], + [:dast_scanner_profiles_builds, :ci_build_id, :cascade, 'fk_'], + [:dast_site_profiles_builds, :ci_build_id, :cascade, 'fk_'], + [:pages_deployments, :ci_build_id, :nullify, 'fk_rails_'], + [:requirements_management_test_reports, :build_id, :nullify, 'fk_rails_'], + [:security_scans, :build_id, :cascade, 'fk_rails_'], + [:terraform_state_versions, :ci_build_id, :nullify, 'fk_'] + ].freeze + + def up + ensure_batched_background_migration_is_finished( + job_class_name: 
'CopyColumnUsingBackgroundMigrationJob', + table_name: TABLE_NAME, + column_name: 'id', + job_arguments: [%w[id stage_id], %w[id_convert_to_bigint stage_id_convert_to_bigint]] + ) + + # Remove this upfront since this table is being dropped, and doesn't need to be migrated + if foreign_key_exists?(:dep_ci_build_trace_sections, TABLE_NAME, column: :build_id) + remove_foreign_key(:dep_ci_build_trace_sections, TABLE_NAME, column: :build_id) + end + + # Remove this unexpected FK if it exists - https://gitlab.com/gitlab-com/gl-infra/production/-/issues/5531#note_676576081 + if foreign_key_exists?(:ci_resources, TABLE_NAME, column: :build_id, name: 'fk_rails_e169a8e3d5') + remove_foreign_key(:ci_resources, TABLE_NAME, column: :build_id, name: 'fk_rails_e169a8e3d5') + end + + swap_columns + end + + def down + swap_columns + end + + private + + def swap_columns + # Copy existing indexes from the original column to the new column + create_indexes + # Copy existing FKs from the original column to the new column + create_referencing_foreign_keys + + # Remove existing FKs from the referencing tables, so we don't have to lock on them when we drop the existing PK + replace_referencing_foreign_keys + + with_lock_retries(raise_on_exhaustion: true) do + quoted_table_name = quote_table_name(TABLE_NAME) + + # Swap the original and new column names + temporary_name = 'id_tmp' + execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(:id)} TO #{quote_column_name(temporary_name)}" + execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(:id_convert_to_bigint)} TO #{quote_column_name(:id)}" + execute "ALTER TABLE #{quoted_table_name} RENAME COLUMN #{quote_column_name(temporary_name)} TO #{quote_column_name(:id_convert_to_bigint)}" + + # Reset the function so PG drops the plan cache for the incorrect integer type + function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME) + .name([:id, :stage_id], [:id_convert_to_bigint, :stage_id_convert_to_bigint]) + execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL" + + # Swap defaults of the two columns, and change ownership of the sequence to the new id + execute "ALTER SEQUENCE ci_builds_id_seq OWNED BY #{TABLE_NAME}.id" + change_column_default TABLE_NAME, :id, -> { "nextval('ci_builds_id_seq'::regclass)" } + change_column_default TABLE_NAME, :id_convert_to_bigint, 0 + + # Swap the PK constraint from the original column to the new column + # We deliberately don't CASCADE here because the old FKs should be removed already + execute "ALTER TABLE #{quoted_table_name} DROP CONSTRAINT ci_builds_pkey" + rename_index TABLE_NAME, PK_INDEX_NAME, 'ci_builds_pkey' + execute "ALTER TABLE #{quoted_table_name} ADD CONSTRAINT ci_builds_pkey PRIMARY KEY USING INDEX ci_builds_pkey" + + # Remove old column indexes and change new column indexes to have the original names + rename_secondary_indexes # rubocop:disable Migration/WithLockRetriesDisallowedMethod + end + end + + def create_indexes + add_concurrent_index TABLE_NAME, :id_convert_to_bigint, unique: true, name: PK_INDEX_NAME + + SECONDARY_INDEXES.each do |index_definition| + options = index_definition[:options] + options[:name] = index_definition[:temporary_name] + + add_concurrent_index(TABLE_NAME, index_definition[:columns], options) + end + + unless index_name_exists?(TABLE_NAME, MANUAL_INDEX_NAMES[:temporary_name]) + execute(<<~SQL) + CREATE INDEX CONCURRENTLY #{MANUAL_INDEX_NAMES[:temporary_name]} + ON ci_builds (runner_id, id_convert_to_bigint) 
INCLUDE (project_id) + WHERE status::text = 'pending'::text AND type::text = 'Ci::Build'::text + SQL + end + end + + def rename_secondary_indexes + (SECONDARY_INDEXES + [MANUAL_INDEX_NAMES]).each do |index_definition| + remove_index(TABLE_NAME, name: index_definition[:original_name]) # rubocop:disable Migration/RemoveIndex + rename_index(TABLE_NAME, index_definition[:temporary_name], index_definition[:original_name]) + end + end + + def create_referencing_foreign_keys + REFERENCING_FOREIGN_KEYS.each do |(from_table, column, on_delete, prefix)| + # Don't attempt to create the FK if one already exists from the table to the new column + # The check in `add_concurrent_foreign_key` already checks for this, but it looks for the foreign key + # with the new name only (containing the `_tmp` suffix). + # + # Since we might partially rename FKs and re-run the migration, we also have to check and see if a FK exists + # on those columns that might not match the `_tmp` name. + next if foreign_key_exists?(from_table, TABLE_NAME, column: column, primary_key: :id_convert_to_bigint) + + temporary_name = "#{concurrent_foreign_key_name(from_table, column, prefix: prefix)}_tmp" + + add_concurrent_foreign_key( + from_table, + TABLE_NAME, + column: column, + target_column: :id_convert_to_bigint, + name: temporary_name, + on_delete: on_delete, + reverse_lock_order: true) + end + end + + def replace_referencing_foreign_keys + REFERENCING_FOREIGN_KEYS.each do |(from_table, column, _, prefix)| + existing_name = concurrent_foreign_key_name(from_table, column, prefix: prefix) + + # Don't attempt to replace the FK unless it exists and points at the original column. + # This could happen if the migration is re-run due to failing midway. + next unless foreign_key_exists?(from_table, TABLE_NAME, column: column, primary_key: :id, name: existing_name) + + with_lock_retries do + # Explicitly lock table in order of parent, child to attempt to avoid deadlocks + execute "LOCK TABLE #{TABLE_NAME}, #{from_table} IN ACCESS EXCLUSIVE MODE" + + temporary_name = "#{existing_name}_tmp" + + remove_foreign_key(from_table, TABLE_NAME, column: column, primary_key: :id, name: existing_name) + rename_constraint(from_table, temporary_name, existing_name) + end + end + end +end diff --git a/db/post_migrate/20210908132335_disable_job_token_scope_when_unused.rb b/db/post_migrate/20210908132335_disable_job_token_scope_when_unused.rb new file mode 100644 index 00000000000..2e6ad12f928 --- /dev/null +++ b/db/post_migrate/20210908132335_disable_job_token_scope_when_unused.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +class DisableJobTokenScopeWhenUnused < Gitlab::Database::Migration[1.0] + disable_ddl_transaction! + + class ProjectCiCdSetting < ApplicationRecord + include EachBatch + + self.table_name = 'project_ci_cd_settings' + end + + module Ci + module JobToken + class ProjectScopeLink < ApplicationRecord + self.table_name = 'ci_job_token_project_scope_links' + end + end + end + + def up + # Disabling job token scope after db/migrate/20210902171808_set_default_job_token_scope_false.rb + # if users haven't configured it. + ProjectCiCdSetting.each_batch(of: 10_000) do |settings| + with_enabled_but_unused_scope(settings).each_batch(of: 500) do |settings_to_update| + settings_to_update.update_all(job_token_scope_enabled: false) + end + end + end + + def down + # irreversible data migration + + # The migration relies on the state of `job_token_scope_enabled` and + # updates it based on whether the feature is used or not. 
+    #
+    # The inverse migration would be to set `job_token_scope_enabled: true`
+    # for those projects that have the feature disabled and unused. But there
+    # could also be cases where the feature was already disabled and unused
+    # before this migration ran (for example, old projects).
+  end
+
+  private
+
+  # The presence of ProjectScopeLinks means that the job token scope
+  # is configured and we need to leave it enabled. Unused job token scopes
+  # can be disabled since they weren't configured.
+  def with_enabled_but_unused_scope(settings)
+    settings
+      .where(job_token_scope_enabled: true)
+      .where.not(project_id: Ci::JobToken::ProjectScopeLink.select(:source_project_id))
+  end
+end
diff --git a/db/post_migrate/20210909104800_reschedule_extract_project_topics_into_separate_table_2.rb b/db/post_migrate/20210909104800_reschedule_extract_project_topics_into_separate_table_2.rb
new file mode 100644
index 00000000000..ad31a40f324
--- /dev/null
+++ b/db/post_migrate/20210909104800_reschedule_extract_project_topics_into_separate_table_2.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class RescheduleExtractProjectTopicsIntoSeparateTable2 < Gitlab::Database::Migration[1.0]
+  MIGRATION = 'ExtractProjectTopicsIntoSeparateTable'
+  DELAY_INTERVAL = 4.minutes
+
+  disable_ddl_transaction!
+
+  def up
+    requeue_background_migration_jobs_by_range_at_intervals(MIGRATION, DELAY_INTERVAL)
+  end
+
+  def down
+    # no-op
+  end
+end
diff --git a/db/post_migrate/20210909152027_remove_container_registry_enabled.rb b/db/post_migrate/20210909152027_remove_container_registry_enabled.rb
new file mode 100644
index 00000000000..d97faaf58d2
--- /dev/null
+++ b/db/post_migrate/20210909152027_remove_container_registry_enabled.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class RemoveContainerRegistryEnabled < Gitlab::Database::Migration[1.0]
+  disable_ddl_transaction!
+
+  def up
+    with_lock_retries do
+      remove_column :projects, :container_registry_enabled
+    end
+  end
+
+  def down
+    with_lock_retries do
+      add_column :projects, :container_registry_enabled, :boolean # rubocop:disable Migration/AddColumnsToWideTables
+    end
+  end
+end
diff --git a/db/post_migrate/20210914094840_add_gin_index_on_pending_builds_namespace_traversal_ids.rb b/db/post_migrate/20210914094840_add_gin_index_on_pending_builds_namespace_traversal_ids.rb
new file mode 100644
index 00000000000..3584c7136e3
--- /dev/null
+++ b/db/post_migrate/20210914094840_add_gin_index_on_pending_builds_namespace_traversal_ids.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class AddGinIndexOnPendingBuildsNamespaceTraversalIds < Gitlab::Database::Migration[1.0]
+  disable_ddl_transaction!
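+
+  # `namespace_traversal_ids` is an array column, so a GIN index is added to support
+  # array operators (for example `@>`) in queries against pending builds.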
+
+  INDEX_NAME = 'index_gin_ci_pending_builds_on_namespace_traversal_ids'
+
+  def up
+    add_concurrent_index :ci_pending_builds, :namespace_traversal_ids, name: INDEX_NAME, using: :gin
+  end
+
+  def down
+    remove_concurrent_index_by_name :ci_pending_builds, INDEX_NAME
+  end
+end
diff --git a/db/post_migrate/20210915202900_prepare_index_resource_group_status_commit_id_for_ci_builds.rb b/db/post_migrate/20210915202900_prepare_index_resource_group_status_commit_id_for_ci_builds.rb
new file mode 100644
index 00000000000..42d21806405
--- /dev/null
+++ b/db/post_migrate/20210915202900_prepare_index_resource_group_status_commit_id_for_ci_builds.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class PrepareIndexResourceGroupStatusCommitIdForCiBuilds < Gitlab::Database::Migration[1.0]
+  INDEX_NAME = 'index_ci_builds_on_resource_group_and_status_and_commit_id'
+
+  def up
+    prepare_async_index :ci_builds, [:resource_group_id, :status, :commit_id],
+      where: 'resource_group_id IS NOT NULL',
+      name: INDEX_NAME
+  end
+
+  def down
+    unprepare_async_index_by_name :ci_builds, INDEX_NAME
+  end
+end
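
For reference, the column swap performed by FinalizeCiBuildsBigintConversion above can be reduced to the minimal sketch below. This is not part of the commit: the table `t`, the sequence `t_id_seq`, the constraint `t_pkey` and the pre-built unique index `index_t_on_id_convert_to_bigint` are assumed names, and the real migration additionally re-creates secondary indexes and referencing foreign keys before performing the swap.

# frozen_string_literal: true

# Hypothetical reduction of the id/id_convert_to_bigint swap; all names are assumptions.
class SwapBigintIdSketch < ActiveRecord::Migration[6.1]
  def up
    # Assumes no foreign keys still reference the old primary key and that a unique
    # index `index_t_on_id_convert_to_bigint` was already built on the bigint column.
    execute 'ALTER TABLE t RENAME COLUMN id TO id_tmp'
    execute 'ALTER TABLE t RENAME COLUMN id_convert_to_bigint TO id'
    execute 'ALTER TABLE t RENAME COLUMN id_tmp TO id_convert_to_bigint'

    # Point the sequence and the column defaults at the new bigint id
    execute 'ALTER SEQUENCE t_id_seq OWNED BY t.id'
    change_column_default :t, :id, -> { "nextval('t_id_seq'::regclass)" }
    change_column_default :t, :id_convert_to_bigint, 0

    # Promote the pre-built unique index on the bigint column to the primary key
    execute 'ALTER TABLE t DROP CONSTRAINT t_pkey'
    execute 'ALTER INDEX index_t_on_id_convert_to_bigint RENAME TO t_pkey'
    execute 'ALTER TABLE t ADD CONSTRAINT t_pkey PRIMARY KEY USING INDEX t_pkey'
  end

  def down
    # The real migration re-creates the temporary index and foreign keys and then
    # runs the same swap again; omitted here for brevity.
  end
end

Performing the renames, default changes and primary-key promotion inside one lock-retries block, as the real migration does, keeps the window during which the table is exclusively locked as short as possible.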