author    Robert Speicher <rspeicher@gmail.com>  2019-06-17 16:24:10 +0000
committer Robert Speicher <rspeicher@gmail.com>  2019-06-17 16:24:10 +0000
commit    0ca3c672c739b118258cd12d3f5de954bded6895 (patch)
tree      0da6f44a940aea75fd2f4b45acf3fd523016f6b8
parent    982d62a0452b2e806e363aa7be1f1a169227fc7f (diff)
parent    d5baece038d043881279ef1182cc977ddec60294 (diff)
Merge branch 'backport-schema-changes' into 'master'

Backport the EE schema to CE

Closes gitlab-ee#9686

See merge request gitlab-org/gitlab-ce!26940
-rw-r--r--  .gitlab/ci/rails.gitlab-ci.yml  2
-rw-r--r--  changelogs/unreleased/backport-schema-changes.yml  5
-rw-r--r--  danger/database/Dangerfile  1
-rw-r--r--  db/migrate/20161007073613_create_user_activities.rb  25
-rw-r--r--  db/migrate/20161117114805_remove_undeleted_groups.rb  105
-rw-r--r--  db/migrate/20171121135738_clean_up_from_merge_request_diffs_and_commits.rb  36
-rw-r--r--  db/migrate/20171216111734_clean_up_for_members.rb  31
-rw-r--r--  db/migrate/20180502122856_create_project_mirror_data.rb  24
-rw-r--r--  db/migrate/20180503131624_create_remote_mirrors.rb  2
-rw-r--r--  db/migrate/20180503141722_add_remote_mirror_available_overridden_to_projects.rb  2
-rw-r--r--  db/migrate/20180503193542_add_indexes_to_remote_mirror.rb  1
-rw-r--r--  db/migrate/20180503193953_add_mirror_available_to_application_settings.rb  2
-rw-r--r--  db/migrate/20190402150158_backport_enterprise_schema.rb  2144
-rw-r--r--  db/migrate/20190403161806_update_designs_index.rb  21
-rw-r--r--  db/migrate/20190409224933_add_name_to_geo_nodes.rb  26
-rw-r--r--  db/migrate/20190410173409_add_name_index_to_geo_nodes.rb  21
-rw-r--r--  db/migrate/20190412183653_remove_url_index_from_geo_nodes.rb  21
-rw-r--r--  db/migrate/20190414185432_add_comment_to_vulnerability_feedback.rb  17
-rw-r--r--  db/migrate/20190415172035_update_insights_foreign_keys.rb  25
-rw-r--r--  db/migrate/20190418132750_add_foreign_key_from_vulnerability_feedback_to_users.rb  19
-rw-r--r--  db/migrate/20190419121952_add_bridged_pipeline_id_to_bridges.rb  15
-rw-r--r--  db/migrate/20190419123057_add_bridged_pipeline_id_foreign_key.rb  23
-rw-r--r--  db/migrate/20190423124640_add_index_to_projects_mirror_user_id.rb  17
-rw-r--r--  db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb  21
-rw-r--r--  db/migrate/20190528173628_add_index_for_code_owner_rule_type_on_approval_merge_request_rules.rb  55
-rw-r--r--  db/migrate/20190603124955_add_index_to_count_pending_mirror_updates.rb  17
-rw-r--r--  db/post_migrate/20161128170531_drop_user_activities_table.rb  28
-rw-r--r--  db/post_migrate/20170502101023_cleanup_namespaceless_pending_delete_projects.rb  47
-rw-r--r--  db/post_migrate/20170703130158_schedule_merge_request_diff_migrations.rb  33
-rw-r--r--  db/post_migrate/20170926150348_schedule_merge_request_diff_migrations_take_two.rb  32
-rw-r--r--  db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb  28
-rw-r--r--  db/post_migrate/20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb  37
-rw-r--r--  db/post_migrate/20171207150343_remove_soft_removed_objects.rb  208
-rw-r--r--  db/post_migrate/20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb  37
-rw-r--r--  db/post_migrate/20180521162137_migrate_remaining_mr_metrics_populating_background_migration.rb  44
-rw-r--r--  db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb  26
-rw-r--r--  db/post_migrate/20180723130817_delete_inconsistent_internal_id_records.rb  47
-rw-r--r--  db/post_migrate/20180906051323_remove_orphaned_label_links.rb  43
-rw-r--r--  db/post_migrate/20180913051323_consume_remaining_diff_files_deletion_jobs.rb  23
-rw-r--r--  db/post_migrate/20180916014356_populate_external_pipeline_source.rb  33
-rw-r--r--  db/post_migrate/20181014121030_enqueue_redact_links.rb  65
-rw-r--r--  db/post_migrate/20181204154019_populate_mr_metrics_with_events_data.rb  38
-rw-r--r--  db/post_migrate/20190402224749_schedule_merge_request_assignees_migration_progress_check.rb  18
-rw-r--r--  db/post_migrate/20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb  55
-rw-r--r--  db/post_migrate/20190404231137_remove_alternate_url_from_geo_nodes.rb  21
-rw-r--r--  db/post_migrate/20190418132125_populate_project_statistics_packages_size.rb  46
-rw-r--r--  db/post_migrate/20190511144331_remove_users_support_type.rb  27
-rw-r--r--  db/post_migrate/20190520201748_populate_rule_type_on_approval_merge_request_rules.rb  34
-rw-r--r--  db/schema.rb  1200
-rw-r--r--  doc/development/import_export.md  3
-rw-r--r--  lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb  56
-rw-r--r--  lib/gitlab/background_migration/delete_diff_files.rb  81
-rw-r--r--  lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb  149
-rw-r--r--  lib/gitlab/background_migration/merge_request_assignees_migration_progress_check.rb  43
-rw-r--r--  lib/gitlab/background_migration/populate_external_pipeline_source.rb  50
-rw-r--r--  lib/gitlab/background_migration/populate_import_state.rb  39
-rw-r--r--  lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb  132
-rw-r--r--  lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved.rb  99
-rw-r--r--  lib/gitlab/background_migration/redact_links.rb  51
-rw-r--r--  lib/gitlab/background_migration/redact_links/redactable.rb  21
-rw-r--r--  lib/gitlab/background_migration/rollback_import_state_data.rb  40
-rw-r--r--  lib/gitlab/background_migration/schedule_diff_files_deletion.rb  44
-rw-r--r--  lib/gitlab/database/migration_helpers.rb  14
-rw-r--r--  lib/gitlab/import_export/config.rb  78
-rw-r--r--  lib/gitlab/import_export/import_export.yml  35
-rw-r--r--  lib/gitlab/import_export/reader.rb  2
-rw-r--r--  lib/tasks/gitlab/import_export.rake  2
-rw-r--r--  spec/db/schema_spec.rb  25
-rw-r--r--  spec/features/projects/import_export/export_file_spec.rb  2
-rw-r--r--  spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb  32
-rw-r--r--  spec/lib/gitlab/background_migration/delete_diff_files_spec.rb  81
-rw-r--r--  spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb  326
-rw-r--r--  spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb  95
-rw-r--r--  spec/lib/gitlab/background_migration/populate_external_pipeline_source_spec.rb  72
-rw-r--r--  spec/lib/gitlab/background_migration/populate_import_state_spec.rb  38
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved_spec.rb  57
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb  132
-rw-r--r--  spec/lib/gitlab/background_migration/redact_links_spec.rb  96
-rw-r--r--  spec/lib/gitlab/background_migration/rollback_import_state_data_spec.rb  28
-rw-r--r--  spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb  43
-rw-r--r--  spec/lib/gitlab/danger/helper_spec.rb  1
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb  27
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml  104
-rw-r--r--  spec/lib/gitlab/import_export/attribute_configuration_spec.rb  12
-rw-r--r--  spec/lib/gitlab/import_export/config_spec.rb  164
-rw-r--r--  spec/lib/gitlab/import_export/model_configuration_spec.rb  26
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml  54
-rw-r--r--  spec/migrations/active_record/schema_spec.rb  3
-rw-r--r--  spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb  57
-rw-r--r--  spec/migrations/clean_up_for_members_spec.rb  83
-rw-r--r--  spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb  34
-rw-r--r--  spec/migrations/create_missing_namespace_for_internal_users_spec.rb  41
-rw-r--r--  spec/migrations/delete_inconsistent_internal_id_records_spec.rb  161
-rw-r--r--  spec/migrations/enqueue_delete_diff_files_workers_spec.rb  17
-rw-r--r--  spec/migrations/enqueue_redact_links_spec.rb  42
-rw-r--r--  spec/migrations/migrate_import_attributes_data_from_projects_to_project_mirror_data_spec.rb  56
-rw-r--r--  spec/migrations/migrate_remaining_mr_metrics_populating_background_migration_spec.rb  36
-rw-r--r--  spec/migrations/populate_mr_metrics_with_events_data_spec.rb  47
-rw-r--r--  spec/migrations/populate_project_statistics_packages_size_spec.rb  37
-rw-r--r--  spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb  39
-rw-r--r--  spec/migrations/remove_orphaned_label_links_spec.rb  46
-rw-r--r--  spec/migrations/remove_soft_removed_objects_spec.rb  99
-rw-r--r--  spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb  31
-rw-r--r--  spec/migrations/schedule_merge_request_assignees_migration_progress_check_spec.rb  16
-rw-r--r--  spec/migrations/schedule_merge_request_diff_migrations_spec.rb  46
-rw-r--r--  spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb  46
-rw-r--r--  spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb  30
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb  2
108 files changed, 4716 insertions, 3455 deletions
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index da4541ab5c4..68280506da2 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -111,7 +111,7 @@
variables:
SETUP_DB: "false"
script:
- - git fetch https://gitlab.com/gitlab-org/gitlab-ce.git v9.3.0
+ - git fetch https://gitlab.com/gitlab-org/gitlab-ce.git v11.11.0
- git checkout -f FETCH_HEAD
- sed -i "s/gem 'oj', '~> 2.17.4'//" Gemfile
- sed -i "s/gem 'bootsnap', '~> 1.0.0'/gem 'bootsnap'/" Gemfile
diff --git a/changelogs/unreleased/backport-schema-changes.yml b/changelogs/unreleased/backport-schema-changes.yml
new file mode 100644
index 00000000000..58f3ca72b0b
--- /dev/null
+++ b/changelogs/unreleased/backport-schema-changes.yml
@@ -0,0 +1,5 @@
+---
+title: Backport the EE schema and migrations to CE
+merge_request: 26940
+author: Yorick Peterse
+type: other
diff --git a/danger/database/Dangerfile b/danger/database/Dangerfile
index 38ccbd94edb..4dadf60ad24 100644
--- a/danger/database/Dangerfile
+++ b/danger/database/Dangerfile
@@ -13,7 +13,6 @@ DB_FILES = [
'lib/gitlab/github_import/',
'lib/gitlab/sql/',
'rubocop/cop/migration',
- 'ee/db/',
'ee/lib/gitlab/database/'
].freeze
diff --git a/db/migrate/20161007073613_create_user_activities.rb b/db/migrate/20161007073613_create_user_activities.rb
index be3ecb17eef..dc972bf4664 100644
--- a/db/migrate/20161007073613_create_user_activities.rb
+++ b/db/migrate/20161007073613_create_user_activities.rb
@@ -1,7 +1,28 @@
+
class CreateUserActivities < ActiveRecord::Migration[4.2]
- DOWNTIME = false
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = true
+
+ # When a migration requires downtime you **must** uncomment the following
+ # constant and define a short and easy to understand explanation as to why the
+ # migration requires downtime.
+ DOWNTIME_REASON = 'Adding foreign key'.freeze
+
+ # When using the methods "add_concurrent_index" or "add_column_with_default"
+ # you must disable the use of transactions as these methods can not run in an
+ # existing transaction. When using "add_concurrent_index" make sure that this
+ # method is the _only_ method called in the migration, any other changes
+ # should go in a separate migration. This ensures that upon failure _only_ the
+ # index creation fails and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
- # This migration is a no-op. It just exists to match EE.
def change
+ create_table :user_activities do |t|
+ t.belongs_to :user, index: { unique: true }, foreign_key: { on_delete: :cascade }
+ t.datetime :last_activity_at, null: false
+ end
end
end
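
The template comments restored above spell out GitLab's convention: add_concurrent_index cannot run inside a DDL transaction, so a migration using it must call disable_ddl_transaction! and make the index change its only change. A minimal sketch of a migration that follows the convention; the table and column are illustrative, not part of this commit:

    # Illustrative only: index the table created above, following the
    # add_concurrent_index convention described in the template comments.
    class AddIndexToUserActivitiesLastActivityAt < ActiveRecord::Migration[4.2]
      include Gitlab::Database::MigrationHelpers

      DOWNTIME = false

      # Required because CREATE INDEX CONCURRENTLY cannot run in a transaction.
      disable_ddl_transaction!

      def up
        add_concurrent_index :user_activities, :last_activity_at
      end

      def down
        remove_concurrent_index :user_activities, :last_activity_at
      end
    end

Keeping the index change in its own migration means a failed CREATE INDEX CONCURRENTLY can be retried or reverted without touching unrelated DDL.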
diff --git a/db/migrate/20161117114805_remove_undeleted_groups.rb b/db/migrate/20161117114805_remove_undeleted_groups.rb
deleted file mode 100644
index 0a4fe1c05b7..00000000000
--- a/db/migrate/20161117114805_remove_undeleted_groups.rb
+++ /dev/null
@@ -1,105 +0,0 @@
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class RemoveUndeletedGroups < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
- def up
- is_ee = defined?(Gitlab::License)
-
- if is_ee
- execute <<-EOF.strip_heredoc
- DELETE FROM path_locks
- WHERE project_id IN (
- SELECT project_id
- FROM projects
- WHERE namespace_id IN (#{namespaces_pending_removal})
- );
- EOF
-
- execute <<-EOF.strip_heredoc
- DELETE FROM remote_mirrors
- WHERE project_id IN (
- SELECT project_id
- FROM projects
- WHERE namespace_id IN (#{namespaces_pending_removal})
- );
- EOF
- end
-
- execute <<-EOF.strip_heredoc
- DELETE FROM lists
- WHERE label_id IN (
- SELECT id
- FROM labels
- WHERE group_id IN (#{namespaces_pending_removal})
- );
- EOF
-
- execute <<-EOF.strip_heredoc
- DELETE FROM lists
- WHERE board_id IN (
- SELECT id
- FROM boards
- WHERE project_id IN (
- SELECT project_id
- FROM projects
- WHERE namespace_id IN (#{namespaces_pending_removal})
- )
- );
- EOF
-
- execute <<-EOF.strip_heredoc
- DELETE FROM labels
- WHERE group_id IN (#{namespaces_pending_removal});
- EOF
-
- execute <<-EOF.strip_heredoc
- DELETE FROM boards
- WHERE project_id IN (
- SELECT project_id
- FROM projects
- WHERE namespace_id IN (#{namespaces_pending_removal})
- )
- EOF
-
- execute <<-EOF.strip_heredoc
- DELETE FROM projects
- WHERE namespace_id IN (#{namespaces_pending_removal});
- EOF
-
- if is_ee
- # EE adds these columns but we have to make sure this data is cleaned up
- # here before we run the DELETE below. An alternative would be patching
- # this migration in EE but this will only result in a mess and confusing
- # migrations.
- execute <<-EOF.strip_heredoc
- DELETE FROM protected_branch_push_access_levels
- WHERE group_id IN (#{namespaces_pending_removal});
- EOF
-
- execute <<-EOF.strip_heredoc
- DELETE FROM protected_branch_merge_access_levels
- WHERE group_id IN (#{namespaces_pending_removal});
- EOF
- end
-
- # This removes namespaces that were supposed to be deleted but still reside
- # in the database.
- execute "DELETE FROM namespaces WHERE deleted_at IS NOT NULL;"
- end
-
- def down
- # This is an irreversible migration;
- # If someone is trying to rollback for other reasons, we should not throw an Exception.
- # raise ActiveRecord::IrreversibleMigration
- end
-
- def namespaces_pending_removal
- "SELECT id FROM (
- SELECT id
- FROM namespaces
- WHERE deleted_at IS NOT NULL
- ) namespace_ids"
- end
-end
diff --git a/db/migrate/20171121135738_clean_up_from_merge_request_diffs_and_commits.rb b/db/migrate/20171121135738_clean_up_from_merge_request_diffs_and_commits.rb
deleted file mode 100644
index 6be7b75492d..00000000000
--- a/db/migrate/20171121135738_clean_up_from_merge_request_diffs_and_commits.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-class CleanUpFromMergeRequestDiffsAndCommits < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
-
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
-
- include ::EachBatch
- end
-
- disable_ddl_transaction!
-
- def up
- Gitlab::BackgroundMigration.steal('DeserializeMergeRequestDiffsAndCommits')
-
- # The literal '--- []\n' value is created by the import process and treated
- # as null by the application, so we can ignore those - even if we were
- # migrating, it wouldn't create any rows.
- literal_prefix = Gitlab::Database.postgresql? ? 'E' : ''
- non_empty = "
- (st_commits IS NOT NULL AND st_commits != #{literal_prefix}'--- []\n')
- OR
- (st_diffs IS NOT NULL AND st_diffs != #{literal_prefix}'--- []\n')
- ".squish
-
- MergeRequestDiff.where(non_empty).each_batch(of: 500) do |relation, index|
- range = relation.pluck('MIN(id)', 'MAX(id)').first
-
- Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits.new.perform(*range)
- end
- end
-
- def down
- end
-end
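
The migration removed above shows the inline batching idiom GitLab uses when retiring a background migration: first steal any jobs still queued in Sidekiq, then walk the remaining rows in id ranges and run the migration synchronously. A condensed sketch of that idiom, with the model, batch size, and migration class taken from the removed file:

    # Condensed sketch of the inline batching idiom from the removed migration.
    class ExampleDiff < ActiveRecord::Base
      include EachBatch # provides each_batch for id-range batching

      self.table_name = 'merge_request_diffs'
    end

    # Drain any jobs still queued in Sidekiq, then process the rest inline.
    Gitlab::BackgroundMigration.steal('DeserializeMergeRequestDiffsAndCommits')

    ExampleDiff.each_batch(of: 500) do |relation|
      start_id, end_id = relation.pluck('MIN(id)', 'MAX(id)').first

      Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits
        .new.perform(start_id, end_id)
    end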
diff --git a/db/migrate/20171216111734_clean_up_for_members.rb b/db/migrate/20171216111734_clean_up_for_members.rb
deleted file mode 100644
index 2fefc6c7fd1..00000000000
--- a/db/migrate/20171216111734_clean_up_for_members.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class CleanUpForMembers < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- # Set this constant to true if this migration requires downtime.
- DOWNTIME = false
-
- disable_ddl_transaction!
-
- class Member < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'members'
- end
-
- def up
- condition = <<~EOF.squish
- invite_token IS NULL AND
- NOT EXISTS (SELECT 1 FROM users WHERE users.id = members.user_id)
- EOF
-
- Member.each_batch(of: 10_000) do |batch|
- batch.where(condition).delete_all
- end
- end
-
- def down
- end
-end
diff --git a/db/migrate/20180502122856_create_project_mirror_data.rb b/db/migrate/20180502122856_create_project_mirror_data.rb
index 8bc114afc0c..9781815a97b 100644
--- a/db/migrate/20180502122856_create_project_mirror_data.rb
+++ b/db/migrate/20180502122856_create_project_mirror_data.rb
@@ -4,17 +4,25 @@ class CreateProjectMirrorData < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
- return if table_exists?(:project_mirror_data)
-
- create_table :project_mirror_data do |t|
- t.references :project, index: true, foreign_key: { on_delete: :cascade }
- t.string :status
- t.string :jid
- t.text :last_error
+ if table_exists?(:project_mirror_data)
+ add_column :project_mirror_data, :status, :string unless column_exists?(:project_mirror_data, :status)
+ add_column :project_mirror_data, :jid, :string unless column_exists?(:project_mirror_data, :jid)
+ add_column :project_mirror_data, :last_error, :text unless column_exists?(:project_mirror_data, :last_error)
+ else
+ create_table :project_mirror_data do |t|
+ t.references :project, index: true, foreign_key: { on_delete: :cascade }
+ t.string :status
+ t.string :jid
+ t.text :last_error
+ end
end
end
def down
- drop_table(:project_mirror_data) if table_exists?(:project_mirror_data)
+ remove_column :project_mirror_data, :status
+ remove_column :project_mirror_data, :jid
+ remove_column :project_mirror_data, :last_error
+
+ # ee/db/migrate/20170509153720_create_project_mirror_data_ee.rb will remove the table.
end
end
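
This rewrite illustrates the pattern used throughout the backport: every DDL step is guarded on the current schema state, so the same migration is a no-op on EE databases (where the ee/db/migrate copy already ran) and does the full work on fresh CE databases. The big backport migration below formalizes the guards as helpers; a sketch of their shape, as defined later in db/migrate/20190402150158_backport_enterprise_schema.rb:

    # State-guarded DDL helpers: each checks the schema before acting, making
    # the migration idempotent across CE and EE databases.
    def add_column_if_not_exists(table, name, *args)
      add_column(table, name, *args) unless column_exists?(table, name)
    end

    def remove_column_if_exists(table, column)
      remove_column(table, column) if column_exists?(table, column)
    end

    def create_table_if_not_exists(name, *args, &block)
      return if table_exists?(name)

      create_table(name, *args, &block)
    end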
diff --git a/db/migrate/20180503131624_create_remote_mirrors.rb b/db/migrate/20180503131624_create_remote_mirrors.rb
index 9f4bd463e66..288ae365f0f 100644
--- a/db/migrate/20180503131624_create_remote_mirrors.rb
+++ b/db/migrate/20180503131624_create_remote_mirrors.rb
@@ -29,6 +29,6 @@ class CreateRemoteMirrors < ActiveRecord::Migration[4.2]
end
def down
- drop_table(:remote_mirrors) if table_exists?(:remote_mirrors)
+ # ee/db/migrate/20160321161032_create_remote_mirrors_ee.rb will remove the table
end
end
diff --git a/db/migrate/20180503141722_add_remote_mirror_available_overridden_to_projects.rb b/db/migrate/20180503141722_add_remote_mirror_available_overridden_to_projects.rb
index 5e2ef5706ee..1d99d46b7d6 100644
--- a/db/migrate/20180503141722_add_remote_mirror_available_overridden_to_projects.rb
+++ b/db/migrate/20180503141722_add_remote_mirror_available_overridden_to_projects.rb
@@ -10,6 +10,6 @@ class AddRemoteMirrorAvailableOverriddenToProjects < ActiveRecord::Migration[4.2
end
def down
- remove_column(:projects, :remote_mirror_available_overridden) if column_exists?(:projects, :remote_mirror_available_overridden)
+ # ee/db/migrate/20171017130239_add_remote_mirror_available_overridden_to_projects_ee.rb will remove the column.
end
end
diff --git a/db/migrate/20180503193542_add_indexes_to_remote_mirror.rb b/db/migrate/20180503193542_add_indexes_to_remote_mirror.rb
index bdf05c4e69b..19bed8d0500 100644
--- a/db/migrate/20180503193542_add_indexes_to_remote_mirror.rb
+++ b/db/migrate/20180503193542_add_indexes_to_remote_mirror.rb
@@ -10,6 +10,7 @@ class AddIndexesToRemoteMirror < ActiveRecord::Migration[4.2]
end
def down
+ # ee/db/migrate/20170208144550_add_index_to_mirrors_last_update_at_fields.rb will remove the index.
# rubocop:disable Migration/RemoveIndex
remove_index :remote_mirrors, :last_successful_update_at if index_exists? :remote_mirrors, :last_successful_update_at
end
diff --git a/db/migrate/20180503193953_add_mirror_available_to_application_settings.rb b/db/migrate/20180503193953_add_mirror_available_to_application_settings.rb
index 64f65cd23be..d6a04035d48 100644
--- a/db/migrate/20180503193953_add_mirror_available_to_application_settings.rb
+++ b/db/migrate/20180503193953_add_mirror_available_to_application_settings.rb
@@ -10,6 +10,6 @@ class AddMirrorAvailableToApplicationSettings < ActiveRecord::Migration[4.2]
end
def down
- remove_column(:application_settings, :mirror_available) if column_exists?(:application_settings, :mirror_available)
+ # ee/db/migrate/20171017125928_add_remote_mirror_available_to_application_settings.rb will remove the column.
end
end
diff --git a/db/migrate/20190402150158_backport_enterprise_schema.rb b/db/migrate/20190402150158_backport_enterprise_schema.rb
new file mode 100644
index 00000000000..610a8808383
--- /dev/null
+++ b/db/migrate/20190402150158_backport_enterprise_schema.rb
@@ -0,0 +1,2144 @@
+# frozen_string_literal: true
+
+# rubocop: disable Metrics/AbcSize
+# rubocop: disable Migration/Datetime
+class BackportEnterpriseSchema < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ APPLICATION_SETTINGS_COLUMNS = [
+ { type: :boolean, name: :elasticsearch_indexing, default: false, null: false },
+ { type: :boolean, name: :elasticsearch_search, default: false, null: false },
+ { type: :integer, name: :shared_runners_minutes, default: 0, null: false },
+ { type: :bigint, name: :repository_size_limit, default: 0, null: true },
+ { type: :string, name: :elasticsearch_url, default: "http://localhost:9200" },
+ { type: :boolean, name: :elasticsearch_aws, default: false, null: false },
+ { type: :string, name: :elasticsearch_aws_region, default: "us-east-1", null: true },
+ { type: :string, name: :elasticsearch_aws_access_key, default: nil, null: true },
+ { type: :string, name: :elasticsearch_aws_secret_access_key, default: nil, null: true },
+ { type: :integer, name: :geo_status_timeout, default: 10, null: true },
+ { type: :boolean, name: :elasticsearch_experimental_indexer, default: nil, null: true },
+ { type: :boolean, name: :check_namespace_plan, default: false, null: false },
+ { type: :integer, name: :mirror_max_delay, default: 300, null: false },
+ { type: :integer, name: :mirror_max_capacity, default: 100, null: false },
+ { type: :integer, name: :mirror_capacity_threshold, default: 50, null: false },
+ { type: :boolean, name: :slack_app_enabled, default: false },
+ { type: :string, name: :slack_app_id },
+ { type: :string, name: :slack_app_secret },
+ { type: :string, name: :slack_app_verification_token },
+ { type: :boolean, name: :allow_group_owners_to_manage_ldap, default: true, null: false },
+ { type: :integer, name: :default_project_creation, default: 2, null: false },
+ { type: :string, name: :email_additional_text },
+ { type: :integer, name: :file_template_project_id },
+ { type: :boolean, name: :pseudonymizer_enabled, default: false, null: false },
+ { type: :boolean, name: :snowplow_enabled, default: false, null: false },
+ { type: :string, name: :snowplow_collector_uri },
+ { type: :string, name: :snowplow_site_id },
+ { type: :string, name: :snowplow_cookie_domain },
+ { type: :integer, name: :custom_project_templates_group_id },
+ { type: :boolean, name: :elasticsearch_limit_indexing, default: false, null: false },
+ { type: :string, name: :geo_node_allowed_ips, default: '0.0.0.0/0, ::/0' }
+ ].freeze
+
+ NAMESPACE_COLUMNS = [
+ { type: :integer, name: :custom_project_templates_group_id },
+ { type: :integer, name: :file_template_project_id },
+ { type: :string, name: :ldap_sync_error },
+ { type: :datetime, name: :ldap_sync_last_successful_update_at },
+ { type: :datetime, name: :ldap_sync_last_sync_at },
+ { type: :datetime, name: :ldap_sync_last_update_at },
+ { type: :integer, name: :plan_id },
+ { type: :integer, name: :project_creation_level },
+ { type: :bigint, name: :repository_size_limit },
+ { type: :string, name: :saml_discovery_token },
+ { type: :integer, name: :shared_runners_minutes_limit },
+ { type: :datetime_with_timezone, name: :trial_ends_on },
+ { type: :integer, name: :extra_shared_runners_minutes_limit }
+ ].freeze
+
+ PROJECT_MIRROR_DATA_COLUMNS = [
+ { type: :datetime_with_timezone, name: :last_successful_update_at },
+ { type: :datetime_with_timezone, name: :last_update_at },
+ { type: :datetime, name: :last_update_scheduled_at },
+ { type: :datetime, name: :last_update_started_at },
+ { type: :datetime, name: :next_execution_timestamp }
+ ].freeze
+
+ PROJECTS_COLUMNS = [
+ { type: :boolean, name: :disable_overriding_approvers_per_merge_request },
+ { type: :string, name: :external_webhook_token },
+ { type: :text, name: :issues_template },
+ { type: :boolean, name: :merge_requests_author_approval },
+ { type: :boolean, name: :merge_requests_disable_committers_approval },
+ { type: :boolean, name: :merge_requests_require_code_owner_approval },
+ { type: :text, name: :merge_requests_template },
+ { type: :datetime, name: :mirror_last_successful_update_at },
+ { type: :datetime, name: :mirror_last_update_at },
+ { type: :boolean, name: :mirror_overwrites_diverged_branches },
+ { type: :integer, name: :mirror_user_id },
+ { type: :boolean, name: :only_mirror_protected_branches },
+ { type: :boolean, name: :packages_enabled },
+ { type: :boolean, name: :pull_mirror_available_overridden },
+ { type: :bigint, name: :repository_size_limit },
+ { type: :boolean, name: :require_password_to_approve }
+ ].freeze
+
+ USERS_COLUMNS = [
+ { type: :datetime, name: :admin_email_unsubscribed_at },
+ { type: :boolean, name: :email_opted_in },
+ { type: :datetime, name: :email_opted_in_at },
+ { type: :string, name: :email_opted_in_ip },
+ { type: :integer, name: :email_opted_in_source_id },
+ { type: :integer, name: :group_view },
+ { type: :integer, name: :managing_group_id },
+ { type: :text, name: :note },
+ { type: :integer, name: :roadmap_layout, limit: 2 },
+ { type: :boolean, name: :support_bot },
+ { type: :integer, name: :bot_type, limit: 2 }
+ ].freeze
+
+ class ApplicationSetting < ActiveRecord::Base
+ self.table_name = 'application_settings'
+ end
+
+ class ProtectedBranchMergeAccessLevels < ActiveRecord::Base
+ self.table_name = 'protected_branch_merge_access_levels'
+ end
+
+ class ProtectedBranchPushAccessLevels < ActiveRecord::Base
+ self.table_name = 'protected_branch_push_access_levels'
+ end
+
+ class Project < ActiveRecord::Base
+ self.table_name = 'projects'
+ end
+
+ def up
+ create_missing_tables
+
+ update_appearances
+ update_application_settings
+ update_boards_table
+ update_clusters_applications_prometheus
+ update_identities
+ update_issues
+ update_lists
+ update_members
+ update_merge_requests
+ update_notes
+ update_ci_builds
+ update_environments
+ update_namespaces
+ update_notification_settings
+ update_project_mirror_data
+ update_project_statistics
+ update_projects
+ update_protected_branch_merge_access_levels
+ update_protected_branch_push_access_levels
+ update_resource_label_events
+ update_user_preferences
+ update_users
+ update_web_hooks
+ update_geo_nodes
+
+ add_missing_foreign_keys
+ end
+
+ def down
+ # This migration can not be reverted in a production environment, as doing
+ # so would lead to data loss for existing EE installations.
+ return if !Rails.env.test? && !Rails.env.development?
+
+ remove_foreign_keys
+ remove_tables
+
+ revert_appearances
+ revert_application_settings
+ revert_boards_table
+ revert_clusters_applications_prometheus
+ revert_identities
+ revert_issues
+ revert_lists
+ revert_members
+ revert_merge_requests
+ revert_notes
+ revert_ci_builds
+ revert_environments
+ revert_namespaces
+ revert_notification_settings
+ revert_project_mirror_data
+ revert_project_statistics
+ revert_projects
+ revert_protected_branch_merge_access_levels
+ revert_protected_branch_push_access_levels
+ revert_resource_label_events
+ revert_user_preferences
+ revert_users
+ revert_web_hooks
+ end
+
+ def add_column_if_not_exists(table, name, *args)
+ add_column(table, name, *args) unless column_exists?(table, name)
+ end
+
+ def remove_column_if_exists(table, column)
+ remove_column(table, column) if column_exists?(table, column)
+ end
+
+ def drop_table_if_exists(table)
+ drop_table(table) if table_exists?(table)
+ end
+
+ def add_column_with_default_if_not_exists(table, name, *args)
+ unless column_exists?(table, name)
+ add_column_with_default(table, name, *args)
+ end
+ end
+
+ def add_missing_columns(table, columns)
+ columns.each do |column|
+ next if table.column_exists?(column[:name])
+
+ # We can't use (public_)send here as this doesn't work with
+ # `datetime_with_timezone` for some reason.
+ table.column(
+ column[:name],
+ column[:type],
+ default: column[:default],
+ null: column.fetch(:null, true),
+ limit: column[:limit]
+ )
+ end
+ end
+
+ def remove_columns(table, columns)
+ columns.each do |column|
+ remove_column_if_exists(table, column[:name])
+ end
+ end
+
+ def create_table_if_not_exists(name, *args, &block)
+ return if table_exists?(name)
+
+ create_table(name, *args, &block)
+ end
+
+ def add_concurrent_foreign_key(source, target, column:, on_delete: nil, name: nil)
+ # We don't want redundant VALIDATE CONSTRAINT statements to run for existing
+ # foreign keys, as this can take a long time on large installations such as
+ # GitLab.com.
+ return if foreign_key_exists?(source, target, column: column)
+
+ super
+ end
+
+ def update_appearances
+ add_column_if_not_exists(:appearances, :updated_by, :integer)
+ end
+
+ def revert_appearances
+ remove_column_if_exists(:appearances, :updated_by)
+ end
+
+ def update_application_settings
+ # In the CE schema this column allows NULL values even though there is a
+ # default value. In EE this column is not allowed to be NULL. This means
+ # that if we want to add a NOT NULL clause below, we must ensure no existing
+ # data would violate this clause.
+ ApplicationSetting
+ .where(password_authentication_enabled_for_git: nil)
+ .update_all(password_authentication_enabled_for_git: true)
+
+ change_column_null(
+ :application_settings,
+ :password_authentication_enabled_for_git,
+ false
+ )
+
+ # This table will only have a single row, and all operations here will be
+ # very fast. As such we merge all of this into a single ALTER TABLE
+ # statement.
+ change_table(:application_settings) do |t|
+ t.text(:help_text) unless t.column_exists?(:help_text)
+
+ add_missing_columns(t, APPLICATION_SETTINGS_COLUMNS)
+ end
+
+ add_concurrent_index(
+ :application_settings,
+ :custom_project_templates_group_id
+ )
+
+ add_concurrent_index(
+ :application_settings,
+ :file_template_project_id
+ )
+ end
+
+ def revert_application_settings
+ change_column_null(
+ :application_settings,
+ :password_authentication_enabled_for_git,
+ true
+ )
+
+ remove_concurrent_index(
+ :application_settings,
+ :custom_project_templates_group_id
+ )
+
+ remove_concurrent_index(
+ :application_settings,
+ :file_template_project_id
+ )
+
+ remove_columns(:application_settings, APPLICATION_SETTINGS_COLUMNS)
+ end
+
+ def update_boards_table
+ add_column_if_not_exists(:boards, :milestone_id, :integer)
+ add_column_if_not_exists(:boards, :weight, :integer)
+
+ add_column_with_default_if_not_exists(
+ :boards,
+ :name,
+ :string,
+ default: 'Development'
+ )
+
+ add_concurrent_index(:boards, :milestone_id)
+ end
+
+ def revert_boards_table
+ remove_concurrent_index(:boards, :milestone_id)
+ remove_column_if_exists(:boards, :name)
+ remove_column_if_exists(:boards, :weight)
+ remove_column_if_exists(:boards, :milestone_id)
+ end
+
+ def update_clusters_applications_prometheus
+ add_column_if_not_exists(
+ :clusters_applications_prometheus,
+ :encrypted_alert_manager_token,
+ :string
+ )
+
+ add_column_if_not_exists(
+ :clusters_applications_prometheus,
+ :encrypted_alert_manager_token_iv,
+ :string
+ )
+
+ add_column_if_not_exists(
+ :clusters_applications_prometheus,
+ :last_update_started_at,
+ :datetime_with_timezone
+ )
+ end
+
+ def revert_clusters_applications_prometheus
+ remove_column_if_exists(
+ :clusters_applications_prometheus,
+ :encrypted_alert_manager_token
+ )
+
+ remove_column_if_exists(
+ :clusters_applications_prometheus,
+ :encrypted_alert_manager_token_iv
+ )
+
+ remove_column_if_exists(
+ :clusters_applications_prometheus,
+ :last_update_started_at
+ )
+ end
+
+ def update_identities
+ add_column_if_not_exists(:identities, :saml_provider_id, :integer)
+ add_column_if_not_exists(:identities, :secondary_extern_uid, :string)
+
+ add_concurrent_index(
+ :identities,
+ :saml_provider_id,
+ where: 'saml_provider_id IS NOT NULL'
+ )
+ end
+
+ def revert_identities
+ remove_column_if_exists(:identities, :saml_provider_id)
+ remove_column_if_exists(:identities, :secondary_extern_uid)
+ end
+
+ def update_issues
+ add_column_if_not_exists(:issues, :service_desk_reply_to, :string)
+ add_column_if_not_exists(:issues, :weight, :integer)
+ end
+
+ def revert_issues
+ remove_column_if_exists(:issues, :service_desk_reply_to)
+ remove_column_if_exists(:issues, :weight)
+ end
+
+ def update_lists
+ add_column_if_not_exists(:lists, :milestone_id, :integer)
+ add_column_if_not_exists(:lists, :user_id, :integer)
+
+ add_concurrent_index(:lists, :milestone_id)
+ add_concurrent_index(:lists, :user_id)
+ end
+
+ def revert_lists
+ remove_column_if_exists(:lists, :milestone_id)
+ remove_column_if_exists(:lists, :user_id)
+ end
+
+ def update_members
+ add_column_with_default_if_not_exists(
+ :members,
+ :ldap,
+ :boolean,
+ default: false
+ )
+
+ add_column_with_default_if_not_exists(
+ :members,
+ :override,
+ :boolean,
+ default: false
+ )
+ end
+
+ def revert_members
+ remove_column_if_exists(:members, :ldap)
+ remove_column_if_exists(:members, :override)
+ end
+
+ def update_merge_requests
+ add_column_if_not_exists(:merge_requests, :approvals_before_merge, :integer)
+ end
+
+ def revert_merge_requests
+ remove_column_if_exists(:merge_requests, :approvals_before_merge)
+ end
+
+ def update_notes
+ add_column_if_not_exists(:notes, :review_id, :bigint)
+ add_concurrent_index(:notes, :review_id)
+ end
+
+ def revert_notes
+ remove_column_if_exists(:notes, :review_id)
+ end
+
+ def update_ci_builds
+ add_concurrent_index(
+ :ci_builds,
+ [:name],
+ name: 'index_ci_builds_on_name_for_security_products_values',
+ where: "
+ (
+ (name)::text = ANY (
+ ARRAY[
+ ('container_scanning'::character varying)::text,
+ ('dast'::character varying)::text,
+ ('dependency_scanning'::character varying)::text,
+ ('license_management'::character varying)::text,
+ ('sast'::character varying)::text
+ ]
+ )
+ )"
+ )
+ end
+
+ def revert_ci_builds
+ remove_concurrent_index_by_name(
+ :ci_builds,
+ 'index_ci_builds_on_name_for_security_products_values'
+ )
+ end
+
+ def update_environments
+ return unless Gitlab::Database.postgresql?
+ return if index_exists?(:environments, :name, name: 'index_environments_on_name_varchar_pattern_ops')
+
+ execute('CREATE INDEX CONCURRENTLY index_environments_on_name_varchar_pattern_ops ON environments (name varchar_pattern_ops);')
+ end
+
+ def revert_environments
+ return unless Gitlab::Database.postgresql?
+
+ remove_concurrent_index_by_name(
+ :environments,
+ 'index_environments_on_name_varchar_pattern_ops'
+ )
+ end
+
+ def update_namespaces
+ change_table(:namespaces) do |t|
+ add_missing_columns(t, NAMESPACE_COLUMNS)
+ end
+
+ add_column_with_default_if_not_exists(
+ :namespaces,
+ :ldap_sync_status,
+ :string,
+ default: 'ready'
+ )
+
+ add_column_with_default_if_not_exists(
+ :namespaces,
+ :membership_lock,
+ :boolean,
+ default: false,
+ allow_null: true
+ )
+
+ # When `add_concurrent_index` runs, it for some reason incorrectly
+ # determines this index does not exist when it does. To work around this, we
+ # check the existence by name ourselves.
+ unless index_exists_by_name?(:namespaces, 'index_namespaces_on_custom_project_templates_group_id_and_type')
+ add_concurrent_index(
+ :namespaces,
+ %i[custom_project_templates_group_id type],
+ where: "(custom_project_templates_group_id IS NOT NULL)"
+ )
+ end
+
+ add_concurrent_index(:namespaces, :file_template_project_id)
+ add_concurrent_index(:namespaces, :ldap_sync_last_successful_update_at)
+ add_concurrent_index(:namespaces, :ldap_sync_last_update_at)
+ add_concurrent_index(:namespaces, :plan_id)
+ add_concurrent_index(
+ :namespaces,
+ :trial_ends_on,
+ where: "(trial_ends_on IS NOT NULL)"
+ )
+
+ unless index_exists_by_name?(:namespaces, 'index_namespaces_on_shared_and_extra_runners_minutes_limit')
+ add_concurrent_index(
+ :namespaces,
+ %i[shared_runners_minutes_limit extra_shared_runners_minutes_limit],
+ name: 'index_namespaces_on_shared_and_extra_runners_minutes_limit'
+ )
+ end
+ end
+
+ def revert_namespaces
+ remove_columns(:namespaces, NAMESPACE_COLUMNS)
+ remove_column_if_exists(:namespaces, :ldap_sync_status)
+ remove_column_if_exists(:namespaces, :membership_lock)
+
+ remove_concurrent_index_by_name(
+ :namespaces,
+ 'index_namespaces_on_shared_and_extra_runners_minutes_limit'
+ )
+ end
+
+ def update_notification_settings
+ add_column_if_not_exists(:notification_settings, :new_epic, :boolean)
+ end
+
+ def revert_notification_settings
+ remove_column_if_exists(:notification_settings, :new_epic)
+ end
+
+ def update_project_mirror_data
+ change_table(:project_mirror_data) do |t|
+ add_missing_columns(t, PROJECT_MIRROR_DATA_COLUMNS)
+ end
+
+ add_column_with_default_if_not_exists(
+ :project_mirror_data,
+ :retry_count,
+ :integer,
+ default: 0
+ )
+
+ add_concurrent_index(:project_mirror_data, :last_successful_update_at)
+
+ add_concurrent_index(
+ :project_mirror_data,
+ %i[next_execution_timestamp retry_count],
+ name: 'index_mirror_data_on_next_execution_and_retry_count'
+ )
+ end
+
+ def revert_project_mirror_data
+ remove_columns(:project_mirror_data, PROJECT_MIRROR_DATA_COLUMNS)
+
+ remove_concurrent_index_by_name(
+ :project_mirror_data,
+ 'index_mirror_data_on_next_execution_and_retry_count'
+ )
+
+ remove_column_if_exists(:project_mirror_data, :retry_count)
+ end
+
+ def update_project_statistics
+ add_column_with_default_if_not_exists(
+ :project_statistics,
+ :shared_runners_seconds,
+ :bigint,
+ default: 0
+ )
+
+ add_column_if_not_exists(
+ :project_statistics,
+ :shared_runners_seconds_last_reset,
+ :datetime
+ )
+ end
+
+ def revert_project_statistics
+ remove_column_if_exists(:project_statistics, :shared_runners_seconds)
+
+ remove_column_if_exists(
+ :project_statistics,
+ :shared_runners_seconds_last_reset
+ )
+ end
+
+ def update_projects
+ change_table(:projects) do |t|
+ add_missing_columns(t, PROJECTS_COLUMNS)
+ end
+
+ change_column_null(:projects, :merge_requests_rebase_enabled, true)
+
+ add_column_with_default_if_not_exists(
+ :projects,
+ :mirror,
+ :boolean,
+ default: false
+ )
+
+ add_column_with_default_if_not_exists(
+ :projects,
+ :mirror_trigger_builds,
+ :boolean,
+ default: false
+ )
+
+ add_column_with_default_if_not_exists(
+ :projects,
+ :reset_approvals_on_push,
+ :boolean,
+ default: true,
+ allow_null: true
+ )
+
+ add_column_with_default_if_not_exists(
+ :projects,
+ :service_desk_enabled,
+ :boolean,
+ default: true,
+ allow_null: true
+ )
+
+ add_column_with_default_if_not_exists(
+ :projects,
+ :approvals_before_merge,
+ :integer,
+ default: 0
+ )
+
+ add_concurrent_index(
+ :projects,
+ %i[archived pending_delete merge_requests_require_code_owner_approval],
+ name: 'projects_requiring_code_owner_approval',
+ where: '((pending_delete = false) AND (archived = false) AND (merge_requests_require_code_owner_approval = true))'
+ )
+
+ add_concurrent_index(
+ :projects,
+ %i[id repository_storage last_repository_updated_at],
+ name: 'idx_projects_on_repository_storage_last_repository_updated_at'
+ )
+
+ add_concurrent_index(
+ :projects,
+ :id,
+ name: 'index_projects_on_mirror_and_mirror_trigger_builds_both_true',
+ where: '((mirror IS TRUE) AND (mirror_trigger_builds IS TRUE))'
+ )
+
+ add_concurrent_index(:projects, :mirror_last_successful_update_at)
+ end
+
+ def revert_projects
+ remove_columns(:projects, PROJECTS_COLUMNS)
+
+ Project
+ .where(merge_requests_rebase_enabled: nil)
+ .update_all(merge_requests_rebase_enabled: false)
+
+ change_column_null(:projects, :merge_requests_rebase_enabled, false)
+
+ remove_column_if_exists(:projects, :mirror)
+ remove_column_if_exists(:projects, :mirror_trigger_builds)
+ remove_column_if_exists(:projects, :reset_approvals_on_push)
+ remove_column_if_exists(:projects, :service_desk_enabled)
+ remove_column_if_exists(:projects, :approvals_before_merge)
+
+ remove_concurrent_index_by_name(
+ :projects,
+ 'projects_requiring_code_owner_approval'
+ )
+
+ remove_concurrent_index_by_name(
+ :projects,
+ 'idx_projects_on_repository_storage_last_repository_updated_at'
+ )
+
+ remove_concurrent_index_by_name(
+ :projects,
+ 'index_projects_on_mirror_and_mirror_trigger_builds_both_true'
+ )
+ end
+
+ def update_protected_branch_merge_access_levels
+ change_column_null(:protected_branch_merge_access_levels, :access_level, true)
+
+ add_column_if_not_exists(
+ :protected_branch_merge_access_levels,
+ :group_id,
+ :integer
+ )
+
+ add_column_if_not_exists(
+ :protected_branch_merge_access_levels,
+ :user_id,
+ :integer
+ )
+
+ add_concurrent_index(:protected_branch_merge_access_levels, :group_id)
+ add_concurrent_index(:protected_branch_merge_access_levels, :user_id)
+ end
+
+ def revert_protected_branch_merge_access_levels
+ ProtectedBranchMergeAccessLevels
+ .where(access_level: nil)
+ .update_all(access_level: 40)
+
+ change_column_null(
+ :protected_branch_merge_access_levels,
+ :access_level,
+ false
+ )
+
+ remove_column_if_exists(:protected_branch_merge_access_levels, :group_id)
+ remove_column_if_exists(:protected_branch_merge_access_levels, :user_id)
+ end
+
+ def update_protected_branch_push_access_levels
+ change_column_null(
+ :protected_branch_push_access_levels,
+ :access_level,
+ true
+ )
+
+ add_column_if_not_exists(
+ :protected_branch_push_access_levels,
+ :group_id,
+ :integer
+ )
+
+ add_column_if_not_exists(
+ :protected_branch_push_access_levels,
+ :user_id,
+ :integer
+ )
+
+ add_concurrent_index(:protected_branch_push_access_levels, :group_id)
+ add_concurrent_index(:protected_branch_push_access_levels, :user_id)
+ end
+
+ def revert_protected_branch_push_access_levels
+ ProtectedBranchPushAccessLevels
+ .where(access_level: nil)
+ .update_all(access_level: 40)
+
+ change_column_null(
+ :protected_branch_push_access_levels,
+ :access_level,
+ false
+ )
+
+ remove_column_if_exists(:protected_branch_push_access_levels, :group_id)
+ remove_column_if_exists(:protected_branch_push_access_levels, :user_id)
+ end
+
+ def update_resource_label_events
+ add_column_if_not_exists(:resource_label_events, :epic_id, :integer)
+ add_concurrent_index(:resource_label_events, :epic_id)
+ end
+
+ def revert_resource_label_events
+ remove_column_if_exists(:resource_label_events, :epic_id)
+ end
+
+ def update_user_preferences
+ add_column_with_default_if_not_exists(
+ :user_preferences,
+ :epic_notes_filter,
+ :integer,
+ default: 0,
+ limit: 2
+ )
+
+ add_column_if_not_exists(:user_preferences, :epics_sort, :string)
+ add_column_if_not_exists(:user_preferences, :roadmap_epics_state, :integer)
+ add_column_if_not_exists(:user_preferences, :roadmaps_sort, :string)
+ end
+
+ def revert_user_preferences
+ remove_column_if_exists(:user_preferences, :epic_notes_filter)
+ remove_column_if_exists(:user_preferences, :epics_sort)
+ remove_column_if_exists(:user_preferences, :roadmap_epics_state)
+ remove_column_if_exists(:user_preferences, :roadmaps_sort)
+ end
+
+ def update_users
+ add_column_with_default_if_not_exists(
+ :users,
+ :auditor,
+ :boolean,
+ default: false
+ )
+
+ change_table(:users) do |t|
+ add_missing_columns(t, USERS_COLUMNS)
+ end
+
+ add_concurrent_index(:users, :group_view)
+ add_concurrent_index(:users, :managing_group_id)
+ add_concurrent_index(:users, :support_bot)
+ add_concurrent_index(:users, :bot_type)
+
+ add_concurrent_index(
+ :users,
+ :state,
+ name: 'index_users_on_state_and_internal_attrs',
+ where: '((ghost <> true) AND (support_bot <> true))'
+ )
+
+ internal_index = 'index_users_on_state_and_internal'
+
+ remove_concurrent_index(:users, :state, name: internal_index)
+
+ add_concurrent_index(
+ :users,
+ :state,
+ name: internal_index,
+ where: '((ghost <> true) AND (bot_type IS NULL))'
+ )
+ end
+
+ def revert_users
+ remove_column_if_exists(:users, :auditor)
+ remove_columns(:users, USERS_COLUMNS)
+
+ remove_concurrent_index_by_name(
+ :users,
+ 'index_users_on_state_and_internal_attrs'
+ )
+
+ internal_index = 'index_users_on_state_and_internal'
+
+ remove_concurrent_index(:users, :state, name: internal_index)
+ add_concurrent_index(:users, :state, name: internal_index)
+ end
+
+ def update_web_hooks
+ add_column_if_not_exists(:web_hooks, :group_id, :integer)
+ end
+
+ def revert_web_hooks
+ remove_column_if_exists(:web_hooks, :group_id)
+ end
+
+ def update_geo_nodes
+ add_column_if_not_exists(:geo_nodes, :internal_url, :string)
+ end
+
+ def revert_geo_nodes
+ remove_column_if_exists(:geo_nodes, :internal_url)
+ end
+
+ def create_missing_tables
+ create_table_if_not_exists "approval_merge_request_rule_sources", id: :bigserial do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.bigint "approval_project_rule_id", null: false
+ t.index %w[approval_merge_request_rule_id], name: "index_approval_merge_request_rule_sources_1", unique: true, using: :btree
+ t.index %w[approval_project_rule_id], name: "index_approval_merge_request_rule_sources_2", using: :btree
+ end
+
+ create_table_if_not_exists "approval_merge_request_rules", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "merge_request_id", null: false
+ t.integer "approvals_required", limit: 2, default: 0, null: false
+ t.boolean "code_owner", default: false, null: false
+ t.string "name", null: false
+ t.index %w[merge_request_id code_owner name], name: "approval_rule_name_index_for_code_owners", unique: true, where: "(code_owner = true)", using: :btree
+ t.index %w[merge_request_id code_owner], name: "index_approval_merge_request_rules_1", using: :btree
+ end
+
+ create_table_if_not_exists "approval_merge_request_rules_approved_approvers", id: :bigserial do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.integer "user_id", null: false
+ t.index %w[approval_merge_request_rule_id user_id], name: "index_approval_merge_request_rules_approved_approvers_1", unique: true, using: :btree
+ t.index %w[user_id], name: "index_approval_merge_request_rules_approved_approvers_2", using: :btree
+ end
+
+ create_table_if_not_exists "approval_merge_request_rules_groups", id: :bigserial do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.integer "group_id", null: false
+ t.index %w[approval_merge_request_rule_id group_id], name: "index_approval_merge_request_rules_groups_1", unique: true, using: :btree
+ t.index %w[group_id], name: "index_approval_merge_request_rules_groups_2", using: :btree
+ end
+
+ create_table_if_not_exists "approval_merge_request_rules_users", id: :bigserial do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.integer "user_id", null: false
+ t.index %w[approval_merge_request_rule_id user_id], name: "index_approval_merge_request_rules_users_1", unique: true, using: :btree
+ t.index %w[user_id], name: "index_approval_merge_request_rules_users_2", using: :btree
+ end
+
+ create_table_if_not_exists "approval_project_rules", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.integer "approvals_required", limit: 2, default: 0, null: false
+ t.string "name", null: false
+ t.index %w[project_id], name: "index_approval_project_rules_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "approval_project_rules_groups", id: :bigserial do |t|
+ t.bigint "approval_project_rule_id", null: false
+ t.integer "group_id", null: false
+ t.index %w[approval_project_rule_id group_id], name: "index_approval_project_rules_groups_1", unique: true, using: :btree
+ t.index %w[group_id], name: "index_approval_project_rules_groups_2", using: :btree
+ end
+
+ create_table_if_not_exists "approval_project_rules_users", id: :bigserial do |t|
+ t.bigint "approval_project_rule_id", null: false
+ t.integer "user_id", null: false
+ t.index %w[approval_project_rule_id user_id], name: "index_approval_project_rules_users_1", unique: true, using: :btree
+ t.index %w[user_id], name: "index_approval_project_rules_users_2", using: :btree
+ end
+
+ create_table_if_not_exists "approvals" do |t|
+ t.integer "merge_request_id", null: false
+ t.integer "user_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index %w[merge_request_id], name: "index_approvals_on_merge_request_id", using: :btree
+ end
+
+ create_table_if_not_exists "approver_groups" do |t|
+ t.integer "target_id", null: false
+ t.string "target_type", null: false
+ t.integer "group_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index %w[group_id], name: "index_approver_groups_on_group_id", using: :btree
+ t.index %w[target_id target_type], name: "index_approver_groups_on_target_id_and_target_type", using: :btree
+ end
+
+ create_table_if_not_exists "approvers" do |t|
+ t.integer "target_id", null: false
+ t.string "target_type"
+ t.integer "user_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index %w[target_id target_type], name: "index_approvers_on_target_id_and_target_type", using: :btree
+ t.index %w[user_id], name: "index_approvers_on_user_id", using: :btree
+ end
+
+ create_table_if_not_exists "board_assignees" do |t|
+ t.integer "board_id", null: false
+ t.integer "assignee_id", null: false
+ t.index %w[assignee_id], name: "index_board_assignees_on_assignee_id", using: :btree
+ t.index %w[board_id assignee_id], name: "index_board_assignees_on_board_id_and_assignee_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "board_labels" do |t|
+ t.integer "board_id", null: false
+ t.integer "label_id", null: false
+ t.index %w[board_id label_id], name: "index_board_labels_on_board_id_and_label_id", unique: true, using: :btree
+ t.index %w[label_id], name: "index_board_labels_on_label_id", using: :btree
+ end
+
+ create_table_if_not_exists "ci_sources_pipelines" do |t|
+ t.integer "project_id"
+ t.integer "pipeline_id"
+ t.integer "source_project_id"
+ t.integer "source_job_id"
+ t.integer "source_pipeline_id"
+ t.index ["pipeline_id"], name: "index_ci_sources_pipelines_on_pipeline_id", using: :btree
+ t.index ["project_id"], name: "index_ci_sources_pipelines_on_project_id", using: :btree
+ t.index ["source_job_id"], name: "index_ci_sources_pipelines_on_source_job_id", using: :btree
+ t.index ["source_pipeline_id"], name: "index_ci_sources_pipelines_on_source_pipeline_id", using: :btree
+ t.index ["source_project_id"], name: "index_ci_sources_pipelines_on_source_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "design_management_designs", id: :bigserial, force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.integer "issue_id", null: false
+ t.string "filename", null: false
+ t.index %w[issue_id filename], name: "index_design_management_designs_on_issue_id_and_filename", unique: true, using: :btree
+ t.index ["project_id"], name: "index_design_management_designs_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "design_management_designs_versions", id: false, force: :cascade do |t|
+ t.bigint "design_id", null: false
+ t.bigint "version_id", null: false
+ t.index %w[design_id version_id], name: "design_management_designs_versions_uniqueness", unique: true, using: :btree
+ t.index ["design_id"], name: "index_design_management_designs_versions_on_design_id", using: :btree
+ t.index ["version_id"], name: "index_design_management_designs_versions_on_version_id", using: :btree
+ end
+
+ create_table_if_not_exists "design_management_versions", id: :bigserial, force: :cascade do |t|
+ t.binary "sha", null: false
+ t.index ["sha"], name: "index_design_management_versions_on_sha", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "draft_notes", id: :bigserial do |t|
+ t.integer "merge_request_id", null: false
+ t.integer "author_id", null: false
+ t.boolean "resolve_discussion", default: false, null: false
+ t.string "discussion_id"
+ t.text "note", null: false
+ t.text "position"
+ t.text "original_position"
+ t.text "change_position"
+ t.index ["author_id"], name: "index_draft_notes_on_author_id", using: :btree
+ t.index ["discussion_id"], name: "index_draft_notes_on_discussion_id", using: :btree
+ t.index ["merge_request_id"], name: "index_draft_notes_on_merge_request_id", using: :btree
+ end
+
+ create_table_if_not_exists "elasticsearch_indexed_namespaces", id: false do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "namespace_id"
+ t.index ["namespace_id"], name: "index_elasticsearch_indexed_namespaces_on_namespace_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "elasticsearch_indexed_projects", id: false do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id"
+ t.index ["project_id"], name: "index_elasticsearch_indexed_projects_on_project_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "epic_issues" do |t|
+ t.integer "epic_id", null: false
+ t.integer "issue_id", null: false
+ t.integer "relative_position", default: 1073741823, null: false
+ t.index ["epic_id"], name: "index_epic_issues_on_epic_id", using: :btree
+ t.index ["issue_id"], name: "index_epic_issues_on_issue_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "epic_metrics" do |t|
+ t.integer "epic_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["epic_id"], name: "index_epic_metrics", using: :btree
+ end
+
+ create_table_if_not_exists "epics" do |t|
+ t.integer "milestone_id"
+ t.integer "group_id", null: false
+ t.integer "author_id", null: false
+ t.integer "assignee_id"
+ t.integer "iid", null: false
+ t.integer "cached_markdown_version"
+ t.integer "updated_by_id"
+ t.integer "last_edited_by_id"
+ t.integer "lock_version"
+ t.date "start_date"
+ t.date "end_date"
+ t.datetime "last_edited_at"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.string "title", null: false
+ t.string "title_html", null: false
+ t.text "description"
+ t.text "description_html"
+ t.integer "start_date_sourcing_milestone_id"
+ t.integer "due_date_sourcing_milestone_id"
+ t.date "start_date_fixed"
+ t.date "due_date_fixed"
+ t.boolean "start_date_is_fixed"
+ t.boolean "due_date_is_fixed"
+ t.integer "state", limit: 2, default: 1, null: false
+ t.integer "closed_by_id"
+ t.datetime "closed_at"
+ t.integer "parent_id"
+ t.integer "relative_position"
+ t.index ["assignee_id"], name: "index_epics_on_assignee_id", using: :btree
+ t.index ["author_id"], name: "index_epics_on_author_id", using: :btree
+ t.index ["closed_by_id"], name: "index_epics_on_closed_by_id", using: :btree
+ t.index ["end_date"], name: "index_epics_on_end_date", using: :btree
+ t.index ["group_id"], name: "index_epics_on_group_id", using: :btree
+ t.index ["iid"], name: "index_epics_on_iid", using: :btree
+ t.index ["milestone_id"], name: "index_milestone", using: :btree
+ t.index ["parent_id"], name: "index_epics_on_parent_id", using: :btree
+ t.index ["start_date"], name: "index_epics_on_start_date", using: :btree
+ end
+
+ create_table_if_not_exists "geo_cache_invalidation_events", id: :bigserial do |t|
+ t.string "key", null: false
+ end
+
+ create_table_if_not_exists "geo_event_log", id: :bigserial do |t|
+ t.datetime "created_at", null: false
+ t.bigint "repository_updated_event_id"
+ t.bigint "repository_deleted_event_id"
+ t.bigint "repository_renamed_event_id"
+ t.bigint "repositories_changed_event_id"
+ t.bigint "repository_created_event_id"
+ t.bigint "hashed_storage_migrated_event_id"
+ t.bigint "lfs_object_deleted_event_id"
+ t.bigint "hashed_storage_attachments_event_id"
+ t.bigint "upload_deleted_event_id"
+ t.bigint "job_artifact_deleted_event_id"
+ t.bigint "reset_checksum_event_id"
+ t.bigint "cache_invalidation_event_id"
+ t.index ["cache_invalidation_event_id"], name: "index_geo_event_log_on_cache_invalidation_event_id", where: "(cache_invalidation_event_id IS NOT NULL)", using: :btree
+ t.index ["hashed_storage_attachments_event_id"], name: "index_geo_event_log_on_hashed_storage_attachments_event_id", where: "(hashed_storage_attachments_event_id IS NOT NULL)", using: :btree
+ t.index ["hashed_storage_migrated_event_id"], name: "index_geo_event_log_on_hashed_storage_migrated_event_id", where: "(hashed_storage_migrated_event_id IS NOT NULL)", using: :btree
+ t.index ["job_artifact_deleted_event_id"], name: "index_geo_event_log_on_job_artifact_deleted_event_id", where: "(job_artifact_deleted_event_id IS NOT NULL)", using: :btree
+ t.index ["lfs_object_deleted_event_id"], name: "index_geo_event_log_on_lfs_object_deleted_event_id", where: "(lfs_object_deleted_event_id IS NOT NULL)", using: :btree
+ t.index ["repositories_changed_event_id"], name: "index_geo_event_log_on_repositories_changed_event_id", where: "(repositories_changed_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_created_event_id"], name: "index_geo_event_log_on_repository_created_event_id", where: "(repository_created_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_deleted_event_id"], name: "index_geo_event_log_on_repository_deleted_event_id", where: "(repository_deleted_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_renamed_event_id"], name: "index_geo_event_log_on_repository_renamed_event_id", where: "(repository_renamed_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_updated_event_id"], name: "index_geo_event_log_on_repository_updated_event_id", where: "(repository_updated_event_id IS NOT NULL)", using: :btree
+ t.index ["reset_checksum_event_id"], name: "index_geo_event_log_on_reset_checksum_event_id", where: "(reset_checksum_event_id IS NOT NULL)", using: :btree
+ t.index ["upload_deleted_event_id"], name: "index_geo_event_log_on_upload_deleted_event_id", where: "(upload_deleted_event_id IS NOT NULL)", using: :btree
+ end
+
+ create_table_if_not_exists "geo_hashed_storage_attachments_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.text "old_attachments_path", null: false
+ t.text "new_attachments_path", null: false
+ t.index ["project_id"], name: "index_geo_hashed_storage_attachments_events_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_hashed_storage_migrated_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "old_disk_path", null: false
+ t.text "new_disk_path", null: false
+ t.text "old_wiki_disk_path", null: false
+ t.text "new_wiki_disk_path", null: false
+ t.integer "old_storage_version", limit: 2
+ t.integer "new_storage_version", limit: 2, null: false
+ t.index ["project_id"], name: "index_geo_hashed_storage_migrated_events_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_job_artifact_deleted_events", id: :bigserial do |t|
+ t.integer "job_artifact_id", null: false
+ t.string "file_path", null: false
+ t.index ["job_artifact_id"], name: "index_geo_job_artifact_deleted_events_on_job_artifact_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_lfs_object_deleted_events", id: :bigserial do |t|
+ t.integer "lfs_object_id", null: false
+ t.string "oid", null: false
+ t.string "file_path", null: false
+ t.index ["lfs_object_id"], name: "index_geo_lfs_object_deleted_events_on_lfs_object_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_node_namespace_links" do |t|
+ t.integer "geo_node_id", null: false
+ t.integer "namespace_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index %w[geo_node_id namespace_id], name: "index_geo_node_namespace_links_on_geo_node_id_and_namespace_id", unique: true, using: :btree
+ t.index ["geo_node_id"], name: "index_geo_node_namespace_links_on_geo_node_id", using: :btree
+ t.index ["namespace_id"], name: "index_geo_node_namespace_links_on_namespace_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_node_statuses" do |t|
+ t.integer "geo_node_id", null: false
+ t.integer "db_replication_lag_seconds"
+ t.integer "repositories_synced_count"
+ t.integer "repositories_failed_count"
+ t.integer "lfs_objects_count"
+ t.integer "lfs_objects_synced_count"
+ t.integer "lfs_objects_failed_count"
+ t.integer "attachments_count"
+ t.integer "attachments_synced_count"
+ t.integer "attachments_failed_count"
+ t.integer "last_event_id"
+ t.datetime "last_event_date"
+ t.integer "cursor_last_event_id"
+ t.datetime "cursor_last_event_date"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.datetime "last_successful_status_check_at"
+ t.string "status_message"
+ t.integer "replication_slots_count"
+ t.integer "replication_slots_used_count"
+ t.bigint "replication_slots_max_retained_wal_bytes"
+ t.integer "wikis_synced_count"
+ t.integer "wikis_failed_count"
+ t.integer "job_artifacts_count"
+ t.integer "job_artifacts_synced_count"
+ t.integer "job_artifacts_failed_count"
+ t.string "version"
+ t.string "revision"
+ t.integer "repositories_verified_count"
+ t.integer "repositories_verification_failed_count"
+ t.integer "wikis_verified_count"
+ t.integer "wikis_verification_failed_count"
+ t.integer "lfs_objects_synced_missing_on_primary_count"
+ t.integer "job_artifacts_synced_missing_on_primary_count"
+ t.integer "attachments_synced_missing_on_primary_count"
+ t.integer "repositories_checksummed_count"
+ t.integer "repositories_checksum_failed_count"
+ t.integer "repositories_checksum_mismatch_count"
+ t.integer "wikis_checksummed_count"
+ t.integer "wikis_checksum_failed_count"
+ t.integer "wikis_checksum_mismatch_count"
+ t.binary "storage_configuration_digest"
+ t.integer "repositories_retrying_verification_count"
+ t.integer "wikis_retrying_verification_count"
+ t.integer "projects_count"
+ t.index ["geo_node_id"], name: "index_geo_node_statuses_on_geo_node_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "geo_nodes" do |t|
+ t.boolean "primary"
+ t.integer "oauth_application_id"
+ t.boolean "enabled", default: true, null: false
+ t.string "access_key"
+ t.string "encrypted_secret_access_key"
+ t.string "encrypted_secret_access_key_iv"
+ t.string "clone_url_prefix"
+ t.integer "files_max_capacity", default: 10, null: false
+ t.integer "repos_max_capacity", default: 25, null: false
+ t.string "url", null: false
+ t.string "selective_sync_type"
+ t.text "selective_sync_shards"
+ t.integer "verification_max_capacity", default: 100, null: false
+ t.integer "minimum_reverification_interval", default: 7, null: false
+ t.string "alternate_url"
+ t.index ["access_key"], name: "index_geo_nodes_on_access_key", using: :btree
+ t.index ["primary"], name: "index_geo_nodes_on_primary", using: :btree
+ t.index ["url"], name: "index_geo_nodes_on_url", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "geo_repositories_changed_events", id: :bigserial do |t|
+ t.integer "geo_node_id", null: false
+ t.index ["geo_node_id"], name: "index_geo_repositories_changed_events_on_geo_node_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_repository_created_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "repo_path", null: false
+ t.text "wiki_path"
+ t.text "project_name", null: false
+ t.index ["project_id"], name: "index_geo_repository_created_events_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_repository_deleted_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "deleted_path", null: false
+ t.text "deleted_wiki_path"
+ t.text "deleted_project_name", null: false
+ t.index ["project_id"], name: "index_geo_repository_deleted_events_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_repository_renamed_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "old_path_with_namespace", null: false
+ t.text "new_path_with_namespace", null: false
+ t.text "old_wiki_path_with_namespace", null: false
+ t.text "new_wiki_path_with_namespace", null: false
+ t.text "old_path", null: false
+ t.text "new_path", null: false
+ t.index ["project_id"], name: "index_geo_repository_renamed_events_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_repository_updated_events", id: :bigserial do |t|
+ t.integer "branches_affected", null: false
+ t.integer "tags_affected", null: false
+ t.integer "project_id", null: false
+ t.integer "source", limit: 2, null: false
+ t.boolean "new_branch", default: false, null: false
+ t.boolean "remove_branch", default: false, null: false
+ t.text "ref"
+ t.index ["project_id"], name: "index_geo_repository_updated_events_on_project_id", using: :btree
+ t.index ["source"], name: "index_geo_repository_updated_events_on_source", using: :btree
+ end
+
+ create_table_if_not_exists "geo_reset_checksum_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.index ["project_id"], name: "index_geo_reset_checksum_events_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "geo_upload_deleted_events", id: :bigserial do |t|
+ t.integer "upload_id", null: false
+ t.string "file_path", null: false
+ t.integer "model_id", null: false
+ t.string "model_type", null: false
+ t.string "uploader", null: false
+ t.index ["upload_id"], name: "index_geo_upload_deleted_events_on_upload_id", using: :btree
+ end
+
+ create_table_if_not_exists "gitlab_subscriptions", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.date "start_date"
+ t.date "end_date"
+ t.date "trial_ends_on"
+ t.integer "namespace_id"
+ t.integer "hosted_plan_id"
+ t.integer "max_seats_used", default: 0
+ t.integer "seats", default: 0
+ t.boolean "trial", default: false
+ t.index ["hosted_plan_id"], name: "index_gitlab_subscriptions_on_hosted_plan_id", using: :btree
+ t.index ["namespace_id"], name: "index_gitlab_subscriptions_on_namespace_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "historical_data" do |t|
+ t.date "date", null: false
+ t.integer "active_user_count"
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ end
+
+ create_table_if_not_exists "index_statuses" do |t|
+ t.integer "project_id", null: false
+ t.datetime "indexed_at"
+ t.text "note"
+ t.string "last_commit"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["project_id"], name: "index_index_statuses_on_project_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "insights" do |t|
+ t.integer "namespace_id", null: false
+ t.integer "project_id", null: false
+ t.index ["namespace_id"], name: "index_insights_on_namespace_id", using: :btree
+ t.index ["project_id"], name: "index_insights_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "issue_links" do |t|
+ t.integer "source_id", null: false
+ t.integer "target_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index %w[source_id target_id], name: "index_issue_links_on_source_id_and_target_id", unique: true, using: :btree
+ t.index ["source_id"], name: "index_issue_links_on_source_id", using: :btree
+ t.index ["target_id"], name: "index_issue_links_on_target_id", using: :btree
+ end
+
+ create_table_if_not_exists "jira_connect_installations", id: :bigserial do |t|
+ t.string "client_key"
+ t.string "encrypted_shared_secret"
+ t.string "encrypted_shared_secret_iv"
+ t.string "base_url"
+ t.index ["client_key"], name: "index_jira_connect_installations_on_client_key", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "jira_connect_subscriptions", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.bigint "jira_connect_installation_id", null: false
+ t.integer "namespace_id", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.index %w[jira_connect_installation_id namespace_id], name: "idx_jira_connect_subscriptions_on_installation_id_namespace_id", unique: true, using: :btree
+ t.index ["jira_connect_installation_id"], name: "idx_jira_connect_subscriptions_on_installation_id", using: :btree
+ t.index ["namespace_id"], name: "index_jira_connect_subscriptions_on_namespace_id", using: :btree
+ end
+
+ create_table_if_not_exists "ldap_group_links" do |t|
+ t.string "cn"
+ t.integer "group_access", null: false
+ t.integer "group_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.string "provider"
+ t.string "filter"
+ end
+
+ create_table_if_not_exists "licenses" do |t|
+ t.text "data", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ end
+
+ create_table_if_not_exists "namespace_statistics" do |t|
+ t.integer "namespace_id", null: false
+ t.integer "shared_runners_seconds", default: 0, null: false
+ t.datetime "shared_runners_seconds_last_reset"
+ t.index ["namespace_id"], name: "index_namespace_statistics_on_namespace_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "operations_feature_flag_scopes", id: :bigserial do |t|
+ t.bigint "feature_flag_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.boolean "active", null: false
+ t.string "environment_scope", default: "*", null: false
+ t.index %w[feature_flag_id environment_scope], name: "index_feature_flag_scopes_on_flag_id_and_environment_scope", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "operations_feature_flags", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.boolean "active", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "name", null: false
+ t.text "description"
+ t.index %w[project_id name], name: "index_operations_feature_flags_on_project_id_and_name", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "operations_feature_flags_clients", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.string "token", null: false
+ t.index %w[project_id token], name: "index_operations_feature_flags_clients_on_project_id_and_token", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "packages_maven_metadata", id: :bigserial do |t|
+ t.bigint "package_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "app_group", null: false
+ t.string "app_name", null: false
+ t.string "app_version"
+ t.string "path", limit: 512, null: false
+ t.index %w[package_id path], name: "index_packages_maven_metadata_on_package_id_and_path", using: :btree
+ end
+
+ create_table_if_not_exists "packages_package_files", id: :bigserial do |t|
+ t.bigint "package_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.bigint "size"
+ t.integer "file_type"
+ t.integer "file_store"
+ t.binary "file_md5"
+ t.binary "file_sha1"
+ t.string "file_name", null: false
+ t.text "file", null: false
+ t.index %w[package_id file_name], name: "index_packages_package_files_on_package_id_and_file_name", using: :btree
+ end
+
+ create_table_if_not_exists "packages_packages", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "name", null: false
+ t.string "version"
+ t.integer "package_type", limit: 2, null: false
+ t.index ["project_id"], name: "index_packages_packages_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "path_locks" do |t|
+ t.string "path", null: false
+ t.integer "project_id"
+ t.integer "user_id"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["path"], name: "index_path_locks_on_path", using: :btree
+ t.index ["project_id"], name: "index_path_locks_on_project_id", using: :btree
+ t.index ["user_id"], name: "index_path_locks_on_user_id", using: :btree
+ end
+
+ create_table_if_not_exists "plans" do |t|
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.string "name"
+ t.string "title"
+ t.integer "active_pipelines_limit"
+ t.integer "pipeline_size_limit"
+ t.index ["name"], name: "index_plans_on_name", using: :btree
+ end
+
+ create_table_if_not_exists "project_alerting_settings", primary_key: "project_id", id: :integer do |t|
+ t.string "encrypted_token", null: false
+ t.string "encrypted_token_iv", null: false
+ end
+
+ create_table_if_not_exists "project_feature_usages", primary_key: "project_id", id: :integer do |t|
+ t.datetime "jira_dvcs_cloud_last_sync_at"
+ t.datetime "jira_dvcs_server_last_sync_at"
+ t.index %w[jira_dvcs_cloud_last_sync_at project_id], name: "idx_proj_feat_usg_on_jira_dvcs_cloud_last_sync_at_and_proj_id", where: "(jira_dvcs_cloud_last_sync_at IS NOT NULL)", using: :btree
+ t.index %w[jira_dvcs_server_last_sync_at project_id], name: "idx_proj_feat_usg_on_jira_dvcs_server_last_sync_at_and_proj_id", where: "(jira_dvcs_server_last_sync_at IS NOT NULL)", using: :btree
+ t.index ["project_id"], name: "index_project_feature_usages_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "project_incident_management_settings", primary_key: "project_id", id: :integer do |t|
+ t.boolean "create_issue", default: false, null: false
+ t.boolean "send_email", default: true, null: false
+ t.text "issue_template_key"
+ end
+
+ create_table_if_not_exists "project_repository_states" do |t|
+ t.integer "project_id", null: false
+ t.binary "repository_verification_checksum"
+ t.binary "wiki_verification_checksum"
+ t.string "last_repository_verification_failure"
+ t.string "last_wiki_verification_failure"
+ t.datetime_with_timezone "repository_retry_at"
+ t.datetime_with_timezone "wiki_retry_at"
+ t.integer "repository_retry_count"
+ t.integer "wiki_retry_count"
+ t.datetime_with_timezone "last_repository_verification_ran_at"
+ t.datetime_with_timezone "last_wiki_verification_ran_at"
+ t.index ["last_repository_verification_failure"], name: "idx_repository_states_on_repository_failure_partial", where: "(last_repository_verification_failure IS NOT NULL)", using: :btree
+ t.index ["last_wiki_verification_failure"], name: "idx_repository_states_on_wiki_failure_partial", where: "(last_wiki_verification_failure IS NOT NULL)", using: :btree
+ t.index %w[project_id last_repository_verification_ran_at], name: "idx_repository_states_on_last_repository_verification_ran_at", where: "((repository_verification_checksum IS NOT NULL) AND (last_repository_verification_failure IS NULL))", using: :btree
+ t.index %w[project_id last_wiki_verification_ran_at], name: "idx_repository_states_on_last_wiki_verification_ran_at", where: "((wiki_verification_checksum IS NOT NULL) AND (last_wiki_verification_failure IS NULL))", using: :btree
+ t.index ["project_id"], name: "idx_repository_states_outdated_checksums", where: "(((repository_verification_checksum IS NULL) AND (last_repository_verification_failure IS NULL)) OR ((wiki_verification_checksum IS NULL) AND (last_wiki_verification_failure IS NULL)))", using: :btree
+ t.index ["project_id"], name: "index_project_repository_states_on_project_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "project_tracing_settings", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.string "external_url", null: false
+ t.index ["project_id"], name: "index_project_tracing_settings_on_project_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "prometheus_alert_events", id: :bigserial do |t|
+ t.integer "project_id", null: false
+ t.integer "prometheus_alert_id", null: false
+ t.datetime_with_timezone "started_at", null: false
+ t.datetime_with_timezone "ended_at"
+ t.integer "status", limit: 2
+ t.string "payload_key"
+ t.index %w[project_id status], name: "index_prometheus_alert_events_on_project_id_and_status", using: :btree
+ t.index %w[prometheus_alert_id payload_key], name: "index_prometheus_alert_event_scoped_payload_key", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "prometheus_alerts" do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.float "threshold", null: false
+ t.integer "operator", null: false
+ t.integer "environment_id", null: false
+ t.integer "project_id", null: false
+ t.integer "prometheus_metric_id", null: false
+ t.index ["environment_id"], name: "index_prometheus_alerts_on_environment_id", using: :btree
+ t.index %w[project_id prometheus_metric_id environment_id], name: "index_prometheus_alerts_metric_environment", unique: true, using: :btree
+ t.index ["prometheus_metric_id"], name: "index_prometheus_alerts_on_prometheus_metric_id", using: :btree
+ end
+
+ create_table_if_not_exists "protected_branch_unprotect_access_levels" do |t|
+ t.integer "protected_branch_id", null: false
+ t.integer "access_level", default: 40
+ t.integer "user_id"
+ t.integer "group_id"
+ t.index ["group_id"], name: "index_protected_branch_unprotect_access_levels_on_group_id", using: :btree
+ t.index ["protected_branch_id"], name: "index_protected_branch_unprotect_access", using: :btree
+ t.index ["user_id"], name: "index_protected_branch_unprotect_access_levels_on_user_id", using: :btree
+ end
+
+ create_table_if_not_exists "protected_environment_deploy_access_levels" do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "access_level", default: 40
+ t.integer "protected_environment_id", null: false
+ t.integer "user_id"
+ t.integer "group_id"
+ t.index ["group_id"], name: "index_protected_environment_deploy_access_levels_on_group_id", using: :btree
+ t.index ["protected_environment_id"], name: "index_protected_environment_deploy_access", using: :btree
+ t.index ["user_id"], name: "index_protected_environment_deploy_access_levels_on_user_id", using: :btree
+ end
+
+ create_table_if_not_exists "protected_environments" do |t|
+ t.integer "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "name", null: false
+ t.index %w[project_id name], name: "index_protected_environments_on_project_id_and_name", unique: true, using: :btree
+ t.index ["project_id"], name: "index_protected_environments_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "push_rules" do |t|
+ t.string "force_push_regex"
+ t.string "delete_branch_regex"
+ t.string "commit_message_regex"
+ t.boolean "deny_delete_tag"
+ t.integer "project_id"
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.string "author_email_regex"
+ t.boolean "member_check", default: false, null: false
+ t.string "file_name_regex"
+ t.boolean "is_sample", default: false
+ t.integer "max_file_size", default: 0, null: false
+ t.boolean "prevent_secrets", default: false, null: false
+ t.string "branch_name_regex"
+ t.boolean "reject_unsigned_commits"
+ t.boolean "commit_committer_check"
+ t.boolean "regexp_uses_re2", default: true
+ t.string "commit_message_negative_regex"
+ t.index ["is_sample"], name: "index_push_rules_on_is_sample", where: "is_sample", using: :btree
+ t.index ["project_id"], name: "index_push_rules_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "reviews", id: :bigserial do |t|
+ t.integer "author_id"
+ t.integer "merge_request_id", null: false
+ t.integer "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.index ["author_id"], name: "index_reviews_on_author_id", using: :btree
+ t.index ["merge_request_id"], name: "index_reviews_on_merge_request_id", using: :btree
+ t.index ["project_id"], name: "index_reviews_on_project_id", using: :btree
+ end
+
+ create_table_if_not_exists "saml_providers" do |t|
+ t.integer "group_id", null: false
+ t.boolean "enabled", null: false
+ t.string "certificate_fingerprint", null: false
+ t.string "sso_url", null: false
+ t.boolean "enforced_sso", default: false, null: false
+ t.boolean "enforced_group_managed_accounts", default: false, null: false
+ t.index ["group_id"], name: "index_saml_providers_on_group_id", using: :btree
+ end
+
+ create_table_if_not_exists "scim_oauth_access_tokens" do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "group_id", null: false
+ t.string "token_encrypted", null: false
+ t.index %w[group_id token_encrypted], name: "index_scim_oauth_access_tokens_on_group_id_and_token_encrypted", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "slack_integrations" do |t|
+ t.integer "service_id", null: false
+ t.string "team_id", null: false
+ t.string "team_name", null: false
+ t.string "alias", null: false
+ t.string "user_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["service_id"], name: "index_slack_integrations_on_service_id", using: :btree
+ t.index %w[team_id alias], name: "index_slack_integrations_on_team_id_and_alias", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "smartcard_identities", id: :bigserial do |t|
+ t.integer "user_id", null: false
+ t.string "subject", null: false
+ t.string "issuer", null: false
+ t.index %w[subject issuer], name: "index_smartcard_identities_on_subject_and_issuer", unique: true, using: :btree
+ t.index ["user_id"], name: "index_smartcard_identities_on_user_id", using: :btree
+ end
+
+ create_table_if_not_exists "software_license_policies" do |t|
+ t.integer "project_id", null: false
+ t.integer "software_license_id", null: false
+ t.integer "approval_status", default: 0, null: false
+ t.index %w[project_id software_license_id], name: "index_software_license_policies_unique_per_project", unique: true, using: :btree
+ t.index ["software_license_id"], name: "index_software_license_policies_on_software_license_id", using: :btree
+ end
+
+ create_table_if_not_exists "software_licenses" do |t|
+ t.string "name", null: false
+ t.index ["name"], name: "index_software_licenses_on_name", using: :btree
+ end
+
+ create_table_if_not_exists "users_ops_dashboard_projects", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "user_id", null: false
+ t.integer "project_id", null: false
+ t.index ["project_id"], name: "index_users_ops_dashboard_projects_on_project_id", using: :btree
+ t.index %w[user_id project_id], name: "index_users_ops_dashboard_projects_on_user_id_and_project_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "vulnerability_feedback" do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "feedback_type", limit: 2, null: false
+ t.integer "category", limit: 2, null: false
+ t.integer "project_id", null: false
+ t.integer "author_id", null: false
+ t.integer "pipeline_id"
+ t.integer "issue_id"
+ t.string "project_fingerprint", limit: 40, null: false
+ t.integer "merge_request_id"
+ t.index ["author_id"], name: "index_vulnerability_feedback_on_author_id", using: :btree
+ t.index ["issue_id"], name: "index_vulnerability_feedback_on_issue_id", using: :btree
+ t.index ["merge_request_id"], name: "index_vulnerability_feedback_on_merge_request_id", using: :btree
+ t.index ["pipeline_id"], name: "index_vulnerability_feedback_on_pipeline_id", using: :btree
+ t.index %w[project_id category feedback_type project_fingerprint], name: "vulnerability_feedback_unique_idx", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "vulnerability_identifiers", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.binary "fingerprint", null: false
+ t.string "external_type", null: false
+ t.string "external_id", null: false
+ t.string "name", null: false
+ t.text "url"
+ t.index %w[project_id fingerprint], name: "index_vulnerability_identifiers_on_project_id_and_fingerprint", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "vulnerability_occurrence_identifiers", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.bigint "occurrence_id", null: false
+ t.bigint "identifier_id", null: false
+ t.index ["identifier_id"], name: "index_vulnerability_occurrence_identifiers_on_identifier_id", using: :btree
+ t.index %w[occurrence_id identifier_id], name: "index_vulnerability_occurrence_identifiers_on_unique_keys", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "vulnerability_occurrence_pipelines", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.bigint "occurrence_id", null: false
+ t.integer "pipeline_id", null: false
+ t.index %w[occurrence_id pipeline_id], name: "vulnerability_occurrence_pipelines_on_unique_keys", unique: true, using: :btree
+ t.index ["pipeline_id"], name: "index_vulnerability_occurrence_pipelines_on_pipeline_id", using: :btree
+ end
+
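+ # Occurrences are deduplicated by the composite unique index on
+ # (project_id, primary_identifier_id, location_fingerprint, scanner_id);
+ # uuid carries a second, externally usable unique handle.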
+ create_table_if_not_exists "vulnerability_occurrences", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "severity", limit: 2, null: false
+ t.integer "confidence", limit: 2, null: false
+ t.integer "report_type", limit: 2, null: false
+ t.integer "project_id", null: false
+ t.bigint "scanner_id", null: false
+ t.bigint "primary_identifier_id", null: false
+ t.binary "project_fingerprint", null: false
+ t.binary "location_fingerprint", null: false
+ t.string "uuid", limit: 36, null: false
+ t.string "name", null: false
+ t.string "metadata_version", null: false
+ t.text "raw_metadata", null: false
+ t.index ["primary_identifier_id"], name: "index_vulnerability_occurrences_on_primary_identifier_id", using: :btree
+ t.index %w[project_id primary_identifier_id location_fingerprint scanner_id], name: "index_vulnerability_occurrences_on_unique_keys", unique: true, using: :btree
+ t.index ["scanner_id"], name: "index_vulnerability_occurrences_on_scanner_id", using: :btree
+ t.index ["uuid"], name: "index_vulnerability_occurrences_on_uuid", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "vulnerability_scanners", id: :bigserial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.string "external_id", null: false
+ t.string "name", null: false
+ t.index %w[project_id external_id], name: "index_vulnerability_scanners_on_project_id_and_external_id", unique: true, using: :btree
+ end
+
+ create_table_if_not_exists "dependency_proxy_blobs", id: :serial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.text "file", null: false
+ t.string "file_name", null: false
+ t.integer "file_store"
+ t.integer "group_id", null: false
+ t.bigint "size"
+ t.datetime_with_timezone "updated_at", null: false
+ t.index %w[group_id file_name], name: "index_dependency_proxy_blobs_on_group_id_and_file_name", using: :btree
+ end
+
+ create_table_if_not_exists "dependency_proxy_group_settings", id: :serial do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.boolean "enabled", default: false, null: false
+ t.integer "group_id", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.index ["group_id"], name: "index_dependency_proxy_group_settings_on_group_id", using: :btree
+ end
+ end
+
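+ # Reverses the table creation above; drop_table_if_exists is presumably a
+ # guarded DROP TABLE, so re-running the down migration is safe when some
+ # tables are already gone. Tables with inbound FKs (e.g.
+ # jira_connect_subscriptions) are dropped before their parents.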
+ def remove_tables
+ drop_table_if_exists "approval_merge_request_rule_sources"
+ drop_table_if_exists "approval_merge_request_rules"
+ drop_table_if_exists "approval_merge_request_rules_approved_approvers"
+ drop_table_if_exists "approval_merge_request_rules_groups"
+ drop_table_if_exists "approval_merge_request_rules_users"
+ drop_table_if_exists "approval_project_rules"
+ drop_table_if_exists "approval_project_rules_groups"
+ drop_table_if_exists "approval_project_rules_users"
+ drop_table_if_exists "approvals"
+ drop_table_if_exists "approver_groups"
+ drop_table_if_exists "approvers"
+ drop_table_if_exists "board_assignees"
+ drop_table_if_exists "board_labels"
+ drop_table_if_exists "ci_sources_pipelines"
+ drop_table_if_exists "design_management_designs_versions"
+ drop_table_if_exists "design_management_versions"
+ drop_table_if_exists "design_management_designs"
+ drop_table_if_exists "draft_notes"
+ drop_table_if_exists "elasticsearch_indexed_namespaces"
+ drop_table_if_exists "elasticsearch_indexed_projects"
+ drop_table_if_exists "epic_issues"
+ drop_table_if_exists "epic_metrics"
+ drop_table_if_exists "epics"
+ drop_table_if_exists "geo_cache_invalidation_events"
+ drop_table_if_exists "geo_event_log"
+ drop_table_if_exists "geo_hashed_storage_attachments_events"
+ drop_table_if_exists "geo_hashed_storage_migrated_events"
+ drop_table_if_exists "geo_job_artifact_deleted_events"
+ drop_table_if_exists "geo_lfs_object_deleted_events"
+ drop_table_if_exists "geo_node_namespace_links"
+ drop_table_if_exists "geo_node_statuses"
+ drop_table_if_exists "geo_nodes"
+ drop_table_if_exists "geo_repositories_changed_events"
+ drop_table_if_exists "geo_repository_created_events"
+ drop_table_if_exists "geo_repository_deleted_events"
+ drop_table_if_exists "geo_repository_renamed_events"
+ drop_table_if_exists "geo_repository_updated_events"
+ drop_table_if_exists "geo_reset_checksum_events"
+ drop_table_if_exists "geo_upload_deleted_events"
+ drop_table_if_exists "gitlab_subscriptions"
+ drop_table_if_exists "historical_data"
+ drop_table_if_exists "index_statuses"
+ drop_table_if_exists "insights"
+ drop_table_if_exists "issue_links"
+ drop_table_if_exists "jira_connect_subscriptions"
+ drop_table_if_exists "jira_connect_installations"
+ drop_table_if_exists "ldap_group_links"
+ drop_table_if_exists "licenses"
+ drop_table_if_exists "namespace_statistics"
+ drop_table_if_exists "operations_feature_flag_scopes"
+ drop_table_if_exists "operations_feature_flags"
+ drop_table_if_exists "operations_feature_flags_clients"
+ drop_table_if_exists "packages_maven_metadata"
+ drop_table_if_exists "packages_package_files"
+ drop_table_if_exists "packages_packages"
+ drop_table_if_exists "path_locks"
+ drop_table_if_exists "plans"
+ drop_table_if_exists "project_alerting_settings"
+ drop_table_if_exists "project_feature_usages"
+ drop_table_if_exists "project_incident_management_settings"
+ drop_table_if_exists "project_repository_states"
+ drop_table_if_exists "project_tracing_settings"
+ drop_table_if_exists "prometheus_alert_events"
+ drop_table_if_exists "prometheus_alerts"
+ drop_table_if_exists "protected_branch_unprotect_access_levels"
+ drop_table_if_exists "protected_environment_deploy_access_levels"
+ drop_table_if_exists "protected_environments"
+ drop_table_if_exists "push_rules"
+ drop_table_if_exists "reviews"
+ drop_table_if_exists "saml_providers"
+ drop_table_if_exists "scim_oauth_access_tokens"
+ drop_table_if_exists "slack_integrations"
+ drop_table_if_exists "smartcard_identities"
+ drop_table_if_exists "software_license_policies"
+ drop_table_if_exists "software_licenses"
+ drop_table_if_exists "users_ops_dashboard_projects"
+ drop_table_if_exists "vulnerability_feedback"
+ drop_table_if_exists "vulnerability_identifiers"
+ drop_table_if_exists "vulnerability_occurrence_identifiers"
+ drop_table_if_exists "vulnerability_occurrence_pipelines"
+ drop_table_if_exists "vulnerability_occurrences"
+ drop_table_if_exists "vulnerability_scanners"
+ drop_table_if_exists "dependency_proxy_blobs"
+ drop_table_if_exists "dependency_proxy_group_settings"
+ end
+
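+ # add_concurrent_foreign_key is GitLab's migration helper that, on
+ # PostgreSQL, adds the constraint as NOT VALID and validates it in a
+ # separate step, avoiding a long lock on busy tables. The explicit `name:`
+ # arguments keep the constraint names identical to the EE schema.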
+ def add_missing_foreign_keys
+ add_concurrent_foreign_key("application_settings", "namespaces", column: "custom_project_templates_group_id", name: "fk_rails_b53e481273", on_delete: :nullify)
+ add_concurrent_foreign_key("application_settings", "projects", column: "file_template_project_id", name: "fk_ec757bd087", on_delete: :nullify)
+ add_concurrent_foreign_key("approval_merge_request_rule_sources", "approval_merge_request_rules", column: "approval_merge_request_rule_id", name: "fk_rails_e605a04f76", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rule_sources", "approval_project_rules", column: "approval_project_rule_id", name: "fk_rails_64e8ed3c7e", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules", "merge_requests", column: "merge_request_id", name: "fk_rails_004ce82224", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules_approved_approvers", "approval_merge_request_rules", column: "approval_merge_request_rule_id", name: "fk_rails_6577725edb", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules_approved_approvers", "users", column: "user_id", name: "fk_rails_8dc94cff4d", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules_groups", "approval_merge_request_rules", column: "approval_merge_request_rule_id", name: "fk_rails_5b2ecf6139", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules_groups", "namespaces", column: "group_id", name: "fk_rails_2020a7124a", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules_users", "approval_merge_request_rules", column: "approval_merge_request_rule_id", name: "fk_rails_80e6801803", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_merge_request_rules_users", "users", column: "user_id", name: "fk_rails_bc8972fa55", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_project_rules", "projects", column: "project_id", name: "fk_rails_5fb4dd100b", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_project_rules_groups", "approval_project_rules", column: "approval_project_rule_id", name: "fk_rails_9071e863d1", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_project_rules_groups", "namespaces", column: "group_id", name: "fk_rails_396841e79e", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_project_rules_users", "approval_project_rules", column: "approval_project_rule_id", name: "fk_rails_b9e9394efb", on_delete: :cascade)
+ add_concurrent_foreign_key("approval_project_rules_users", "users", column: "user_id", name: "fk_rails_f365da8250", on_delete: :cascade)
+ add_concurrent_foreign_key("approvals", "merge_requests", column: "merge_request_id", name: "fk_310d714958", on_delete: :cascade)
+ add_concurrent_foreign_key("approver_groups", "namespaces", column: "group_id", name: "fk_rails_1cdcbd7723", on_delete: :cascade)
+ add_concurrent_foreign_key("board_assignees", "boards", column: "board_id", name: "fk_rails_3f6f926bd5", on_delete: :cascade)
+ add_concurrent_foreign_key("board_assignees", "users", column: "assignee_id", name: "fk_rails_1c0ff59e82", on_delete: :cascade)
+ add_concurrent_foreign_key("board_labels", "boards", column: "board_id", name: "fk_rails_9374a16edd", on_delete: :cascade)
+ add_concurrent_foreign_key("board_labels", "labels", column: "label_id", name: "fk_rails_362b0600a3", on_delete: :cascade)
+ add_concurrent_foreign_key("ci_sources_pipelines", "ci_builds", column: "source_job_id", name: "fk_be5624bf37", on_delete: :cascade)
+ add_concurrent_foreign_key("ci_sources_pipelines", "ci_pipelines", column: "pipeline_id", name: "fk_e1bad85861", on_delete: :cascade)
+ add_concurrent_foreign_key("ci_sources_pipelines", "ci_pipelines", column: "source_pipeline_id", name: "fk_d4e29af7d7", on_delete: :cascade)
+ add_concurrent_foreign_key("ci_sources_pipelines", "projects", column: "source_project_id", name: "fk_acd9737679", on_delete: :cascade)
+ add_concurrent_foreign_key("ci_sources_pipelines", "projects", column: "project_id", name: "fk_1e53c97c0a", on_delete: :cascade)
+ add_concurrent_foreign_key("design_management_designs", "issues", column: "issue_id", name: "fk_rails_bfe283ec3c", on_delete: :cascade)
+ add_concurrent_foreign_key("design_management_designs", "projects", column: "project_id", name: "fk_rails_4bb1073360", on_delete: :cascade)
+ add_concurrent_foreign_key("design_management_designs_versions", "design_management_designs", column: "design_id", on_delete: :cascade)
+ add_concurrent_foreign_key("design_management_designs_versions", "design_management_versions", column: "version_id", on_delete: :cascade)
+ add_concurrent_foreign_key("draft_notes", "merge_requests", column: "merge_request_id", name: "fk_rails_e753681674", on_delete: :cascade)
+ add_concurrent_foreign_key("draft_notes", "users", column: "author_id", name: "fk_rails_2a8dac9901", on_delete: :cascade)
+ add_concurrent_foreign_key("elasticsearch_indexed_namespaces", "namespaces", column: "namespace_id", name: "fk_rails_bdcf044f37", on_delete: :cascade)
+ add_concurrent_foreign_key("elasticsearch_indexed_projects", "projects", column: "project_id", name: "fk_rails_bd13bbdc3d", on_delete: :cascade)
+ add_concurrent_foreign_key("epic_issues", "epics", column: "epic_id", name: "fk_rails_5d942936b4", on_delete: :cascade)
+ add_concurrent_foreign_key("epic_issues", "issues", column: "issue_id", name: "fk_rails_4209981af6", on_delete: :cascade)
+ add_concurrent_foreign_key("epic_metrics", "epics", column: "epic_id", name: "fk_rails_d071904753", on_delete: :cascade)
+ add_concurrent_foreign_key("epics", "epics", column: "parent_id", name: "fk_25b99c1be3", on_delete: :cascade)
+ add_concurrent_foreign_key("epics", "milestones", column: "milestone_id", name: "fk_rails_1bf671ebb7", on_delete: :nullify)
+ add_concurrent_foreign_key("epics", "namespaces", column: "group_id", name: "fk_f081aa4489", on_delete: :cascade)
+ add_concurrent_foreign_key("epics", "users", column: "assignee_id", name: "fk_dccd3f98fc", on_delete: :nullify)
+ add_concurrent_foreign_key("epics", "users", column: "author_id", name: "fk_3654b61b03", on_delete: :cascade)
+ add_concurrent_foreign_key("epics", "users", column: "closed_by_id", name: "fk_aa5798e761", on_delete: :nullify)
+ add_concurrent_foreign_key("geo_event_log", "geo_cache_invalidation_events", column: "cache_invalidation_event_id", name: "fk_42c3b54bed", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_hashed_storage_migrated_events", column: "hashed_storage_migrated_event_id", name: "fk_27548c6db3", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_job_artifact_deleted_events", column: "job_artifact_deleted_event_id", name: "fk_176d3fbb5d", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_lfs_object_deleted_events", column: "lfs_object_deleted_event_id", name: "fk_d5af95fcd9", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_repositories_changed_events", column: "repositories_changed_event_id", name: "fk_4a99ebfd60", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_repository_created_events", column: "repository_created_event_id", name: "fk_9b9afb1916", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_repository_deleted_events", column: "repository_deleted_event_id", name: "fk_c4b1c1f66e", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_repository_renamed_events", column: "repository_renamed_event_id", name: "fk_86c84214ec", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_repository_updated_events", column: "repository_updated_event_id", name: "fk_78a6492f68", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_reset_checksum_events", column: "reset_checksum_event_id", name: "fk_cff7185ad2", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_event_log", "geo_upload_deleted_events", column: "upload_deleted_event_id", name: "fk_c1f241c70d", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_hashed_storage_attachments_events", "projects", column: "project_id", name: "fk_rails_d496b088e9", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_hashed_storage_migrated_events", "projects", column: "project_id", name: "fk_rails_687ed7d7c5", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_node_namespace_links", "geo_nodes", column: "geo_node_id", name: "fk_rails_546bf08d3e", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_node_namespace_links", "namespaces", column: "namespace_id", name: "fk_rails_41ff5fb854", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_node_statuses", "geo_nodes", column: "geo_node_id", name: "fk_rails_0ecc699c2a", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_repositories_changed_events", "geo_nodes", column: "geo_node_id", name: "fk_rails_75ec0fefcc", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_repository_created_events", "projects", column: "project_id", name: "fk_rails_1f49e46a61", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_repository_renamed_events", "projects", column: "project_id", name: "fk_rails_4e6524febb", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_repository_updated_events", "projects", column: "project_id", name: "fk_rails_2b70854c08", on_delete: :cascade)
+ add_concurrent_foreign_key("geo_reset_checksum_events", "projects", column: "project_id", name: "fk_rails_910a06f12b", on_delete: :cascade)
+ add_concurrent_foreign_key("gitlab_subscriptions", "namespaces", column: "namespace_id", name: "fk_e2595d00a1", on_delete: :cascade)
+ add_concurrent_foreign_key("gitlab_subscriptions", "plans", column: "hosted_plan_id", name: "fk_bd0c4019c3", on_delete: :cascade)
+ add_concurrent_foreign_key("identities", "saml_providers", column: "saml_provider_id", name: "fk_aade90f0fc", on_delete: :cascade)
+ add_concurrent_foreign_key("index_statuses", "projects", column: "project_id", name: "fk_74b2492545", on_delete: :cascade)
+ add_concurrent_foreign_key("insights", "namespaces", column: "namespace_id", name: "fk_rails_5c4391f60a", on_delete: nil)
+ add_concurrent_foreign_key("insights", "projects", column: "project_id", name: "fk_rails_f36fda3932", on_delete: nil)
+ add_concurrent_foreign_key("issue_links", "issues", column: "source_id", name: "fk_c900194ff2", on_delete: :cascade)
+ add_concurrent_foreign_key("issue_links", "issues", column: "target_id", name: "fk_e71bb44f1f", on_delete: :cascade)
+ add_concurrent_foreign_key("lists", "milestones", column: "milestone_id", name: "fk_rails_baed5f39b7", on_delete: :cascade)
+ add_concurrent_foreign_key("lists", "users", column: "user_id", name: "fk_d6cf4279f7", on_delete: :cascade)
+ add_concurrent_foreign_key("namespace_statistics", "namespaces", column: "namespace_id", name: "fk_rails_0062050394", on_delete: :cascade)
+ add_concurrent_foreign_key("namespaces", "namespaces", column: "custom_project_templates_group_id", name: "fk_e7a0b20a6b", on_delete: :nullify)
+ add_concurrent_foreign_key("namespaces", "plans", column: "plan_id", name: "fk_fdd12e5b80", on_delete: :nullify)
+ add_concurrent_foreign_key("namespaces", "projects", column: "file_template_project_id", name: "fk_319256d87a", on_delete: :nullify)
+ add_concurrent_foreign_key("notes", "reviews", column: "review_id", name: "fk_2e82291620", on_delete: :nullify)
+ add_concurrent_foreign_key("operations_feature_flag_scopes", "operations_feature_flags", column: "feature_flag_id", name: "fk_rails_a50a04d0a4", on_delete: :cascade)
+ add_concurrent_foreign_key("operations_feature_flags", "projects", column: "project_id", name: "fk_rails_648e241be7", on_delete: :cascade)
+ add_concurrent_foreign_key("operations_feature_flags_clients", "projects", column: "project_id", name: "fk_rails_6650ed902c", on_delete: :cascade)
+ add_concurrent_foreign_key("packages_maven_metadata", "packages_packages", column: "package_id", name: "fk_be88aed360", on_delete: :cascade)
+ add_concurrent_foreign_key("packages_package_files", "packages_packages", column: "package_id", name: "fk_86f0f182f8", on_delete: :cascade)
+ add_concurrent_foreign_key("packages_packages", "projects", column: "project_id", name: "fk_rails_e1ac527425", on_delete: :cascade)
+ add_concurrent_foreign_key("path_locks", "projects", column: "project_id", name: "fk_5265c98f24", on_delete: :cascade)
+ add_concurrent_foreign_key("path_locks", "users", column: "user_id", name: "fk_rails_762cdcf942", on_delete: nil)
+ add_concurrent_foreign_key("project_alerting_settings", "projects", column: "project_id", name: "fk_rails_27a84b407d", on_delete: :cascade)
+ add_concurrent_foreign_key("project_feature_usages", "projects", column: "project_id", name: "fk_rails_c22a50024b", on_delete: :cascade)
+ add_concurrent_foreign_key("project_incident_management_settings", "projects", column: "project_id", name: "fk_rails_9c2ea1b7dd", on_delete: :cascade)
+ add_concurrent_foreign_key("project_repository_states", "projects", column: "project_id", name: "fk_rails_0f2298ca8a", on_delete: :cascade)
+ add_concurrent_foreign_key("project_tracing_settings", "projects", column: "project_id", name: "fk_rails_fe56f57fc6", on_delete: :cascade)
+ add_concurrent_foreign_key("prometheus_alert_events", "projects", column: "project_id", name: "fk_rails_4675865839", on_delete: :cascade)
+ add_concurrent_foreign_key("prometheus_alert_events", "prometheus_alerts", column: "prometheus_alert_id", name: "fk_rails_106f901176", on_delete: :cascade)
+ add_concurrent_foreign_key("prometheus_alerts", "environments", column: "environment_id", name: "fk_rails_6d9b283465", on_delete: :cascade)
+ add_concurrent_foreign_key("prometheus_alerts", "projects", column: "project_id", name: "fk_rails_f0e8db86aa", on_delete: :cascade)
+ add_concurrent_foreign_key("prometheus_alerts", "prometheus_metrics", column: "prometheus_metric_id", name: "fk_rails_e6351447ec", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_branch_merge_access_levels", "namespaces", column: "group_id", name: "fk_98f3d044fe", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_branch_merge_access_levels", "users", column: "user_id", name: "fk_rails_5ffb4f3590", on_delete: nil)
+ add_concurrent_foreign_key("protected_branch_push_access_levels", "namespaces", column: "group_id", name: "fk_7111b68cdb", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_branch_push_access_levels", "users", column: "user_id", name: "fk_rails_8dcb712d65", on_delete: nil)
+ add_concurrent_foreign_key("protected_branch_unprotect_access_levels", "namespaces", column: "group_id", name: "fk_rails_5be1abfc25", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_branch_unprotect_access_levels", "protected_branches", column: "protected_branch_id", name: "fk_rails_e9eb8dc025", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_branch_unprotect_access_levels", "users", column: "user_id", name: "fk_rails_2d2aba21ef", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_environment_deploy_access_levels", "namespaces", column: "group_id", name: "fk_rails_45cc02a931", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_environment_deploy_access_levels", "protected_environments", column: "protected_environment_id", name: "fk_rails_898a13b650", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_environment_deploy_access_levels", "users", column: "user_id", name: "fk_rails_5b9f6970fe", on_delete: :cascade)
+ add_concurrent_foreign_key("protected_environments", "projects", column: "project_id", name: "fk_rails_a354313d11", on_delete: :cascade)
+ add_concurrent_foreign_key("push_rules", "projects", column: "project_id", name: "fk_83b29894de", on_delete: :cascade)
+ add_concurrent_foreign_key("resource_label_events", "epics", column: "epic_id", name: "fk_rails_75efb0a653", on_delete: :cascade)
+ add_concurrent_foreign_key("reviews", "merge_requests", column: "merge_request_id", name: "fk_rails_5ca11d8c31", on_delete: :cascade)
+ add_concurrent_foreign_key("reviews", "projects", column: "project_id", name: "fk_rails_64798be025", on_delete: :cascade)
+ add_concurrent_foreign_key("reviews", "users", column: "author_id", name: "fk_rails_29e6f859c4", on_delete: :nullify)
+ add_concurrent_foreign_key("saml_providers", "namespaces", column: "group_id", name: "fk_rails_306d459be7", on_delete: :cascade)
+ add_concurrent_foreign_key("scim_oauth_access_tokens", "namespaces", column: "group_id", name: "fk_rails_c84404fb6c", on_delete: :cascade)
+ add_concurrent_foreign_key("slack_integrations", "services", column: "service_id", name: "fk_rails_73db19721a", on_delete: :cascade)
+ add_concurrent_foreign_key("smartcard_identities", "users", column: "user_id", name: "fk_rails_4689f889a9", on_delete: :cascade)
+ add_concurrent_foreign_key("software_license_policies", "projects", column: "project_id", name: "fk_rails_87b2247ce5", on_delete: :cascade)
+ add_concurrent_foreign_key("software_license_policies", "software_licenses", column: "software_license_id", name: "fk_rails_7a7a2a92de", on_delete: :cascade)
+ add_concurrent_foreign_key("users", "namespaces", column: "managing_group_id", name: "fk_a4b8fefe3e", on_delete: :nullify)
+ add_concurrent_foreign_key("users_ops_dashboard_projects", "projects", column: "project_id", name: "fk_rails_9b4ebf005b", on_delete: :cascade)
+ add_concurrent_foreign_key("users_ops_dashboard_projects", "users", column: "user_id", name: "fk_rails_220a0562db", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_feedback", "ci_pipelines", column: "pipeline_id", name: "fk_rails_20976e6fd9", on_delete: :nullify)
+ add_concurrent_foreign_key("vulnerability_feedback", "issues", column: "issue_id", name: "fk_rails_8c77e5891a", on_delete: :nullify)
+ add_concurrent_foreign_key("vulnerability_feedback", "merge_requests", column: "merge_request_id", name: "fk_563ff1912e", on_delete: :nullify)
+ add_concurrent_foreign_key("vulnerability_feedback", "projects", column: "project_id", name: "fk_rails_debd54e456", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_feedback", "users", column: "author_id", name: "fk_rails_472f69b043", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_identifiers", "projects", column: "project_id", name: "fk_rails_a67a16c885", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrence_identifiers", "vulnerability_identifiers", column: "identifier_id", name: "fk_rails_be2e49e1d0", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrence_identifiers", "vulnerability_occurrences", column: "occurrence_id", name: "fk_rails_e4ef6d027c", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrence_pipelines", "ci_pipelines", column: "pipeline_id", name: "fk_rails_6421e35d7d", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrence_pipelines", "vulnerability_occurrences", column: "occurrence_id", name: "fk_rails_dc3ae04693", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrences", "projects", column: "project_id", name: "fk_rails_90fed4faba", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrences", "vulnerability_identifiers", column: "primary_identifier_id", name: "fk_rails_c8661a61eb", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_occurrences", "vulnerability_scanners", column: "scanner_id", name: "fk_rails_bf5b788ca7", on_delete: :cascade)
+ add_concurrent_foreign_key("vulnerability_scanners", "projects", column: "project_id", name: "fk_rails_5c9d42a221", on_delete: :cascade)
+ add_concurrent_foreign_key("dependency_proxy_blobs", "namespaces", column: "group_id", on_delete: :cascade)
+ add_concurrent_foreign_key("dependency_proxy_group_settings", "namespaces", column: "group_id", on_delete: :cascade)
+ add_concurrent_foreign_key("jira_connect_subscriptions", "jira_connect_installations", column: "jira_connect_installation_id", on_delete: :cascade)
+ add_concurrent_foreign_key("jira_connect_subscriptions", "namespaces", column: "namespace_id", on_delete: :cascade)
+
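+ # CE already carries this FK under an auto-generated name; it is dropped
+ # and recreated here, presumably so the constraint name matches EE.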
+ remove_foreign_key_without_error("protected_tag_create_access_levels", column: :group_id)
+ add_concurrent_foreign_key("protected_tag_create_access_levels", "namespaces", column: :group_id, name: "fk_b4eb82fe3c", on_delete: :cascade)
+ end
+
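+ # Down path: remove_foreign_key_without_error mirrors the additions above
+ # and, as the name suggests, ignores constraints that are already absent,
+ # keeping the rollback idempotent.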
+ def remove_foreign_keys
+ remove_foreign_key_without_error("application_settings", column: "custom_project_templates_group_id")
+ remove_foreign_key_without_error("application_settings", column: "file_template_project_id")
+ remove_foreign_key_without_error("approval_merge_request_rule_sources", column: "approval_merge_request_rule_id")
+ remove_foreign_key_without_error("approval_merge_request_rule_sources", column: "approval_project_rule_id")
+ remove_foreign_key_without_error("approval_merge_request_rules", column: "merge_request_id")
+ remove_foreign_key_without_error("approval_merge_request_rules_approved_approvers", column: "approval_merge_request_rule_id")
+ remove_foreign_key_without_error("approval_merge_request_rules_approved_approvers", column: "user_id")
+ remove_foreign_key_without_error("approval_merge_request_rules_groups", column: "approval_merge_request_rule_id")
+ remove_foreign_key_without_error("approval_merge_request_rules_groups", column: "group_id")
+ remove_foreign_key_without_error("approval_merge_request_rules_users", column: "approval_merge_request_rule_id")
+ remove_foreign_key_without_error("approval_merge_request_rules_users", column: "user_id")
+ remove_foreign_key_without_error("approval_project_rules", column: "project_id")
+ remove_foreign_key_without_error("approval_project_rules_groups", column: "approval_project_rule_id")
+ remove_foreign_key_without_error("approval_project_rules_groups", column: "group_id")
+ remove_foreign_key_without_error("approval_project_rules_users", column: "approval_project_rule_id")
+ remove_foreign_key_without_error("approval_project_rules_users", column: "user_id")
+ remove_foreign_key_without_error("approvals", column: "merge_request_id")
+ remove_foreign_key_without_error("approver_groups", column: "group_id")
+ remove_foreign_key_without_error("board_assignees", column: "board_id")
+ remove_foreign_key_without_error("board_assignees", column: "assignee_id")
+ remove_foreign_key_without_error("board_labels", column: "board_id")
+ remove_foreign_key_without_error("board_labels", column: "label_id")
+ remove_foreign_key_without_error("ci_sources_pipelines", column: "source_job_id")
+ remove_foreign_key_without_error("ci_sources_pipelines", column: "pipeline_id")
+ remove_foreign_key_without_error("ci_sources_pipelines", column: "source_pipeline_id")
+ remove_foreign_key_without_error("ci_sources_pipelines", column: "source_project_id")
+ remove_foreign_key_without_error("ci_sources_pipelines", column: "project_id")
+ remove_foreign_key_without_error("design_management_designs", column: "issue_id")
+ remove_foreign_key_without_error("design_management_designs", column: "project_id")
+ remove_foreign_key_without_error("design_management_versions", column: "design_management_design_id")
+ remove_foreign_key_without_error("draft_notes", column: "merge_request_id")
+ remove_foreign_key_without_error("draft_notes", column: "author_id")
+ remove_foreign_key_without_error("elasticsearch_indexed_namespaces", column: "namespace_id")
+ remove_foreign_key_without_error("elasticsearch_indexed_projects", column: "project_id")
+ remove_foreign_key_without_error("epic_issues", column: "epic_id")
+ remove_foreign_key_without_error("epic_issues", column: "issue_id")
+ remove_foreign_key_without_error("epic_metrics", column: "epic_id")
+ remove_foreign_key_without_error("epics", column: "parent_id")
+ remove_foreign_key_without_error("epics", column: "milestone_id")
+ remove_foreign_key_without_error("epics", column: "group_id")
+ remove_foreign_key_without_error("epics", column: "assignee_id")
+ remove_foreign_key_without_error("epics", column: "author_id")
+ remove_foreign_key_without_error("epics", column: "closed_by_id")
+ remove_foreign_key_without_error("geo_event_log", column: "cache_invalidation_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "hashed_storage_migrated_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "job_artifact_deleted_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "lfs_object_deleted_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "repositories_changed_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "repository_created_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "repository_deleted_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "repository_renamed_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "repository_updated_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "reset_checksum_event_id")
+ remove_foreign_key_without_error("geo_event_log", column: "upload_deleted_event_id")
+ remove_foreign_key_without_error("geo_hashed_storage_attachments_events", column: "project_id")
+ remove_foreign_key_without_error("geo_hashed_storage_migrated_events", column: "project_id")
+ remove_foreign_key_without_error("geo_node_namespace_links", column: "geo_node_id")
+ remove_foreign_key_without_error("geo_node_namespace_links", column: "namespace_id")
+ remove_foreign_key_without_error("geo_node_statuses", column: "geo_node_id")
+ remove_foreign_key_without_error("geo_repositories_changed_events", column: "geo_node_id")
+ remove_foreign_key_without_error("geo_repository_created_events", column: "project_id")
+ remove_foreign_key_without_error("geo_repository_renamed_events", column: "project_id")
+ remove_foreign_key_without_error("geo_repository_updated_events", column: "project_id")
+ remove_foreign_key_without_error("geo_reset_checksum_events", column: "project_id")
+ remove_foreign_key_without_error("gitlab_subscriptions", column: "namespace_id")
+ remove_foreign_key_without_error("gitlab_subscriptions", column: "hosted_plan_id")
+ remove_foreign_key_without_error("identities", column: "saml_provider_id")
+ remove_foreign_key_without_error("index_statuses", column: "project_id")
+ remove_foreign_key_without_error("insights", column: "namespace_id", on_delete: nil)
+ remove_foreign_key_without_error("insights", column: "project_id", on_delete: nil)
+ remove_foreign_key_without_error("issue_links", column: "source_id")
+ remove_foreign_key_without_error("issue_links", column: "target_id")
+ remove_foreign_key_without_error("lists", column: "milestone_id")
+ remove_foreign_key_without_error("lists", column: "user_id")
+ remove_foreign_key_without_error("namespace_statistics", column: "namespace_id")
+ remove_foreign_key_without_error("namespaces", column: "custom_project_templates_group_id")
+ remove_foreign_key_without_error("namespaces", column: "plan_id")
+ remove_foreign_key_without_error("namespaces", column: "file_template_project_id")
+ remove_foreign_key_without_error("notes", column: "review_id")
+ remove_foreign_key_without_error("operations_feature_flag_scopes", column: "feature_flag_id")
+ remove_foreign_key_without_error("operations_feature_flags", column: "project_id")
+ remove_foreign_key_without_error("operations_feature_flags_clients", column: "project_id")
+ remove_foreign_key_without_error("packages_maven_metadata", column: "package_id")
+ remove_foreign_key_without_error("packages_package_files", column: "package_id")
+ remove_foreign_key_without_error("packages_packages", column: "project_id")
+ remove_foreign_key_without_error("path_locks", column: "project_id")
+ remove_foreign_key_without_error("path_locks", column: "user_id", on_delete: nil)
+ remove_foreign_key_without_error("project_alerting_settings", column: "project_id")
+ remove_foreign_key_without_error("project_feature_usages", column: "project_id")
+ remove_foreign_key_without_error("project_incident_management_settings", column: "project_id")
+ remove_foreign_key_without_error("project_repository_states", column: "project_id")
+ remove_foreign_key_without_error("project_tracing_settings", column: "project_id")
+ remove_foreign_key_without_error("prometheus_alert_events", column: "project_id")
+ remove_foreign_key_without_error("prometheus_alert_events", column: "prometheus_alert_id")
+ remove_foreign_key_without_error("prometheus_alerts", column: "environment_id")
+ remove_foreign_key_without_error("prometheus_alerts", column: "project_id")
+ remove_foreign_key_without_error("prometheus_alerts", column: "prometheus_metric_id")
+ remove_foreign_key_without_error("protected_branch_merge_access_levels", column: "group_id")
+ remove_foreign_key_without_error("protected_branch_merge_access_levels", column: "user_id", on_delete: nil)
+ remove_foreign_key_without_error("protected_branch_push_access_levels", column: "group_id")
+ remove_foreign_key_without_error("protected_branch_push_access_levels", column: "user_id", on_delete: nil)
+ remove_foreign_key_without_error("protected_branch_unprotect_access_levels", column: "group_id")
+ remove_foreign_key_without_error("protected_branch_unprotect_access_levels", column: "protected_branch_id")
+ remove_foreign_key_without_error("protected_branch_unprotect_access_levels", column: "user_id")
+ remove_foreign_key_without_error("protected_environment_deploy_access_levels", column: "group_id")
+ remove_foreign_key_without_error("protected_environment_deploy_access_levels", column: "protected_environment_id")
+ remove_foreign_key_without_error("protected_environment_deploy_access_levels", column: "user_id")
+ remove_foreign_key_without_error("protected_environments", column: "project_id")
+ remove_foreign_key_without_error("push_rules", column: "project_id")
+ remove_foreign_key_without_error("resource_label_events", column: "epic_id")
+ remove_foreign_key_without_error("reviews", column: "merge_request_id")
+ remove_foreign_key_without_error("reviews", column: "project_id")
+ remove_foreign_key_without_error("reviews", column: "author_id")
+ remove_foreign_key_without_error("saml_providers", column: "group_id")
+ remove_foreign_key_without_error("scim_oauth_access_tokens", column: "group_id")
+ remove_foreign_key_without_error("slack_integrations", column: "service_id")
+ remove_foreign_key_without_error("smartcard_identities", column: "user_id")
+ remove_foreign_key_without_error("software_license_policies", column: "project_id")
+ remove_foreign_key_without_error("software_license_policies", column: "software_license_id")
+ remove_foreign_key_without_error("users", column: "managing_group_id")
+ remove_foreign_key_without_error("users_ops_dashboard_projects", column: "project_id")
+ remove_foreign_key_without_error("users_ops_dashboard_projects", column: "user_id")
+ remove_foreign_key_without_error("vulnerability_feedback", column: "pipeline_id")
+ remove_foreign_key_without_error("vulnerability_feedback", column: "issue_id")
+ remove_foreign_key_without_error("vulnerability_feedback", column: "merge_request_id")
+ remove_foreign_key_without_error("vulnerability_feedback", column: "project_id")
+ remove_foreign_key_without_error("vulnerability_feedback", column: "author_id")
+ remove_foreign_key_without_error("vulnerability_identifiers", column: "project_id")
+ remove_foreign_key_without_error("vulnerability_occurrence_identifiers", column: "identifier_id")
+ remove_foreign_key_without_error("vulnerability_occurrence_identifiers", column: "occurrence_id")
+ remove_foreign_key_without_error("vulnerability_occurrence_pipelines", column: "pipeline_id")
+ remove_foreign_key_without_error("vulnerability_occurrence_pipelines", column: "occurrence_id")
+ remove_foreign_key_without_error("vulnerability_occurrences", column: "project_id")
+ remove_foreign_key_without_error("vulnerability_occurrences", column: "primary_identifier_id")
+ remove_foreign_key_without_error("vulnerability_occurrences", column: "scanner_id")
+ remove_foreign_key_without_error("vulnerability_scanners", column: "project_id")
+ remove_foreign_key_without_error("dependency_proxy_blobs", column: "group_id")
+ remove_foreign_key_without_error("dependency_proxy_group_settings", column: "group_id")
+ remove_foreign_key_without_error("jira_connect_subscriptions", "jira_connect_installations", column: "jira_connect_installation_id")
+ remove_foreign_key_without_error("jira_connect_subscriptions", "namespaces", column: "namespace_id")
+
+ remove_foreign_key_without_error("protected_tag_create_access_levels", column: :group_id)
+ add_concurrent_foreign_key("protected_tag_create_access_levels", "namespaces", column: :group_id, on_delete: nil)
+ end
+end
+# rubocop: enable Metrics/AbcSize
+# rubocop: enable Migration/Datetime
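
The foreign-key churn above leans on two idempotent helpers from
Gitlab::Database::MigrationHelpers. A minimal sketch of what they are assumed
to do (the real implementations are more involved): removal swallows the
ArgumentError that Rails raises when the key is already gone, and the
concurrent add on PostgreSQL splits the work into a brief NOT VALID step and a
separate VALIDATE that does not block writers.

    # Assumed shapes only; not the actual GitLab implementations.
    def remove_foreign_key_without_error(*args, **options)
      remove_foreign_key(*args, **options)
    rescue ArgumentError
      # The key was already absent, so re-running a rollback stays safe.
    end

    def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade, name: nil)
      key_name = name || "fk_#{source}_#{column}" # hypothetical fallback naming
      action = { cascade: 'CASCADE', nullify: 'SET NULL' }[on_delete]

      # NOT VALID takes only a brief lock; existing rows are checked later.
      execute(<<~SQL)
        ALTER TABLE #{source}
          ADD CONSTRAINT #{key_name} FOREIGN KEY (#{column})
          REFERENCES #{target} (id)
          #{action ? "ON DELETE #{action}" : ''} NOT VALID
      SQL

      # VALIDATE scans the table without blocking concurrent writes.
      execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name}")
    end
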
diff --git a/db/migrate/20190403161806_update_designs_index.rb b/db/migrate/20190403161806_update_designs_index.rb
new file mode 100644
index 00000000000..78517e372d5
--- /dev/null
+++ b/db/migrate/20190403161806_update_designs_index.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class UpdateDesignsIndex < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index :design_management_designs, :issue_id, unique: true
+ end
+
+ def down
+ add_concurrent_index :design_management_designs, :issue_id, unique: true
+ end
+end
diff --git a/db/migrate/20190409224933_add_name_to_geo_nodes.rb b/db/migrate/20190409224933_add_name_to_geo_nodes.rb
new file mode 100644
index 00000000000..2dff81b429c
--- /dev/null
+++ b/db/migrate/20190409224933_add_name_to_geo_nodes.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddNameToGeoNodes < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ def up
+ add_column :geo_nodes, :name, :string
+
+    # url is also unique, and its type and size are identical to those of the
+    # name column, so this is safe.
+ execute "UPDATE geo_nodes SET name = url;"
+
+ # url is also `null: false`, so this is safe.
+ change_column :geo_nodes, :name, :string, null: false
+ end
+
+ def down
+ remove_column :geo_nodes, :name
+ end
+end
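
The three steps above (nullable add, SQL backfill, NOT NULL flip) are the
standard way to introduce a required column on a table that already has rows;
adding the column as NOT NULL up front would fail on existing records. The
same pattern, generalized as a sketch (the helper name is invented here):

    def add_backfilled_not_null_column(table, column, from:)
      add_column(table, column, :string)                  # 1. cheap nullable add
      execute("UPDATE #{table} SET #{column} = #{from}")  # 2. backfill from a NOT NULL source
      change_column_null(table, column, false)            # 3. enforce NOT NULL last
    end

    # e.g. add_backfilled_not_null_column(:geo_nodes, :name, from: :url)
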
diff --git a/db/migrate/20190410173409_add_name_index_to_geo_nodes.rb b/db/migrate/20190410173409_add_name_index_to_geo_nodes.rb
new file mode 100644
index 00000000000..efbdaf1d025
--- /dev/null
+++ b/db/migrate/20190410173409_add_name_index_to_geo_nodes.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddNameIndexToGeoNodes < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :geo_nodes, :name, unique: true
+ end
+
+ def down
+ remove_concurrent_index :geo_nodes, :name
+ end
+end
diff --git a/db/migrate/20190412183653_remove_url_index_from_geo_nodes.rb b/db/migrate/20190412183653_remove_url_index_from_geo_nodes.rb
new file mode 100644
index 00000000000..86a46260553
--- /dev/null
+++ b/db/migrate/20190412183653_remove_url_index_from_geo_nodes.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class RemoveUrlIndexFromGeoNodes < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index :geo_nodes, :url
+ end
+
+ def down
+ add_concurrent_index :geo_nodes, :url, unique: true
+ end
+end
diff --git a/db/migrate/20190414185432_add_comment_to_vulnerability_feedback.rb b/db/migrate/20190414185432_add_comment_to_vulnerability_feedback.rb
new file mode 100644
index 00000000000..63644a2f8fd
--- /dev/null
+++ b/db/migrate/20190414185432_add_comment_to_vulnerability_feedback.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddCommentToVulnerabilityFeedback < ActiveRecord::Migration[5.1]
+ DOWNTIME = false
+
+ def up
+ add_column :vulnerability_feedback, :comment_author_id, :integer
+ add_column :vulnerability_feedback, :comment, :text
+ add_column :vulnerability_feedback, :comment_timestamp, :datetime_with_timezone
+ end
+
+ def down
+ remove_column :vulnerability_feedback, :comment_author_id
+ remove_column :vulnerability_feedback, :comment
+ remove_column :vulnerability_feedback, :comment_timestamp
+ end
+end
diff --git a/db/migrate/20190415172035_update_insights_foreign_keys.rb b/db/migrate/20190415172035_update_insights_foreign_keys.rb
new file mode 100644
index 00000000000..5d3aa4c05e9
--- /dev/null
+++ b/db/migrate/20190415172035_update_insights_foreign_keys.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+# rubocop: disable Migration/AddConcurrentForeignKey
+
+class UpdateInsightsForeignKeys < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ remove_foreign_key_if_exists(:insights, column: :project_id)
+ add_foreign_key(:insights, :projects, column: :project_id, on_delete: :cascade)
+
+ remove_foreign_key_if_exists(:insights, column: :namespace_id)
+ add_foreign_key(:insights, :namespaces, column: :namespace_id, on_delete: :cascade)
+ end
+
+ def down
+ remove_foreign_key_if_exists(:insights, column: :namespace_id)
+ add_foreign_key(:insights, :namespaces, column: :namespace_id)
+
+ remove_foreign_key_if_exists(:insights, column: :project_id)
+ add_foreign_key(:insights, :projects, column: :project_id)
+ end
+end
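
The up/down pair only changes delete behaviour: up re-adds both keys with
ON DELETE CASCADE, while down restores plain keys (Rails' default is
NO ACTION, not cascade). One way to confirm the result from a console, as a
sketch:

    fk = ActiveRecord::Base.connection.foreign_keys(:insights)
           .find { |key| key.column == 'project_id' }

    fk.on_delete # => :cascade after up, nil after down
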
diff --git a/db/migrate/20190418132750_add_foreign_key_from_vulnerability_feedback_to_users.rb b/db/migrate/20190418132750_add_foreign_key_from_vulnerability_feedback_to_users.rb
new file mode 100644
index 00000000000..0bd9012aee8
--- /dev/null
+++ b/db/migrate/20190418132750_add_foreign_key_from_vulnerability_feedback_to_users.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddForeignKeyFromVulnerabilityFeedbackToUsers < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_foreign_key :vulnerability_feedback, :users, column: :comment_author_id, on_delete: :nullify
+ add_concurrent_index :vulnerability_feedback, :comment_author_id
+ end
+
+ def down
+ remove_foreign_key :vulnerability_feedback, column: :comment_author_id
+ remove_concurrent_index :vulnerability_feedback, :comment_author_id
+ end
+end
diff --git a/db/migrate/20190419121952_add_bridged_pipeline_id_to_bridges.rb b/db/migrate/20190419121952_add_bridged_pipeline_id_to_bridges.rb
new file mode 100644
index 00000000000..fac556c1897
--- /dev/null
+++ b/db/migrate/20190419121952_add_bridged_pipeline_id_to_bridges.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddBridgedPipelineIdToBridges < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ def change
+ add_column :ci_builds, :upstream_pipeline_id, :integer
+ end
+end
diff --git a/db/migrate/20190419123057_add_bridged_pipeline_id_foreign_key.rb b/db/migrate/20190419123057_add_bridged_pipeline_id_foreign_key.rb
new file mode 100644
index 00000000000..c31ec7bc107
--- /dev/null
+++ b/db/migrate/20190419123057_add_bridged_pipeline_id_foreign_key.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddBridgedPipelineIdForeignKey < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :ci_builds, :upstream_pipeline_id, where: 'upstream_pipeline_id IS NOT NULL'
+ add_concurrent_foreign_key :ci_builds, :ci_pipelines, column: :upstream_pipeline_id
+ end
+
+ def down
+ remove_foreign_key :ci_builds, column: :upstream_pipeline_id
+ remove_concurrent_index :ci_builds, :upstream_pipeline_id
+ end
+end
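
The where: option makes this a partial index: the vast majority of ci_builds
rows have no upstream pipeline, so only the few bridge rows get indexed. On
PostgreSQL the helper is expected to issue roughly the following (a sketch;
the name follows Rails' default index naming):

    execute(<<~SQL)
      CREATE INDEX CONCURRENTLY index_ci_builds_on_upstream_pipeline_id
        ON ci_builds (upstream_pipeline_id)
        WHERE upstream_pipeline_id IS NOT NULL
    SQL
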
diff --git a/db/migrate/20190423124640_add_index_to_projects_mirror_user_id.rb b/db/migrate/20190423124640_add_index_to_projects_mirror_user_id.rb
new file mode 100644
index 00000000000..b008d8ce0cc
--- /dev/null
+++ b/db/migrate/20190423124640_add_index_to_projects_mirror_user_id.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexToProjectsMirrorUserId < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :projects, :mirror_user_id
+ end
+
+ def down
+ remove_concurrent_index :projects, :mirror_user_id
+ end
+end
diff --git a/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb b/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb
new file mode 100644
index 00000000000..7339a4fccba
--- /dev/null
+++ b/db/migrate/20190520200123_add_rule_type_to_approval_merge_request_approval_rules.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddRuleTypeToApprovalMergeRequestApprovalRules < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_column_with_default(:approval_merge_request_rules, :rule_type, :integer, limit: 2, default: 1)
+ end
+
+ def down
+ remove_column(:approval_merge_request_rules, :rule_type)
+ end
+end
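
add_column_with_default exists because a plain add_column with a default
rewrote the whole table on the PostgreSQL versions GitLab supported at the
time. The assumed shape of the helper, simplified to a sketch:

    def add_column_with_default(table, column, type, default:, limit: nil)
      add_column(table, column, type, limit: limit)     # nullable, no default: no rewrite
      change_column_default(table, column, default)     # applies to new rows only
      update_column_in_batches(table, column, default)  # backfill existing rows in batches
      change_column_null(table, column, false)          # finally enforce NOT NULL
    end
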
diff --git a/db/migrate/20190528173628_add_index_for_code_owner_rule_type_on_approval_merge_request_rules.rb b/db/migrate/20190528173628_add_index_for_code_owner_rule_type_on_approval_merge_request_rules.rb
new file mode 100644
index 00000000000..96d878a98f2
--- /dev/null
+++ b/db/migrate/20190528173628_add_index_for_code_owner_rule_type_on_approval_merge_request_rules.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddIndexForCodeOwnerRuleTypeOnApprovalMergeRequestRules < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ INDEX_CODE_OWNERS_RULES_UNIQUENESS_NAME = 'index_approval_rule_name_for_code_owners_rule_type'
+ INDEX_CODE_OWNERS_RULES_QUERY_NAME = 'index_approval_rules_code_owners_rule_type'
+
+ class ApprovalMergeRequestRule < ActiveRecord::Base
+ include EachBatch
+
+ enum rule_types: {
+ regular: 1,
+ code_owner: 2
+ }
+ end
+
+ def up
+ # Ensure only 1 code_owner rule per merge_request
+ add_concurrent_index(
+ :approval_merge_request_rules,
+ [:merge_request_id, :rule_type, :name],
+ unique: true,
+ where: "rule_type = #{ApprovalMergeRequestRule.rule_types[:code_owner]}",
+ name: INDEX_CODE_OWNERS_RULES_UNIQUENESS_NAME
+ )
+
+ # Support lookups for all code_owner rules per merge_request
+ add_concurrent_index(
+ :approval_merge_request_rules,
+ [:merge_request_id, :rule_type],
+ where: "rule_type = #{ApprovalMergeRequestRule.rule_types[:code_owner]}",
+ name: INDEX_CODE_OWNERS_RULES_QUERY_NAME
+ )
+ end
+
+ def down
+ remove_concurrent_index_by_name(
+ :approval_merge_request_rules,
+ INDEX_CODE_OWNERS_RULES_UNIQUENESS_NAME
+ )
+
+ remove_concurrent_index_by_name(
+ :approval_merge_request_rules,
+ INDEX_CODE_OWNERS_RULES_QUERY_NAME
+ )
+ end
+end
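
Interpolating ApprovalMergeRequestRule.rule_types[:code_owner] resolves to the
integer 2 at migration time, so the first index enforces name uniqueness only
among code-owner rules and leaves regular rules unconstrained. The expected
definition, roughly (a sketch):

    execute(<<~SQL)
      CREATE UNIQUE INDEX CONCURRENTLY index_approval_rule_name_for_code_owners_rule_type
        ON approval_merge_request_rules (merge_request_id, rule_type, name)
        WHERE rule_type = 2
    SQL
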
diff --git a/db/migrate/20190603124955_add_index_to_count_pending_mirror_updates.rb b/db/migrate/20190603124955_add_index_to_count_pending_mirror_updates.rb
new file mode 100644
index 00000000000..6aa94f7b20b
--- /dev/null
+++ b/db/migrate/20190603124955_add_index_to_count_pending_mirror_updates.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexToCountPendingMirrorUpdates < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :project_mirror_data, [:last_update_at, :retry_count]
+ end
+
+ def down
+ remove_concurrent_index :project_mirror_data, [:last_update_at, :retry_count]
+ end
+end
diff --git a/db/post_migrate/20161128170531_drop_user_activities_table.rb b/db/post_migrate/20161128170531_drop_user_activities_table.rb
index 64d13a08953..d8b1e0731f3 100644
--- a/db/post_migrate/20161128170531_drop_user_activities_table.rb
+++ b/db/post_migrate/20161128170531_drop_user_activities_table.rb
@@ -1,9 +1,33 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
class DropUserActivitiesTable < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
- # This migration is a no-op. It just exists to match EE.
- def change
+ # When using the methods "add_concurrent_index" or "add_column_with_default"
+  # you must disable the use of transactions as these methods cannot run in an
+  # existing transaction. When using "add_concurrent_index" make sure that this
+  # method is the _only_ method called in the migration; any other changes
+ # should go in a separate migration. This ensures that upon failure _only_ the
+ # index creation fails and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
+
+ def up
+ drop_table :user_activities if table_exists?(:user_activities)
+ end
+
+ def down
+ unless table_exists?(:user_activities)
+ create_table "user_activities", force: :cascade do |t|
+ t.integer "user_id"
+ t.datetime "last_activity_at", null: false
+ end
+
+ add_index "user_activities", ["user_id"], name: "index_user_activities_on_user_id", unique: true, using: :btree
+ end
end
end
diff --git a/db/post_migrate/20170502101023_cleanup_namespaceless_pending_delete_projects.rb b/db/post_migrate/20170502101023_cleanup_namespaceless_pending_delete_projects.rb
deleted file mode 100644
index c018d30c175..00000000000
--- a/db/post_migrate/20170502101023_cleanup_namespaceless_pending_delete_projects.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# This is the counterpart of RequeuePendingDeleteProjects and cleans all
-# projects with `pending_delete = true` and that do not have a namespace.
-class CleanupNamespacelessPendingDeleteProjects < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
-
- disable_ddl_transaction!
-
- def up
- @offset = 0
-
- loop do
- ids = pending_delete_batch
-
- break if ids.empty?
-
- args = ids.map { |id| Array(id) }
-
- NamespacelessProjectDestroyWorker.bulk_perform_async(args)
-
- @offset += 1
- end
- end
-
- def down
- # noop
- end
-
- private
-
- def pending_delete_batch
- connection.exec_query(find_batch).map { |row| row['id'].to_i }
- end
-
- BATCH_SIZE = 5000
-
- def find_batch
- projects = Arel::Table.new(:projects)
- projects.project(projects[:id])
- .where(projects[:pending_delete].eq(true))
- .where(projects[:namespace_id].eq(nil))
- .skip(@offset * BATCH_SIZE)
- .take(BATCH_SIZE)
- .to_sql
- end
-end
diff --git a/db/post_migrate/20170703130158_schedule_merge_request_diff_migrations.rb b/db/post_migrate/20170703130158_schedule_merge_request_diff_migrations.rb
deleted file mode 100644
index fd4b2859f7f..00000000000
--- a/db/post_migrate/20170703130158_schedule_merge_request_diff_migrations.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-class ScheduleMergeRequestDiffMigrations < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
- BATCH_SIZE = 2500
- MIGRATION = 'DeserializeMergeRequestDiffsAndCommits'
-
- disable_ddl_transaction!
-
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
-
- include ::EachBatch
- end
-
- # Assuming that there are 5 million rows affected (which is more than on
- # GitLab.com), and that each batch of 2,500 rows takes up to 5 minutes, then
- # we can migrate all the rows in 7 days.
- #
- # On staging, plucking the IDs themselves takes 5 seconds.
- def up
- non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
-
- MergeRequestDiff.where(non_empty).each_batch(of: BATCH_SIZE) do |relation, index|
- range = relation.pluck('MIN(id)', 'MAX(id)').first
-
- BackgroundMigrationWorker.perform_in(index * 5.minutes, MIGRATION, range)
- end
- end
-
- def down
- end
-end
diff --git a/db/post_migrate/20170926150348_schedule_merge_request_diff_migrations_take_two.rb b/db/post_migrate/20170926150348_schedule_merge_request_diff_migrations_take_two.rb
deleted file mode 100644
index 9b675a51725..00000000000
--- a/db/post_migrate/20170926150348_schedule_merge_request_diff_migrations_take_two.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-class ScheduleMergeRequestDiffMigrationsTakeTwo < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
- BATCH_SIZE = 500
- MIGRATION = 'DeserializeMergeRequestDiffsAndCommits'
- DELAY_INTERVAL = 10.minutes
-
- disable_ddl_transaction!
-
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
-
- include ::EachBatch
-
- default_scope { where('st_commits IS NOT NULL OR st_diffs IS NOT NULL') }
- end
-
- # By this point, we assume ScheduleMergeRequestDiffMigrations - the first
- # version of this - has already run. On GitLab.com, we have ~220k un-migrated
-  # rows, but these rows will, in general, take a long time to migrate.
- #
- # With a gap of 10 minutes per batch, and 500 rows per batch, these migrations
- # are scheduled over 220_000 / 500 / 6 ~= 74 hours, which is a little over
- # three days.
- def up
- queue_background_migration_jobs_by_range_at_intervals(MergeRequestDiff, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
- end
-
- def down
- end
-end
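
Both removed schedulers drive the same pattern: walk the table in id batches
and enqueue one background-migration job per range, staggered so the queue
drains at a predictable rate. An assumed, simplified shape of the
queue_background_migration_jobs_by_range_at_intervals helper the second file
calls:

    def queue_background_migration_jobs_by_range_at_intervals(model, migration, delay_interval, batch_size: 1_000)
      model.each_batch(of: batch_size) do |relation, index|
        start_id, end_id = relation.pluck('MIN(id)', 'MAX(id)').first

        # index is 1-based, so even the first job waits one interval.
        BackgroundMigrationWorker.perform_in(index * delay_interval, migration, [start_id, end_id])
      end
    end
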
diff --git a/db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb b/db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb
deleted file mode 100644
index e49a70f902c..00000000000
--- a/db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration[4.2]
- disable_ddl_transaction!
-
- DOWNTIME = false
- MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'
-
- class GpgKey < ActiveRecord::Base
- self.table_name = 'gpg_keys'
-
- include EachBatch
- end
-
- def up
- GpgKey.select(:id).each_batch do |gpg_keys|
- jobs = gpg_keys.pluck(:id).map do |id|
- [MIGRATION, [id]]
- end
-
- BackgroundMigrationWorker.bulk_perform_async(jobs)
- end
- end
-
- def down
- end
-end
diff --git a/db/post_migrate/20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb b/db/post_migrate/20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb
deleted file mode 100644
index 51441a36e4b..00000000000
--- a/db/post_migrate/20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-class SchedulePopulateMergeRequestMetricsWithEventsData < ActiveRecord::Migration[4.2]
- DOWNTIME = false
- BATCH_SIZE = 10_000
- MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'
-
- disable_ddl_transaction!
-
- class MergeRequest < ActiveRecord::Base
- self.table_name = 'merge_requests'
-
- include ::EachBatch
- end
-
- def up
- say 'Scheduling `PopulateMergeRequestMetricsWithEventsData` jobs'
- # It will update around 4_000_000 records in batches of 10_000 merge
-    # requests (each batch running for about 10 minutes) and should take around 66 hours to complete.
-    # Apparently, production PostgreSQL is able to vacuum 10k-20k dead tuples per
-    # minute, and at maximum, each of these jobs should UPDATE 20k records.
- #
- # More information about the updates in `PopulateMergeRequestMetricsWithEventsData` class.
- #
- MergeRequest.all.each_batch(of: BATCH_SIZE) do |relation, index|
- range = relation.pluck('MIN(id)', 'MAX(id)').first
-
- BackgroundMigrationWorker.perform_in(index * 10.minutes, MIGRATION, range)
- end
- end
-
- def down
- execute "update merge_request_metrics set latest_closed_at = null"
- execute "update merge_request_metrics set latest_closed_by_id = null"
- execute "update merge_request_metrics set merged_by_id = null"
- end
-end
diff --git a/db/post_migrate/20171207150343_remove_soft_removed_objects.rb b/db/post_migrate/20171207150343_remove_soft_removed_objects.rb
deleted file mode 100644
index 53707c67d36..00000000000
--- a/db/post_migrate/20171207150343_remove_soft_removed_objects.rb
+++ /dev/null
@@ -1,208 +0,0 @@
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class RemoveSoftRemovedObjects < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- # Set this constant to true if this migration requires downtime.
- DOWNTIME = false
-
- disable_ddl_transaction!
-
- module SoftRemoved
- extend ActiveSupport::Concern
-
- included do
- scope :soft_removed, -> { where('deleted_at IS NOT NULL') }
- end
- end
-
- class User < ActiveRecord::Base
- self.table_name = 'users'
-
- include EachBatch
- end
-
- class Issue < ActiveRecord::Base
- self.table_name = 'issues'
-
- include EachBatch
- include SoftRemoved
- end
-
- class MergeRequest < ActiveRecord::Base
- self.table_name = 'merge_requests'
-
- include EachBatch
- include SoftRemoved
- end
-
- class Namespace < ActiveRecord::Base
- self.table_name = 'namespaces'
-
- include EachBatch
- include SoftRemoved
-
- scope :soft_removed_personal, -> { soft_removed.where(type: nil) }
- scope :soft_removed_group, -> { soft_removed.where(type: 'Group') }
- end
-
- class Route < ActiveRecord::Base
- self.table_name = 'routes'
-
- include EachBatch
- include SoftRemoved
- end
-
- class Project < ActiveRecord::Base
- self.table_name = 'projects'
-
- include EachBatch
- include SoftRemoved
- end
-
- class CiPipelineSchedule < ActiveRecord::Base
- self.table_name = 'ci_pipeline_schedules'
-
- include EachBatch
- include SoftRemoved
- end
-
- class CiTrigger < ActiveRecord::Base
- self.table_name = 'ci_triggers'
-
- include EachBatch
- include SoftRemoved
- end
-
- MODELS = [Issue, MergeRequest, CiPipelineSchedule, CiTrigger].freeze
-
- def up
- disable_statement_timeout do
- remove_personal_routes
- remove_personal_namespaces
- remove_group_namespaces
- remove_simple_soft_removed_rows
- end
- end
-
- def down
- # The data removed by this migration can't be restored in an automated way.
- end
-
- def remove_simple_soft_removed_rows
- create_temporary_indexes
-
- MODELS.each do |model|
- say_with_time("Removing soft removed rows from #{model.table_name}") do
- model.soft_removed.each_batch do |batch, index|
- batch.delete_all
- end
- end
- end
- ensure
- remove_temporary_indexes
- end
-
- def create_temporary_indexes
- MODELS.each do |model|
- index_name = temporary_index_name_for(model)
-
- # Without this index the removal process can take a very long time. For
- # example, getting the next ID of a batch for the `issues` table in
- # staging would take between 15 and 20 seconds.
- next if temporary_index_exists?(model)
-
- say_with_time("Creating temporary index #{index_name}") do
- add_concurrent_index(
- model.table_name,
- [:deleted_at, :id],
- name: index_name,
- where: 'deleted_at IS NOT NULL'
- )
- end
- end
- end
-
- def remove_temporary_indexes
- MODELS.each do |model|
- index_name = temporary_index_name_for(model)
-
- next unless temporary_index_exists?(model)
-
- say_with_time("Removing temporary index #{index_name}") do
- remove_concurrent_index_by_name(model.table_name, index_name)
- end
- end
- end
-
- def temporary_index_name_for(model)
- "index_on_#{model.table_name}_tmp"
- end
-
- def temporary_index_exists?(model)
- index_name = temporary_index_name_for(model)
-
- index_exists?(model.table_name, [:deleted_at, :id], name: index_name)
- end
-
- def remove_personal_namespaces
-    # Some personal namespaces are left behind on GitLab.com. In these
- # cases the associated data such as the projects and users has already been
- # removed.
- Namespace.soft_removed_personal.each_batch do |batch|
- batch.delete_all
- end
- end
-
- def remove_group_namespaces
- admin_id = id_for_admin_user
-
- unless admin_id
- say 'Not scheduling soft removed groups for removal as no admin user ' \
- 'could be found. You will need to remove any such groups manually.'
-
- return
- end
-
- # Left over groups can't be easily removed because we may also need to
- # remove memberships, repositories, and other associated data. As a result
- # we'll just schedule a Sidekiq job to remove these.
- #
- # As of January 5th, 2018 there are 36 groups that will be removed using
- # this code.
- Namespace.select(:id).soft_removed_group.each_batch(of: 10) do |batch, index|
- batch.each do |ns|
- schedule_group_removal(index * 5.minutes, ns.id, admin_id)
- end
- end
- end
-
- def schedule_group_removal(delay, group_id, user_id)
- if migrate_inline?
- GroupDestroyWorker.new.perform(group_id, user_id)
- else
- GroupDestroyWorker.perform_in(delay, group_id, user_id)
- end
- end
-
- def remove_personal_routes
- namespaces = Namespace.select(1)
- .soft_removed
- .where('namespaces.type IS NULL')
- .where('routes.source_type = ?', 'Namespace')
- .where('routes.source_id = namespaces.id')
-
- Route.where('EXISTS (?)', namespaces).each_batch do |batch|
- batch.delete_all
- end
- end
-
- def id_for_admin_user
- User.where(admin: true).limit(1).pluck(:id).first
- end
-
- def migrate_inline?
- Rails.env.test? || Rails.env.development?
- end
-end
diff --git a/db/post_migrate/20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb b/db/post_migrate/20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb
deleted file mode 100644
index b82ee3569c9..00000000000
--- a/db/post_migrate/20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-class MigrateImportAttributesDataFromProjectsToProjectMirrorData < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
-
- UP_MIGRATION = 'PopulateImportState'.freeze
- DOWN_MIGRATION = 'RollbackImportStateData'.freeze
-
- BATCH_SIZE = 1000
- DELAY_INTERVAL = 5.minutes
-
- disable_ddl_transaction!
-
- class Project < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'projects'
- end
-
- class ProjectImportState < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'project_mirror_data'
- end
-
- def up
- projects = Project.where.not(import_status: :none)
-
- queue_background_migration_jobs_by_range_at_intervals(projects, UP_MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
- end
-
- def down
- import_state = ProjectImportState.where.not(status: :none)
-
- queue_background_migration_jobs_by_range_at_intervals(import_state, DOWN_MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
- end
-end
diff --git a/db/post_migrate/20180521162137_migrate_remaining_mr_metrics_populating_background_migration.rb b/db/post_migrate/20180521162137_migrate_remaining_mr_metrics_populating_background_migration.rb
deleted file mode 100644
index 39666a0cd2a..00000000000
--- a/db/post_migrate/20180521162137_migrate_remaining_mr_metrics_populating_background_migration.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-class MigrateRemainingMrMetricsPopulatingBackgroundMigration < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
- BATCH_SIZE = 5_000
- MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'
- DELAY_INTERVAL = 10.minutes
-
- disable_ddl_transaction!
-
- class MergeRequest < ActiveRecord::Base
- self.table_name = 'merge_requests'
-
- include ::EachBatch
- end
-
- def up
- # Perform any ongoing background migration that might still be running. This
- # avoids scheduling way too many of the same jobs on self-hosted instances
- # if they're updating GitLab across multiple versions. The "Take one"
- # migration was executed on 10.4 on
- # SchedulePopulateMergeRequestMetricsWithEventsData.
- Gitlab::BackgroundMigration.steal(MIGRATION)
-
- metrics_not_exists_clause = <<~SQL
- NOT EXISTS (SELECT 1 FROM merge_request_metrics
- WHERE merge_request_metrics.merge_request_id = merge_requests.id)
- SQL
-
- relation = MergeRequest.where(metrics_not_exists_clause)
-
- # We currently have ~400_000 MR records without metrics on GitLab.com.
-    # This means it'll schedule ~80 jobs (5000 MRs each) with a 10-minute gap,
- # so this should take ~14 hours for all background migrations to complete.
- #
- queue_background_migration_jobs_by_range_at_intervals(relation,
- MIGRATION,
- DELAY_INTERVAL,
- batch_size: BATCH_SIZE)
- end
-
- def down
- end
-end
diff --git a/db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb b/db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb
deleted file mode 100644
index 73f6a3a2a43..00000000000
--- a/db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-class EnqueueDeleteDiffFilesWorkers < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
- SCHEDULER = 'ScheduleDiffFilesDeletion'.freeze
- TMP_INDEX = 'tmp_partial_diff_id_with_files_index'.freeze
-
- disable_ddl_transaction!
-
- def up
- unless index_exists_by_name?(:merge_request_diffs, TMP_INDEX)
- add_concurrent_index(:merge_request_diffs, :id, where: "(state NOT IN ('without_files', 'empty'))", name: TMP_INDEX)
- end
-
- BackgroundMigrationWorker.perform_async(SCHEDULER)
-
-    # We don't remove the index since it's going to be used by the DeleteDiffFiles
- # worker. We should remove it in an upcoming release.
- end
-
- def down
- if index_exists_by_name?(:merge_request_diffs, TMP_INDEX)
- remove_concurrent_index_by_name(:merge_request_diffs, TMP_INDEX)
- end
- end
-end
diff --git a/db/post_migrate/20180723130817_delete_inconsistent_internal_id_records.rb b/db/post_migrate/20180723130817_delete_inconsistent_internal_id_records.rb
deleted file mode 100644
index 440868005bb..00000000000
--- a/db/post_migrate/20180723130817_delete_inconsistent_internal_id_records.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-class DeleteInconsistentInternalIdRecords < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
-
- disable_ddl_transaction!
-
- # This migration cleans up any inconsistent records in internal_ids.
- #
- # That is, it deletes records that track a `last_value` that is
- # smaller than the maximum internal id (usually `iid`) found in
- # the corresponding model records.
-
- def up
- disable_statement_timeout do
- delete_internal_id_records('issues', 'project_id')
- delete_internal_id_records('merge_requests', 'project_id', 'target_project_id')
- delete_internal_id_records('deployments', 'project_id')
- delete_internal_id_records('milestones', 'project_id')
- delete_internal_id_records('milestones', 'namespace_id', 'group_id')
- delete_internal_id_records('ci_pipelines', 'project_id')
- end
- end
-
- class InternalId < ActiveRecord::Base
- self.table_name = 'internal_ids'
- enum usage: { issues: 0, merge_requests: 1, deployments: 2, milestones: 3, epics: 4, ci_pipelines: 5 }
- end
-
- private
-
- def delete_internal_id_records(base_table, scope_column_name, base_scope_column_name = scope_column_name)
- sql = <<~SQL
- SELECT id FROM ( -- workaround for MySQL
- SELECT internal_ids.id FROM (
- SELECT #{base_scope_column_name} AS #{scope_column_name}, max(iid) as maximum_iid from #{base_table} GROUP BY #{scope_column_name}
- ) maxima JOIN internal_ids USING (#{scope_column_name})
- WHERE internal_ids.usage=#{InternalId.usages.fetch(base_table)} AND maxima.maximum_iid > internal_ids.last_value
- ) internal_ids
- SQL
-
- InternalId.where("id IN (#{sql})").tap do |ids| # rubocop:disable GitlabSecurity/SqlInjection
- say "Deleting internal_id records for #{base_table}: #{ids.pluck(:project_id, :last_value)}" unless ids.empty?
- end.delete_all
- end
-end
diff --git a/db/post_migrate/20180906051323_remove_orphaned_label_links.rb b/db/post_migrate/20180906051323_remove_orphaned_label_links.rb
deleted file mode 100644
index a474aaf534c..00000000000
--- a/db/post_migrate/20180906051323_remove_orphaned_label_links.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-class RemoveOrphanedLabelLinks < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
-
- disable_ddl_transaction!
-
- class LabelLinks < ActiveRecord::Base
- self.table_name = 'label_links'
- include EachBatch
-
- def self.orphaned
- where('NOT EXISTS ( SELECT 1 FROM labels WHERE labels.id = label_links.label_id )')
- end
- end
-
- def up
- # Some of these queries can take up to 10 seconds to run on GitLab.com,
- # which is pretty close to our 15 second statement timeout. To ensure a
- # smooth deployment procedure we disable the statement timeouts for this
- # migration, just in case.
- disable_statement_timeout do
- # On GitLab.com there are over 2,000,000 orphaned label links. On
- # staging, removing 100,000 rows generated a max replication lag of 6.7
- # MB. In total, removing all these rows will only generate about 136 MB
- # of data, so it should be safe to do this.
- LabelLinks.orphaned.each_batch(of: 100_000) do |batch|
- batch.delete_all
- end
- end
-
- add_concurrent_foreign_key(:label_links, :labels, column: :label_id, on_delete: :cascade)
- end
-
- def down
- # There is no way to restore orphaned label links.
- if foreign_key_exists?(:label_links, column: :label_id)
- remove_foreign_key(:label_links, column: :label_id)
- end
- end
-end
diff --git a/db/post_migrate/20180913051323_consume_remaining_diff_files_deletion_jobs.rb b/db/post_migrate/20180913051323_consume_remaining_diff_files_deletion_jobs.rb
deleted file mode 100644
index 2c266a4695b..00000000000
--- a/db/post_migrate/20180913051323_consume_remaining_diff_files_deletion_jobs.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-class ConsumeRemainingDiffFilesDeletionJobs < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
-
- disable_ddl_transaction!
-
- MIGRATION = 'ScheduleDiffFilesDeletion'.freeze
- TMP_INDEX = 'tmp_partial_diff_id_with_files_index'.freeze
-
- def up
- # Perform any ongoing background migration that might still be scheduled.
- Gitlab::BackgroundMigration.steal(MIGRATION)
-
- remove_concurrent_index_by_name(:merge_request_diffs, TMP_INDEX)
- end
-
- def down
- add_concurrent_index(:merge_request_diffs, :id, where: "(state NOT IN ('without_files', 'empty'))", name: TMP_INDEX)
- end
-end
diff --git a/db/post_migrate/20180916014356_populate_external_pipeline_source.rb b/db/post_migrate/20180916014356_populate_external_pipeline_source.rb
deleted file mode 100644
index a3d2df1f2bd..00000000000
--- a/db/post_migrate/20180916014356_populate_external_pipeline_source.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class PopulateExternalPipelineSource < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- # Set this constant to true if this migration requires downtime.
- DOWNTIME = false
- MIGRATION = 'PopulateExternalPipelineSource'.freeze
- BATCH_SIZE = 500
-
- disable_ddl_transaction!
-
- class Pipeline < ActiveRecord::Base
- include EachBatch
- self.table_name = 'ci_pipelines'
- end
-
- def up
- Pipeline.where(source: nil).tap do |relation|
- queue_background_migration_jobs_by_range_at_intervals(relation,
- MIGRATION,
- 5.minutes,
- batch_size: BATCH_SIZE)
- end
- end
-
- def down
- # noop
- end
-end
diff --git a/db/post_migrate/20181014121030_enqueue_redact_links.rb b/db/post_migrate/20181014121030_enqueue_redact_links.rb
deleted file mode 100644
index 8d1a840d594..00000000000
--- a/db/post_migrate/20181014121030_enqueue_redact_links.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-class EnqueueRedactLinks < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
- BATCH_SIZE = 1000
- DELAY_INTERVAL = 5.minutes.to_i
- MIGRATION = 'RedactLinks'
-
- disable_ddl_transaction!
-
- class Note < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'notes'
- self.inheritance_column = :_type_disabled
- end
-
- class Issue < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'issues'
- self.inheritance_column = :_type_disabled
- end
-
- class MergeRequest < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'merge_requests'
- self.inheritance_column = :_type_disabled
- end
-
- class Snippet < ActiveRecord::Base
- include EachBatch
-
- self.table_name = 'snippets'
- self.inheritance_column = :_type_disabled
- end
-
- def up
- disable_statement_timeout do
- schedule_migration(Note, 'note')
- schedule_migration(Issue, 'description')
- schedule_migration(MergeRequest, 'description')
- schedule_migration(Snippet, 'description')
- end
- end
-
- def down
- # nothing to do
- end
-
- private
-
- def schedule_migration(model, field)
- link_pattern = "%/sent_notifications/" + ("_" * 32) + "/unsubscribe%"
-
- model.where("#{field} like ?", link_pattern).each_batch(of: BATCH_SIZE) do |batch, index|
- start_id, stop_id = batch.pluck('MIN(id)', 'MAX(id)').first
-
- BackgroundMigrationWorker.perform_in(index * DELAY_INTERVAL, MIGRATION, [model.name.demodulize, field, start_id, stop_id])
- end
- end
-end
diff --git a/db/post_migrate/20181204154019_populate_mr_metrics_with_events_data.rb b/db/post_migrate/20181204154019_populate_mr_metrics_with_events_data.rb
deleted file mode 100644
index 1e43e3dd790..00000000000
--- a/db/post_migrate/20181204154019_populate_mr_metrics_with_events_data.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-
-class PopulateMrMetricsWithEventsData < ActiveRecord::Migration[4.2]
- include Gitlab::Database::MigrationHelpers
-
- DOWNTIME = false
- BATCH_SIZE = 10_000
- MIGRATION = 'PopulateMergeRequestMetricsWithEventsDataImproved'
- PREVIOUS_MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'
-
- disable_ddl_transaction!
-
- def up
- # Perform any ongoing background migration that might still be running from
- # previous try (see https://gitlab.com/gitlab-org/gitlab-ce/issues/47676).
- Gitlab::BackgroundMigration.steal(PREVIOUS_MIGRATION)
-
- say 'Scheduling `PopulateMergeRequestMetricsWithEventsData` jobs'
- # It will update around 4_000_000 records in batches of 10_000 merge
-    # requests (each batch running for about 5 minutes) and should take around 53 hours to complete.
- # Apparently, production PostgreSQL is able to vacuum 10k-20k dead_tuples
- # per minute. So this should give us enough space.
- #
- # More information about the updates in `PopulateMergeRequestMetricsWithEventsDataImproved` class.
- #
- MergeRequest.all.each_batch(of: BATCH_SIZE) do |relation, index|
- range = relation.pluck('MIN(id)', 'MAX(id)').first
-
- BackgroundMigrationWorker.perform_in(index * 8.minutes, MIGRATION, range)
- end
- end
-
- def down
- end
-end
diff --git a/db/post_migrate/20190402224749_schedule_merge_request_assignees_migration_progress_check.rb b/db/post_migrate/20190402224749_schedule_merge_request_assignees_migration_progress_check.rb
new file mode 100644
index 00000000000..6fb67deb834
--- /dev/null
+++ b/db/post_migrate/20190402224749_schedule_merge_request_assignees_migration_progress_check.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class ScheduleMergeRequestAssigneesMigrationProgressCheck < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ MIGRATION = 'MergeRequestAssigneesMigrationProgressCheck'.freeze
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ BackgroundMigrationWorker.perform_async(MIGRATION)
+ end
+
+ def down
+ end
+end
diff --git a/db/post_migrate/20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb b/db/post_migrate/20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb
new file mode 100644
index 00000000000..447f91ebc7e
--- /dev/null
+++ b/db/post_migrate/20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+class AddUniqueConstraintToApprovalsUserIdAndMergeRequestId < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ remove_duplicates
+ add_concurrent_index :approvals, [:user_id, :merge_request_id], unique: true
+ end
+
+ def down
+ remove_concurrent_index :approvals, [:user_id, :merge_request_id]
+ end
+
+ private
+
+ def remove_duplicates
+ add_concurrent_index :approvals, [:user_id, :merge_request_id, :id]
+
+ if Gitlab::Database.mysql?
+ execute <<-SQL
+ DELETE FROM a
+ USING approvals AS a
+ INNER JOIN (
+ SELECT user_id, merge_request_id, MIN(id) as min_id
+ FROM approvals
+ GROUP BY user_id, merge_request_id
+ HAVING COUNT(id) > 1
+ ) as approvals_with_duplicates
+ ON approvals_with_duplicates.user_id = a.user_id
+ AND approvals_with_duplicates.merge_request_id = a.merge_request_id
+ WHERE approvals_with_duplicates.min_id <> a.id;
+ SQL
+ else
+ execute <<-SQL
+ DELETE FROM approvals
+ USING (
+ SELECT user_id, merge_request_id, MIN(id) as min_id
+ FROM approvals
+ GROUP BY user_id, merge_request_id
+ HAVING COUNT(id) > 1
+ ) as approvals_with_duplicates
+ WHERE approvals_with_duplicates.user_id = approvals.user_id
+ AND approvals_with_duplicates.merge_request_id = approvals.merge_request_id
+ AND approvals_with_duplicates.min_id <> approvals.id;
+ SQL
+ end
+
+ remove_concurrent_index :approvals, [:user_id, :merge_request_id, :id]
+ end
+end
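
Both branches keep only the oldest approval (MIN(id)) per
(user_id, merge_request_id) pair before the unique index goes in, and the
temporary three-column index keeps the grouping scans cheap. A pre-flight
check along these lines could confirm nothing is left to collide (a sketch,
not part of the migration):

    leftovers = execute(<<~SQL)
      SELECT user_id, merge_request_id
      FROM approvals
      GROUP BY user_id, merge_request_id
      HAVING COUNT(*) > 1
    SQL

    raise 'duplicate approvals remain' if leftovers.any?
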
diff --git a/db/post_migrate/20190404231137_remove_alternate_url_from_geo_nodes.rb b/db/post_migrate/20190404231137_remove_alternate_url_from_geo_nodes.rb
new file mode 100644
index 00000000000..785ceb2fb28
--- /dev/null
+++ b/db/post_migrate/20190404231137_remove_alternate_url_from_geo_nodes.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+# We are reverting the feature that created this column. This is for anyone who
+# migrated while the feature still existed in master.
+class RemoveAlternateUrlFromGeoNodes < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ def up
+ remove_column(:geo_nodes, :alternate_url) if column_exists?(:geo_nodes, :alternate_url)
+ end
+
+ def down
+ add_column :geo_nodes, :alternate_url, :string
+ end
+end
diff --git a/db/post_migrate/20190418132125_populate_project_statistics_packages_size.rb b/db/post_migrate/20190418132125_populate_project_statistics_packages_size.rb
new file mode 100644
index 00000000000..a6bee3453c1
--- /dev/null
+++ b/db/post_migrate/20190418132125_populate_project_statistics_packages_size.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+class PopulateProjectStatisticsPackagesSize < ActiveRecord::Migration[5.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ class ProjectStatistics < ActiveRecord::Base
+ self.table_name = 'project_statistics'
+ end
+
+ def up
+ stats_ids = ProjectStatistics.joins(
+ <<~SQL.strip_heredoc
+ INNER JOIN projects ON projects.id = project_statistics.project_id
+ INNER JOIN packages_packages ON packages_packages.project_id = projects.id
+ INNER JOIN packages_package_files ON packages_package_files.package_id = packages_packages.id
+ SQL
+ ).distinct.select(:id)
+
+ packages_size = Arel.sql(
+ '(SELECT SUM(size) FROM packages_package_files ' \
+ 'JOIN packages_packages ON packages_packages.id = packages_package_files.package_id ' \
+ 'WHERE packages_packages.project_id = project_statistics.project_id)'
+ )
+ update_column_in_batches(:project_statistics, :packages_size, packages_size) do |table, query|
+ query.where(table[:id].in(stats_ids))
+ end
+
+ storage_size = Arel.sql('(repository_size + lfs_objects_size + build_artifacts_size + COALESCE(packages_size, 0))')
+ update_column_in_batches(:project_statistics, :storage_size, storage_size) do |table, query|
+ query.where(table[:id].in(stats_ids))
+ end
+ end
+
+ def down
+ storage_size = Arel.sql('(repository_size + lfs_objects_size + build_artifacts_size)')
+ update_column_in_batches(:project_statistics, :storage_size, storage_size) do |table, query|
+ query.where(table[:packages_size].gt(0))
+ end
+
+ update_column_in_batches(:project_statistics, :packages_size, nil)
+ end
+end
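
In the backfill above, update_column_in_batches (from Gitlab::Database::MigrationHelpers) walks project_statistics in primary-key ranges and issues one UPDATE per range; the Arel.sql fragment becomes the SET value, so the SUM(size) subquery is evaluated per updated row, and the block narrows each UPDATE to the ids collected in stats_ids. Roughly, one batch executes a statement of this shape (a sketch only; the id range is made up and the helper's exact SQL may differ):

    ActiveRecord::Base.connection.execute(<<-SQL)
      UPDATE project_statistics
      SET packages_size = (
        SELECT SUM(size)
        FROM packages_package_files
        JOIN packages_packages ON packages_packages.id = packages_package_files.package_id
        WHERE packages_packages.project_id = project_statistics.project_id
      )
      WHERE project_statistics.id BETWEEN 1 AND 1000
        AND project_statistics.id IN (
          SELECT project_statistics.id
          FROM project_statistics
          INNER JOIN projects ON projects.id = project_statistics.project_id
          INNER JOIN packages_packages ON packages_packages.project_id = projects.id
          INNER JOIN packages_package_files ON packages_package_files.package_id = packages_packages.id
        )
    SQL
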
diff --git a/db/post_migrate/20190511144331_remove_users_support_type.rb b/db/post_migrate/20190511144331_remove_users_support_type.rb
new file mode 100644
index 00000000000..32df33432b9
--- /dev/null
+++ b/db/post_migrate/20190511144331_remove_users_support_type.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class RemoveUsersSupportType < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ INDEX_STATE_INTERNAL_ATTRS = 'index_users_on_state_and_internal_attrs'
+
+ disable_ddl_transaction!
+
+ def up
+ remove_concurrent_index :users, :state, name: INDEX_STATE_INTERNAL_ATTRS
+ remove_concurrent_index :users, :support_bot
+
+ remove_column :users, :support_bot
+ end
+
+ def down
+ add_column :users, :support_bot, :boolean
+
+ add_concurrent_index :users, :support_bot
+ add_concurrent_index :users, :state,
+ name: INDEX_STATE_INTERNAL_ATTRS,
+ where: 'ghost <> true AND support_bot <> true'
+ end
+end
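
Ordering matters in the migration above: up drops the two indexes that reference support_bot before dropping the column itself, and down restores the column first and only then rebuilds the indexes on it. The where: option on add_concurrent_index produces a PostgreSQL partial index; the state index recreated in down corresponds roughly to this DDL (a sketch, not part of the migration):

    # Equivalent DDL for the partial index rebuilt in #down above.
    ActiveRecord::Base.connection.execute(<<-SQL)
      CREATE INDEX CONCURRENTLY index_users_on_state_and_internal_attrs
      ON users (state)
      WHERE ghost <> true AND support_bot <> true
    SQL
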
diff --git a/db/post_migrate/20190520201748_populate_rule_type_on_approval_merge_request_rules.rb b/db/post_migrate/20190520201748_populate_rule_type_on_approval_merge_request_rules.rb
new file mode 100644
index 00000000000..0f0df456134
--- /dev/null
+++ b/db/post_migrate/20190520201748_populate_rule_type_on_approval_merge_request_rules.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class PopulateRuleTypeOnApprovalMergeRequestRules < ActiveRecord::Migration[5.1]
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ class ApprovalMergeRequestRule < ActiveRecord::Base
+ include EachBatch
+
+ enum rule_types: {
+ regular: 1,
+ code_owner: 2
+ }
+ end
+
+ def up
+    # On GitLab.com, this should update about 17k rows. Since our updates are
+    # small and we are populating prior to indexing, the overhead should be small.
+ ApprovalMergeRequestRule.where(code_owner: true).each_batch do |batch|
+ batch.update_all(rule_type: ApprovalMergeRequestRule.rule_types[:code_owner])
+ end
+ end
+
+ def down
+ # code_owner is already kept in sync with `rule_type`, so no changes are needed
+ end
+end
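
Two details worth noting in the migration above: the enum is declared only for its class-level mapping, so ApprovalMergeRequestRule.rule_types[:code_owner] resolves to the integer 2 that update_all writes into rule_type, and EachBatch yields relations bounded by primary-key ranges, so each UPDATE touches a bounded slice of the ~17k rows rather than all of them at once. An equivalent console sketch (illustrative only; of: 1000 is assumed to match EachBatch's default batch size):

    ApprovalMergeRequestRule.where(code_owner: true).each_batch(of: 1000) do |batch|
      # rule_types[:code_owner] == 2; each batch updates at most 1000 rows.
      batch.update_all(rule_type: 2)
    end
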
diff --git a/db/schema.rb b/db/schema.rb
index ef4ec57acec..a50ab02f718 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -45,6 +45,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.text "message_font_color"
t.string "favicon"
t.boolean "email_header_and_footer_enabled", default: false, null: false
+ t.integer "updated_by"
end
create_table "application_setting_terms", id: :serial, force: :cascade do |t|
@@ -153,7 +154,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.integer "throttle_authenticated_web_requests_per_period", default: 7200, null: false
t.integer "throttle_authenticated_web_period_in_seconds", default: 3600, null: false
t.boolean "password_authentication_enabled_for_web"
- t.boolean "password_authentication_enabled_for_git", default: true
+ t.boolean "password_authentication_enabled_for_git", default: true, null: false
t.integer "gitaly_timeout_default", default: 55, null: false
t.integer "gitaly_timeout_medium", default: 30, null: false
t.integer "gitaly_timeout_fast", default: 10, null: false
@@ -196,9 +197,136 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.boolean "dns_rebinding_protection_enabled", default: true, null: false
t.boolean "default_project_deletion_protection", default: false, null: false
t.boolean "lock_memberships_to_ldap", default: false, null: false
+ t.text "help_text"
+ t.boolean "elasticsearch_indexing", default: false, null: false
+ t.boolean "elasticsearch_search", default: false, null: false
+ t.integer "shared_runners_minutes", default: 0, null: false
+ t.bigint "repository_size_limit", default: 0
+ t.string "elasticsearch_url", default: "http://localhost:9200"
+ t.boolean "elasticsearch_aws", default: false, null: false
+ t.string "elasticsearch_aws_region", default: "us-east-1"
+ t.string "elasticsearch_aws_access_key"
+ t.string "elasticsearch_aws_secret_access_key"
+ t.integer "geo_status_timeout", default: 10
+ t.boolean "elasticsearch_experimental_indexer"
+ t.boolean "check_namespace_plan", default: false, null: false
+ t.integer "mirror_max_delay", default: 300, null: false
+ t.integer "mirror_max_capacity", default: 100, null: false
+ t.integer "mirror_capacity_threshold", default: 50, null: false
+ t.boolean "slack_app_enabled", default: false
+ t.string "slack_app_id"
+ t.string "slack_app_secret"
+ t.string "slack_app_verification_token"
+ t.boolean "allow_group_owners_to_manage_ldap", default: true, null: false
+ t.string "email_additional_text"
+ t.integer "file_template_project_id"
+ t.boolean "pseudonymizer_enabled", default: false, null: false
+ t.boolean "snowplow_enabled", default: false, null: false
+ t.string "snowplow_collector_uri"
+ t.string "snowplow_site_id"
+ t.string "snowplow_cookie_domain"
+ t.integer "custom_project_templates_group_id"
+ t.boolean "elasticsearch_limit_indexing", default: false, null: false
+ t.string "geo_node_allowed_ips", default: "0.0.0.0/0, ::/0"
+ t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id", using: :btree
+ t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id", using: :btree
t.index ["usage_stats_set_by_user_id"], name: "index_application_settings_on_usage_stats_set_by_user_id", using: :btree
end
+ create_table "approval_merge_request_rule_sources", force: :cascade do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.bigint "approval_project_rule_id", null: false
+ t.index ["approval_merge_request_rule_id"], name: "index_approval_merge_request_rule_sources_1", unique: true, using: :btree
+ t.index ["approval_project_rule_id"], name: "index_approval_merge_request_rule_sources_2", using: :btree
+ end
+
+ create_table "approval_merge_request_rules", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "merge_request_id", null: false
+ t.integer "approvals_required", limit: 2, default: 0, null: false
+ t.boolean "code_owner", default: false, null: false
+ t.string "name", null: false
+ t.integer "rule_type", limit: 2, default: 1, null: false
+ t.index ["merge_request_id", "code_owner", "name"], name: "approval_rule_name_index_for_code_owners", unique: true, where: "(code_owner = true)", using: :btree
+ t.index ["merge_request_id", "code_owner"], name: "index_approval_merge_request_rules_1", using: :btree
+ t.index ["merge_request_id", "rule_type", "name"], name: "index_approval_rule_name_for_code_owners_rule_type", unique: true, where: "(rule_type = 2)", using: :btree
+ t.index ["merge_request_id", "rule_type"], name: "index_approval_rules_code_owners_rule_type", where: "(rule_type = 2)", using: :btree
+ end
+
+ create_table "approval_merge_request_rules_approved_approvers", force: :cascade do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.integer "user_id", null: false
+ t.index ["approval_merge_request_rule_id", "user_id"], name: "index_approval_merge_request_rules_approved_approvers_1", unique: true, using: :btree
+ t.index ["user_id"], name: "index_approval_merge_request_rules_approved_approvers_2", using: :btree
+ end
+
+ create_table "approval_merge_request_rules_groups", force: :cascade do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.integer "group_id", null: false
+ t.index ["approval_merge_request_rule_id", "group_id"], name: "index_approval_merge_request_rules_groups_1", unique: true, using: :btree
+ t.index ["group_id"], name: "index_approval_merge_request_rules_groups_2", using: :btree
+ end
+
+ create_table "approval_merge_request_rules_users", force: :cascade do |t|
+ t.bigint "approval_merge_request_rule_id", null: false
+ t.integer "user_id", null: false
+ t.index ["approval_merge_request_rule_id", "user_id"], name: "index_approval_merge_request_rules_users_1", unique: true, using: :btree
+ t.index ["user_id"], name: "index_approval_merge_request_rules_users_2", using: :btree
+ end
+
+ create_table "approval_project_rules", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.integer "approvals_required", limit: 2, default: 0, null: false
+ t.string "name", null: false
+ t.index ["project_id"], name: "index_approval_project_rules_on_project_id", using: :btree
+ end
+
+ create_table "approval_project_rules_groups", force: :cascade do |t|
+ t.bigint "approval_project_rule_id", null: false
+ t.integer "group_id", null: false
+ t.index ["approval_project_rule_id", "group_id"], name: "index_approval_project_rules_groups_1", unique: true, using: :btree
+ t.index ["group_id"], name: "index_approval_project_rules_groups_2", using: :btree
+ end
+
+ create_table "approval_project_rules_users", force: :cascade do |t|
+ t.bigint "approval_project_rule_id", null: false
+ t.integer "user_id", null: false
+ t.index ["approval_project_rule_id", "user_id"], name: "index_approval_project_rules_users_1", unique: true, using: :btree
+ t.index ["user_id"], name: "index_approval_project_rules_users_2", using: :btree
+ end
+
+ create_table "approvals", id: :serial, force: :cascade do |t|
+ t.integer "merge_request_id", null: false
+ t.integer "user_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index ["merge_request_id"], name: "index_approvals_on_merge_request_id", using: :btree
+ t.index ["user_id", "merge_request_id"], name: "index_approvals_on_user_id_and_merge_request_id", unique: true, using: :btree
+ end
+
+ create_table "approver_groups", id: :serial, force: :cascade do |t|
+ t.integer "target_id", null: false
+ t.string "target_type", null: false
+ t.integer "group_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index ["group_id"], name: "index_approver_groups_on_group_id", using: :btree
+ t.index ["target_id", "target_type"], name: "index_approver_groups_on_target_id_and_target_type", using: :btree
+ end
+
+ create_table "approvers", id: :serial, force: :cascade do |t|
+ t.integer "target_id", null: false
+ t.string "target_type"
+ t.integer "user_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index ["target_id", "target_type"], name: "index_approvers_on_target_id_and_target_type", using: :btree
+ t.index ["user_id"], name: "index_approvers_on_user_id", using: :btree
+ end
+
create_table "audit_events", id: :serial, force: :cascade do |t|
t.integer "author_id", null: false
t.string "type", null: false
@@ -233,6 +361,13 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["project_id"], name: "index_badges_on_project_id", using: :btree
end
+ create_table "board_assignees", id: :serial, force: :cascade do |t|
+ t.integer "board_id", null: false
+ t.integer "assignee_id", null: false
+ t.index ["assignee_id"], name: "index_board_assignees_on_assignee_id", using: :btree
+ t.index ["board_id", "assignee_id"], name: "index_board_assignees_on_board_id_and_assignee_id", unique: true, using: :btree
+ end
+
create_table "board_group_recent_visits", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -245,6 +380,13 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["user_id"], name: "index_board_group_recent_visits_on_user_id", using: :btree
end
+ create_table "board_labels", id: :serial, force: :cascade do |t|
+ t.integer "board_id", null: false
+ t.integer "label_id", null: false
+ t.index ["board_id", "label_id"], name: "index_board_labels_on_board_id_and_label_id", unique: true, using: :btree
+ t.index ["label_id"], name: "index_board_labels_on_label_id", using: :btree
+ end
+
create_table "board_project_recent_visits", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -262,7 +404,11 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "group_id"
+ t.integer "milestone_id"
+ t.integer "weight"
+ t.string "name", default: "Development", null: false
t.index ["group_id"], name: "index_boards_on_group_id", using: :btree
+ t.index ["milestone_id"], name: "index_boards_on_milestone_id", using: :btree
t.index ["project_id"], name: "index_boards_on_project_id", using: :btree
end
@@ -375,6 +521,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.integer "failure_reason"
t.datetime_with_timezone "scheduled_at"
t.string "token_encrypted"
+ t.integer "upstream_pipeline_id"
t.index ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree
t.index ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
t.index ["commit_id", "artifacts_expire_at", "id"], name: "index_ci_builds_on_commit_id_and_artifacts_expireatandidpartial", where: "(((type)::text = 'Ci::Build'::text) AND ((retried = false) OR (retried IS NULL)) AND ((name)::text = ANY (ARRAY[('sast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('sast:container'::character varying)::text, ('container_scanning'::character varying)::text, ('dast'::character varying)::text])))", using: :btree
@@ -382,6 +529,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["commit_id", "status", "type"], name: "index_ci_builds_on_commit_id_and_status_and_type", using: :btree
t.index ["commit_id", "type", "name", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_name_and_ref", using: :btree
t.index ["commit_id", "type", "ref"], name: "index_ci_builds_on_commit_id_and_type_and_ref", using: :btree
+ t.index ["name"], name: "index_ci_builds_on_name_for_security_products_values", where: "((name)::text = ANY (ARRAY[('container_scanning'::character varying)::text, ('dast'::character varying)::text, ('dependency_scanning'::character varying)::text, ('license_management'::character varying)::text, ('sast'::character varying)::text]))", using: :btree
t.index ["project_id", "id"], name: "index_ci_builds_on_project_id_and_id", using: :btree
t.index ["project_id", "status"], name: "index_ci_builds_project_id_and_status_for_live_jobs_partial2", where: "(((type)::text = 'Ci::Build'::text) AND ((status)::text = ANY (ARRAY[('running'::character varying)::text, ('pending'::character varying)::text, ('created'::character varying)::text])))", using: :btree
t.index ["protected"], name: "index_ci_builds_on_protected", using: :btree
@@ -394,6 +542,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree
t.index ["token_encrypted"], name: "index_ci_builds_on_token_encrypted", unique: true, where: "(token_encrypted IS NOT NULL)", using: :btree
t.index ["updated_at"], name: "index_ci_builds_on_updated_at", using: :btree
+ t.index ["upstream_pipeline_id"], name: "index_ci_builds_on_upstream_pipeline_id", where: "(upstream_pipeline_id IS NOT NULL)", using: :btree
t.index ["user_id"], name: "index_ci_builds_on_user_id", using: :btree
end
@@ -582,6 +731,19 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["token_encrypted"], name: "index_ci_runners_on_token_encrypted", using: :btree
end
+ create_table "ci_sources_pipelines", id: :serial, force: :cascade do |t|
+ t.integer "project_id"
+ t.integer "pipeline_id"
+ t.integer "source_project_id"
+ t.integer "source_job_id"
+ t.integer "source_pipeline_id"
+ t.index ["pipeline_id"], name: "index_ci_sources_pipelines_on_pipeline_id", using: :btree
+ t.index ["project_id"], name: "index_ci_sources_pipelines_on_project_id", using: :btree
+ t.index ["source_job_id"], name: "index_ci_sources_pipelines_on_source_job_id", using: :btree
+ t.index ["source_pipeline_id"], name: "index_ci_sources_pipelines_on_source_pipeline_id", using: :btree
+ t.index ["source_project_id"], name: "index_ci_sources_pipelines_on_source_project_id", using: :btree
+ end
+
create_table "ci_stages", id: :serial, force: :cascade do |t|
t.integer "project_id"
t.integer "pipeline_id"
@@ -770,6 +932,9 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.text "status_reason"
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
+ t.string "encrypted_alert_manager_token"
+ t.string "encrypted_alert_manager_token_iv"
+ t.datetime_with_timezone "last_update_started_at"
t.index ["cluster_id"], name: "index_clusters_applications_prometheus_on_cluster_id", unique: true, using: :btree
end
@@ -846,6 +1011,25 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.float "percentage_service_desk_issues", default: 0.0, null: false
end
+ create_table "dependency_proxy_blobs", id: :serial, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.text "file", null: false
+ t.string "file_name", null: false
+ t.integer "file_store"
+ t.integer "group_id", null: false
+ t.bigint "size"
+ t.datetime_with_timezone "updated_at", null: false
+ t.index ["group_id", "file_name"], name: "index_dependency_proxy_blobs_on_group_id_and_file_name", using: :btree
+ end
+
+ create_table "dependency_proxy_group_settings", id: :serial, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.boolean "enabled", default: false, null: false
+ t.integer "group_id", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.index ["group_id"], name: "index_dependency_proxy_group_settings_on_group_id", using: :btree
+ end
+
create_table "deploy_keys_projects", id: :serial, force: :cascade do |t|
t.integer "deploy_key_id", null: false
t.integer "project_id", null: false
@@ -893,6 +1077,55 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["project_id", "status"], name: "index_deployments_on_project_id_and_status", using: :btree
end
+ create_table "design_management_designs", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.integer "issue_id", null: false
+ t.string "filename", null: false
+ t.index ["issue_id", "filename"], name: "index_design_management_designs_on_issue_id_and_filename", unique: true, using: :btree
+ t.index ["project_id"], name: "index_design_management_designs_on_project_id", using: :btree
+ end
+
+ create_table "design_management_designs_versions", id: false, force: :cascade do |t|
+ t.bigint "design_id", null: false
+ t.bigint "version_id", null: false
+ t.index ["design_id", "version_id"], name: "design_management_designs_versions_uniqueness", unique: true, using: :btree
+ t.index ["design_id"], name: "index_design_management_designs_versions_on_design_id", using: :btree
+ t.index ["version_id"], name: "index_design_management_designs_versions_on_version_id", using: :btree
+ end
+
+ create_table "design_management_versions", force: :cascade do |t|
+ t.binary "sha", null: false
+ t.index ["sha"], name: "index_design_management_versions_on_sha", unique: true, using: :btree
+ end
+
+ create_table "draft_notes", force: :cascade do |t|
+ t.integer "merge_request_id", null: false
+ t.integer "author_id", null: false
+ t.boolean "resolve_discussion", default: false, null: false
+ t.string "discussion_id"
+ t.text "note", null: false
+ t.text "position"
+ t.text "original_position"
+ t.text "change_position"
+ t.index ["author_id"], name: "index_draft_notes_on_author_id", using: :btree
+ t.index ["discussion_id"], name: "index_draft_notes_on_discussion_id", using: :btree
+ t.index ["merge_request_id"], name: "index_draft_notes_on_merge_request_id", using: :btree
+ end
+
+ create_table "elasticsearch_indexed_namespaces", id: false, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "namespace_id"
+ t.index ["namespace_id"], name: "index_elasticsearch_indexed_namespaces_on_namespace_id", unique: true, using: :btree
+ end
+
+ create_table "elasticsearch_indexed_projects", id: false, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id"
+ t.index ["project_id"], name: "index_elasticsearch_indexed_projects_on_project_id", unique: true, using: :btree
+ end
+
create_table "emails", id: :serial, force: :cascade do |t|
t.integer "user_id", null: false
t.string "email", null: false
@@ -915,10 +1148,67 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.string "environment_type"
t.string "state", default: "available", null: false
t.string "slug", null: false
+ t.index ["name"], name: "index_environments_on_name_varchar_pattern_ops", using: :btree, opclasses: {"name"=>"varchar_pattern_ops"}
t.index ["project_id", "name"], name: "index_environments_on_project_id_and_name", unique: true, using: :btree
t.index ["project_id", "slug"], name: "index_environments_on_project_id_and_slug", unique: true, using: :btree
end
+ create_table "epic_issues", id: :serial, force: :cascade do |t|
+ t.integer "epic_id", null: false
+ t.integer "issue_id", null: false
+ t.integer "relative_position", default: 1073741823, null: false
+ t.index ["epic_id"], name: "index_epic_issues_on_epic_id", using: :btree
+ t.index ["issue_id"], name: "index_epic_issues_on_issue_id", unique: true, using: :btree
+ end
+
+ create_table "epic_metrics", id: :serial, force: :cascade do |t|
+ t.integer "epic_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["epic_id"], name: "index_epic_metrics", using: :btree
+ end
+
+ create_table "epics", id: :serial, force: :cascade do |t|
+ t.integer "milestone_id"
+ t.integer "group_id", null: false
+ t.integer "author_id", null: false
+ t.integer "assignee_id"
+ t.integer "iid", null: false
+ t.integer "cached_markdown_version"
+ t.integer "updated_by_id"
+ t.integer "last_edited_by_id"
+ t.integer "lock_version"
+ t.date "start_date"
+ t.date "end_date"
+ t.datetime "last_edited_at"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.string "title", null: false
+ t.string "title_html", null: false
+ t.text "description"
+ t.text "description_html"
+ t.integer "start_date_sourcing_milestone_id"
+ t.integer "due_date_sourcing_milestone_id"
+ t.date "start_date_fixed"
+ t.date "due_date_fixed"
+ t.boolean "start_date_is_fixed"
+ t.boolean "due_date_is_fixed"
+ t.integer "state", limit: 2, default: 1, null: false
+ t.integer "closed_by_id"
+ t.datetime "closed_at"
+ t.integer "parent_id"
+ t.integer "relative_position"
+ t.index ["assignee_id"], name: "index_epics_on_assignee_id", using: :btree
+ t.index ["author_id"], name: "index_epics_on_author_id", using: :btree
+ t.index ["closed_by_id"], name: "index_epics_on_closed_by_id", using: :btree
+ t.index ["end_date"], name: "index_epics_on_end_date", using: :btree
+ t.index ["group_id"], name: "index_epics_on_group_id", using: :btree
+ t.index ["iid"], name: "index_epics_on_iid", using: :btree
+ t.index ["milestone_id"], name: "index_milestone", using: :btree
+ t.index ["parent_id"], name: "index_epics_on_parent_id", using: :btree
+ t.index ["start_date"], name: "index_epics_on_start_date", using: :btree
+ end
+
create_table "events", id: :serial, force: :cascade do |t|
t.integer "project_id"
t.integer "author_id", null: false
@@ -973,6 +1263,227 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true, using: :btree
end
+ create_table "geo_cache_invalidation_events", force: :cascade do |t|
+ t.string "key", null: false
+ end
+
+ create_table "geo_event_log", force: :cascade do |t|
+ t.datetime "created_at", null: false
+ t.bigint "repository_updated_event_id"
+ t.bigint "repository_deleted_event_id"
+ t.bigint "repository_renamed_event_id"
+ t.bigint "repositories_changed_event_id"
+ t.bigint "repository_created_event_id"
+ t.bigint "hashed_storage_migrated_event_id"
+ t.bigint "lfs_object_deleted_event_id"
+ t.bigint "hashed_storage_attachments_event_id"
+ t.bigint "upload_deleted_event_id"
+ t.bigint "job_artifact_deleted_event_id"
+ t.bigint "reset_checksum_event_id"
+ t.bigint "cache_invalidation_event_id"
+ t.index ["cache_invalidation_event_id"], name: "index_geo_event_log_on_cache_invalidation_event_id", where: "(cache_invalidation_event_id IS NOT NULL)", using: :btree
+ t.index ["hashed_storage_attachments_event_id"], name: "index_geo_event_log_on_hashed_storage_attachments_event_id", where: "(hashed_storage_attachments_event_id IS NOT NULL)", using: :btree
+ t.index ["hashed_storage_migrated_event_id"], name: "index_geo_event_log_on_hashed_storage_migrated_event_id", where: "(hashed_storage_migrated_event_id IS NOT NULL)", using: :btree
+ t.index ["job_artifact_deleted_event_id"], name: "index_geo_event_log_on_job_artifact_deleted_event_id", where: "(job_artifact_deleted_event_id IS NOT NULL)", using: :btree
+ t.index ["lfs_object_deleted_event_id"], name: "index_geo_event_log_on_lfs_object_deleted_event_id", where: "(lfs_object_deleted_event_id IS NOT NULL)", using: :btree
+ t.index ["repositories_changed_event_id"], name: "index_geo_event_log_on_repositories_changed_event_id", where: "(repositories_changed_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_created_event_id"], name: "index_geo_event_log_on_repository_created_event_id", where: "(repository_created_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_deleted_event_id"], name: "index_geo_event_log_on_repository_deleted_event_id", where: "(repository_deleted_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_renamed_event_id"], name: "index_geo_event_log_on_repository_renamed_event_id", where: "(repository_renamed_event_id IS NOT NULL)", using: :btree
+ t.index ["repository_updated_event_id"], name: "index_geo_event_log_on_repository_updated_event_id", where: "(repository_updated_event_id IS NOT NULL)", using: :btree
+ t.index ["reset_checksum_event_id"], name: "index_geo_event_log_on_reset_checksum_event_id", where: "(reset_checksum_event_id IS NOT NULL)", using: :btree
+ t.index ["upload_deleted_event_id"], name: "index_geo_event_log_on_upload_deleted_event_id", where: "(upload_deleted_event_id IS NOT NULL)", using: :btree
+ end
+
+ create_table "geo_hashed_storage_attachments_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.text "old_attachments_path", null: false
+ t.text "new_attachments_path", null: false
+ t.index ["project_id"], name: "index_geo_hashed_storage_attachments_events_on_project_id", using: :btree
+ end
+
+ create_table "geo_hashed_storage_migrated_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "old_disk_path", null: false
+ t.text "new_disk_path", null: false
+ t.text "old_wiki_disk_path", null: false
+ t.text "new_wiki_disk_path", null: false
+ t.integer "old_storage_version", limit: 2
+ t.integer "new_storage_version", limit: 2, null: false
+ t.index ["project_id"], name: "index_geo_hashed_storage_migrated_events_on_project_id", using: :btree
+ end
+
+ create_table "geo_job_artifact_deleted_events", force: :cascade do |t|
+ t.integer "job_artifact_id", null: false
+ t.string "file_path", null: false
+ t.index ["job_artifact_id"], name: "index_geo_job_artifact_deleted_events_on_job_artifact_id", using: :btree
+ end
+
+ create_table "geo_lfs_object_deleted_events", force: :cascade do |t|
+ t.integer "lfs_object_id", null: false
+ t.string "oid", null: false
+ t.string "file_path", null: false
+ t.index ["lfs_object_id"], name: "index_geo_lfs_object_deleted_events_on_lfs_object_id", using: :btree
+ end
+
+ create_table "geo_node_namespace_links", id: :serial, force: :cascade do |t|
+ t.integer "geo_node_id", null: false
+ t.integer "namespace_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["geo_node_id", "namespace_id"], name: "index_geo_node_namespace_links_on_geo_node_id_and_namespace_id", unique: true, using: :btree
+ t.index ["geo_node_id"], name: "index_geo_node_namespace_links_on_geo_node_id", using: :btree
+ t.index ["namespace_id"], name: "index_geo_node_namespace_links_on_namespace_id", using: :btree
+ end
+
+ create_table "geo_node_statuses", id: :serial, force: :cascade do |t|
+ t.integer "geo_node_id", null: false
+ t.integer "db_replication_lag_seconds"
+ t.integer "repositories_synced_count"
+ t.integer "repositories_failed_count"
+ t.integer "lfs_objects_count"
+ t.integer "lfs_objects_synced_count"
+ t.integer "lfs_objects_failed_count"
+ t.integer "attachments_count"
+ t.integer "attachments_synced_count"
+ t.integer "attachments_failed_count"
+ t.integer "last_event_id"
+ t.datetime "last_event_date"
+ t.integer "cursor_last_event_id"
+ t.datetime "cursor_last_event_date"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.datetime "last_successful_status_check_at"
+ t.string "status_message"
+ t.integer "replication_slots_count"
+ t.integer "replication_slots_used_count"
+ t.bigint "replication_slots_max_retained_wal_bytes"
+ t.integer "wikis_synced_count"
+ t.integer "wikis_failed_count"
+ t.integer "job_artifacts_count"
+ t.integer "job_artifacts_synced_count"
+ t.integer "job_artifacts_failed_count"
+ t.string "version"
+ t.string "revision"
+ t.integer "repositories_verified_count"
+ t.integer "repositories_verification_failed_count"
+ t.integer "wikis_verified_count"
+ t.integer "wikis_verification_failed_count"
+ t.integer "lfs_objects_synced_missing_on_primary_count"
+ t.integer "job_artifacts_synced_missing_on_primary_count"
+ t.integer "attachments_synced_missing_on_primary_count"
+ t.integer "repositories_checksummed_count"
+ t.integer "repositories_checksum_failed_count"
+ t.integer "repositories_checksum_mismatch_count"
+ t.integer "wikis_checksummed_count"
+ t.integer "wikis_checksum_failed_count"
+ t.integer "wikis_checksum_mismatch_count"
+ t.binary "storage_configuration_digest"
+ t.integer "repositories_retrying_verification_count"
+ t.integer "wikis_retrying_verification_count"
+ t.integer "projects_count"
+ t.index ["geo_node_id"], name: "index_geo_node_statuses_on_geo_node_id", unique: true, using: :btree
+ end
+
+ create_table "geo_nodes", id: :serial, force: :cascade do |t|
+ t.boolean "primary"
+ t.integer "oauth_application_id"
+ t.boolean "enabled", default: true, null: false
+ t.string "access_key"
+ t.string "encrypted_secret_access_key"
+ t.string "encrypted_secret_access_key_iv"
+ t.string "clone_url_prefix"
+ t.integer "files_max_capacity", default: 10, null: false
+ t.integer "repos_max_capacity", default: 25, null: false
+ t.string "url", null: false
+ t.string "selective_sync_type"
+ t.text "selective_sync_shards"
+ t.integer "verification_max_capacity", default: 100, null: false
+ t.integer "minimum_reverification_interval", default: 7, null: false
+ t.string "internal_url"
+ t.string "name", null: false
+ t.index ["access_key"], name: "index_geo_nodes_on_access_key", using: :btree
+ t.index ["name"], name: "index_geo_nodes_on_name", unique: true, using: :btree
+ t.index ["primary"], name: "index_geo_nodes_on_primary", using: :btree
+ end
+
+ create_table "geo_repositories_changed_events", force: :cascade do |t|
+ t.integer "geo_node_id", null: false
+ t.index ["geo_node_id"], name: "index_geo_repositories_changed_events_on_geo_node_id", using: :btree
+ end
+
+ create_table "geo_repository_created_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "repo_path", null: false
+ t.text "wiki_path"
+ t.text "project_name", null: false
+ t.index ["project_id"], name: "index_geo_repository_created_events_on_project_id", using: :btree
+ end
+
+ create_table "geo_repository_deleted_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "deleted_path", null: false
+ t.text "deleted_wiki_path"
+ t.text "deleted_project_name", null: false
+ t.index ["project_id"], name: "index_geo_repository_deleted_events_on_project_id", using: :btree
+ end
+
+ create_table "geo_repository_renamed_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.text "repository_storage_name", null: false
+ t.text "old_path_with_namespace", null: false
+ t.text "new_path_with_namespace", null: false
+ t.text "old_wiki_path_with_namespace", null: false
+ t.text "new_wiki_path_with_namespace", null: false
+ t.text "old_path", null: false
+ t.text "new_path", null: false
+ t.index ["project_id"], name: "index_geo_repository_renamed_events_on_project_id", using: :btree
+ end
+
+ create_table "geo_repository_updated_events", force: :cascade do |t|
+ t.integer "branches_affected", null: false
+ t.integer "tags_affected", null: false
+ t.integer "project_id", null: false
+ t.integer "source", limit: 2, null: false
+ t.boolean "new_branch", default: false, null: false
+ t.boolean "remove_branch", default: false, null: false
+ t.text "ref"
+ t.index ["project_id"], name: "index_geo_repository_updated_events_on_project_id", using: :btree
+ t.index ["source"], name: "index_geo_repository_updated_events_on_source", using: :btree
+ end
+
+ create_table "geo_reset_checksum_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.index ["project_id"], name: "index_geo_reset_checksum_events_on_project_id", using: :btree
+ end
+
+ create_table "geo_upload_deleted_events", force: :cascade do |t|
+ t.integer "upload_id", null: false
+ t.string "file_path", null: false
+ t.integer "model_id", null: false
+ t.string "model_type", null: false
+ t.string "uploader", null: false
+ t.index ["upload_id"], name: "index_geo_upload_deleted_events_on_upload_id", using: :btree
+ end
+
+ create_table "gitlab_subscriptions", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.date "start_date"
+ t.date "end_date"
+ t.date "trial_ends_on"
+ t.integer "namespace_id"
+ t.integer "hosted_plan_id"
+ t.integer "max_seats_used", default: 0
+ t.integer "seats", default: 0
+ t.boolean "trial", default: false
+ t.index ["hosted_plan_id"], name: "index_gitlab_subscriptions_on_hosted_plan_id", using: :btree
+ t.index ["namespace_id"], name: "index_gitlab_subscriptions_on_namespace_id", unique: true, using: :btree
+ end
+
create_table "gpg_key_subkeys", id: :serial, force: :cascade do |t|
t.integer "gpg_key_id", null: false
t.binary "keyid"
@@ -1022,12 +1533,22 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["key", "value"], name: "index_group_custom_attributes_on_key_and_value", using: :btree
end
+ create_table "historical_data", id: :serial, force: :cascade do |t|
+ t.date "date", null: false
+ t.integer "active_user_count"
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ end
+
create_table "identities", id: :serial, force: :cascade do |t|
t.string "extern_uid"
t.string "provider"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
+ t.integer "saml_provider_id"
+ t.string "secondary_extern_uid"
+ t.index ["saml_provider_id"], name: "index_identities_on_saml_provider_id", where: "(saml_provider_id IS NOT NULL)", using: :btree
t.index ["user_id"], name: "index_identities_on_user_id", using: :btree
end
@@ -1040,6 +1561,23 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["updated_at"], name: "index_import_export_uploads_on_updated_at", using: :btree
end
+ create_table "index_statuses", id: :serial, force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.datetime "indexed_at"
+ t.text "note"
+ t.string "last_commit"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["project_id"], name: "index_index_statuses_on_project_id", unique: true, using: :btree
+ end
+
+ create_table "insights", id: :serial, force: :cascade do |t|
+ t.integer "namespace_id", null: false
+ t.integer "project_id", null: false
+ t.index ["namespace_id"], name: "index_insights_on_namespace_id", using: :btree
+ t.index ["project_id"], name: "index_insights_on_project_id", using: :btree
+ end
+
create_table "internal_ids", force: :cascade do |t|
t.integer "project_id"
t.integer "usage", null: false
@@ -1064,6 +1602,16 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["user_id"], name: "index_issue_assignees_on_user_id", using: :btree
end
+ create_table "issue_links", id: :serial, force: :cascade do |t|
+ t.integer "source_id", null: false
+ t.integer "target_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.index ["source_id", "target_id"], name: "index_issue_links_on_source_id_and_target_id", unique: true, using: :btree
+ t.index ["source_id"], name: "index_issue_links_on_source_id", using: :btree
+ t.index ["target_id"], name: "index_issue_links_on_target_id", using: :btree
+ end
+
create_table "issue_metrics", id: :serial, force: :cascade do |t|
t.integer "issue_id", null: false
t.datetime "first_mentioned_in_commit_at"
@@ -1113,6 +1661,8 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.datetime_with_timezone "closed_at"
t.integer "closed_by_id"
t.integer "state_id", limit: 2
+ t.string "service_desk_reply_to"
+ t.integer "weight"
t.index ["author_id"], name: "index_issues_on_author_id", using: :btree
t.index ["closed_by_id"], name: "index_issues_on_closed_by_id", using: :btree
t.index ["confidential"], name: "index_issues_on_confidential", using: :btree
@@ -1130,6 +1680,24 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["updated_by_id"], name: "index_issues_on_updated_by_id", where: "(updated_by_id IS NOT NULL)", using: :btree
end
+ create_table "jira_connect_installations", force: :cascade do |t|
+ t.string "client_key"
+ t.string "encrypted_shared_secret"
+ t.string "encrypted_shared_secret_iv"
+ t.string "base_url"
+ t.index ["client_key"], name: "index_jira_connect_installations_on_client_key", unique: true, using: :btree
+ end
+
+ create_table "jira_connect_subscriptions", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.bigint "jira_connect_installation_id", null: false
+ t.integer "namespace_id", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.index ["jira_connect_installation_id", "namespace_id"], name: "idx_jira_connect_subscriptions_on_installation_id_namespace_id", unique: true, using: :btree
+ t.index ["jira_connect_installation_id"], name: "idx_jira_connect_subscriptions_on_installation_id", using: :btree
+ t.index ["namespace_id"], name: "index_jira_connect_subscriptions_on_namespace_id", using: :btree
+ end
+
create_table "jira_tracker_data", force: :cascade do |t|
t.integer "service_id", null: false
t.datetime_with_timezone "created_at", null: false
@@ -1200,6 +1768,16 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["type", "project_id"], name: "index_labels_on_type_and_project_id", using: :btree
end
+ create_table "ldap_group_links", id: :serial, force: :cascade do |t|
+ t.string "cn"
+ t.integer "group_access", null: false
+ t.integer "group_id", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.string "provider"
+ t.string "filter"
+ end
+
create_table "lfs_file_locks", id: :serial, force: :cascade do |t|
t.integer "project_id", null: false
t.integer "user_id", null: false
@@ -1230,6 +1808,12 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["project_id"], name: "index_lfs_objects_projects_on_project_id", using: :btree
end
+ create_table "licenses", id: :serial, force: :cascade do |t|
+ t.text "data", null: false
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ end
+
create_table "lists", id: :serial, force: :cascade do |t|
t.integer "board_id", null: false
t.integer "label_id"
@@ -1237,9 +1821,13 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.integer "position"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.integer "milestone_id"
+ t.integer "user_id"
t.index ["board_id", "label_id"], name: "index_lists_on_board_id_and_label_id", unique: true, using: :btree
t.index ["label_id"], name: "index_lists_on_label_id", using: :btree
t.index ["list_type"], name: "index_lists_on_list_type", using: :btree
+ t.index ["milestone_id"], name: "index_lists_on_milestone_id", using: :btree
+ t.index ["user_id"], name: "index_lists_on_user_id", using: :btree
end
create_table "members", id: :serial, force: :cascade do |t|
@@ -1257,6 +1845,8 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.datetime "invite_accepted_at"
t.datetime "requested_at"
t.date "expires_at"
+ t.boolean "ldap", default: false, null: false
+ t.boolean "override", default: false, null: false
t.index ["access_level"], name: "index_members_on_access_level", using: :btree
t.index ["invite_token"], name: "index_members_on_invite_token", unique: true, using: :btree
t.index ["requested_at"], name: "index_members_on_requested_at", using: :btree
@@ -1389,6 +1979,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.boolean "squash", default: false, null: false
t.boolean "allow_maintainer_to_push"
t.integer "state_id", limit: 2
+ t.integer "approvals_before_merge"
t.index ["assignee_id"], name: "index_merge_requests_on_assignee_id", using: :btree
t.index ["author_id"], name: "index_merge_requests_on_author_id", using: :btree
t.index ["created_at"], name: "index_merge_requests_on_created_at", using: :btree
@@ -1456,6 +2047,13 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["title"], name: "index_milestones_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
end
+ create_table "namespace_statistics", id: :serial, force: :cascade do |t|
+ t.integer "namespace_id", null: false
+ t.integer "shared_runners_seconds", default: 0, null: false
+ t.datetime "shared_runners_seconds_last_reset"
+ t.index ["namespace_id"], name: "index_namespace_statistics_on_namespace_id", unique: true, using: :btree
+ end
+
create_table "namespaces", id: :serial, force: :cascade do |t|
t.string "name", null: false
t.string "path", null: false
@@ -1479,16 +2077,37 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.integer "project_creation_level"
t.boolean "auto_devops_enabled"
t.datetime_with_timezone "last_ci_minutes_notification_at"
+ t.integer "custom_project_templates_group_id"
+ t.integer "file_template_project_id"
+ t.string "ldap_sync_error"
+ t.datetime "ldap_sync_last_successful_update_at"
+ t.datetime "ldap_sync_last_sync_at"
+ t.datetime "ldap_sync_last_update_at"
+ t.integer "plan_id"
+ t.bigint "repository_size_limit"
+ t.string "saml_discovery_token"
+ t.integer "shared_runners_minutes_limit"
+ t.datetime_with_timezone "trial_ends_on"
+ t.integer "extra_shared_runners_minutes_limit"
+ t.string "ldap_sync_status", default: "ready", null: false
+ t.boolean "membership_lock", default: false
t.index ["created_at"], name: "index_namespaces_on_created_at", using: :btree
+ t.index ["custom_project_templates_group_id", "type"], name: "index_namespaces_on_custom_project_templates_group_id_and_type", where: "(custom_project_templates_group_id IS NOT NULL)", using: :btree
+ t.index ["file_template_project_id"], name: "index_namespaces_on_file_template_project_id", using: :btree
+ t.index ["ldap_sync_last_successful_update_at"], name: "index_namespaces_on_ldap_sync_last_successful_update_at", using: :btree
+ t.index ["ldap_sync_last_update_at"], name: "index_namespaces_on_ldap_sync_last_update_at", using: :btree
t.index ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree
t.index ["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
t.index ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree
t.index ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree
t.index ["path"], name: "index_namespaces_on_path", using: :btree
t.index ["path"], name: "index_namespaces_on_path_trigram", using: :gin, opclasses: {"path"=>"gin_trgm_ops"}
+ t.index ["plan_id"], name: "index_namespaces_on_plan_id", using: :btree
t.index ["require_two_factor_authentication"], name: "index_namespaces_on_require_two_factor_authentication", using: :btree
t.index ["runners_token"], name: "index_namespaces_on_runners_token", unique: true, using: :btree
t.index ["runners_token_encrypted"], name: "index_namespaces_on_runners_token_encrypted", unique: true, using: :btree
+ t.index ["shared_runners_minutes_limit", "extra_shared_runners_minutes_limit"], name: "index_namespaces_on_shared_and_extra_runners_minutes_limit", using: :btree
+ t.index ["trial_ends_on"], name: "index_namespaces_on_trial_ends_on", where: "(trial_ends_on IS NOT NULL)", using: :btree
t.index ["type"], name: "index_namespaces_on_type", using: :btree
end
@@ -1529,6 +2148,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.integer "cached_markdown_version"
t.text "change_position"
t.boolean "resolved_by_push"
+ t.bigint "review_id"
t.index ["author_id"], name: "index_notes_on_author_id", using: :btree
t.index ["commit_id"], name: "index_notes_on_commit_id", using: :btree
t.index ["created_at"], name: "index_notes_on_created_at", using: :btree
@@ -1538,6 +2158,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["noteable_id", "noteable_type"], name: "index_notes_on_noteable_id_and_noteable_type", using: :btree
t.index ["noteable_type"], name: "index_notes_on_noteable_type", using: :btree
t.index ["project_id", "noteable_type"], name: "index_notes_on_project_id_and_noteable_type", using: :btree
+ t.index ["review_id"], name: "index_notes_on_review_id", using: :btree
end
create_table "notification_settings", id: :serial, force: :cascade do |t|
@@ -1562,6 +2183,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.boolean "push_to_merge_request"
t.boolean "issue_due"
t.string "notification_email"
+ t.boolean "new_epic"
t.index ["source_id", "source_type"], name: "index_notification_settings_on_source_id_and_source_type", using: :btree
t.index ["user_id", "source_id", "source_type"], name: "index_notifications_on_user_id_and_source_id_and_source_type", unique: true, using: :btree
t.index ["user_id"], name: "index_notification_settings_on_user_id", using: :btree
@@ -1614,6 +2236,66 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["access_grant_id"], name: "index_oauth_openid_requests_on_access_grant_id", using: :btree
end
+ create_table "operations_feature_flag_scopes", force: :cascade do |t|
+ t.bigint "feature_flag_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.boolean "active", null: false
+ t.string "environment_scope", default: "*", null: false
+ t.index ["feature_flag_id", "environment_scope"], name: "index_feature_flag_scopes_on_flag_id_and_environment_scope", unique: true, using: :btree
+ end
+
+ create_table "operations_feature_flags", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.boolean "active", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "name", null: false
+ t.text "description"
+ t.index ["project_id", "name"], name: "index_operations_feature_flags_on_project_id_and_name", unique: true, using: :btree
+ end
+
+ create_table "operations_feature_flags_clients", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.string "token", null: false
+ t.index ["project_id", "token"], name: "index_operations_feature_flags_clients_on_project_id_and_token", unique: true, using: :btree
+ end
+
+ create_table "packages_maven_metadata", force: :cascade do |t|
+ t.bigint "package_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "app_group", null: false
+ t.string "app_name", null: false
+ t.string "app_version"
+ t.string "path", limit: 512, null: false
+ t.index ["package_id", "path"], name: "index_packages_maven_metadata_on_package_id_and_path", using: :btree
+ end
+
+ create_table "packages_package_files", force: :cascade do |t|
+ t.bigint "package_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.bigint "size"
+ t.integer "file_type"
+ t.integer "file_store"
+ t.binary "file_md5"
+ t.binary "file_sha1"
+ t.string "file_name", null: false
+ t.text "file", null: false
+ t.index ["package_id", "file_name"], name: "index_packages_package_files_on_package_id_and_file_name", using: :btree
+ end
+
+ create_table "packages_packages", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "name", null: false
+ t.string "version"
+ t.integer "package_type", limit: 2, null: false
+ t.index ["project_id"], name: "index_packages_packages_on_project_id", using: :btree
+ end
+
create_table "pages_domain_acme_orders", force: :cascade do |t|
t.integer "pages_domain_id", null: false
t.datetime_with_timezone "expires_at", null: false
@@ -1650,6 +2332,17 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["verified_at"], name: "index_pages_domains_on_verified_at", using: :btree
end
+ create_table "path_locks", id: :serial, force: :cascade do |t|
+ t.string "path", null: false
+ t.integer "project_id"
+ t.integer "user_id"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["path"], name: "index_path_locks_on_path", using: :btree
+ t.index ["project_id"], name: "index_path_locks_on_project_id", using: :btree
+ t.index ["user_id"], name: "index_path_locks_on_user_id", using: :btree
+ end
+
create_table "personal_access_tokens", id: :serial, force: :cascade do |t|
t.integer "user_id", null: false
t.string "name", null: false
@@ -1664,6 +2357,16 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["user_id"], name: "index_personal_access_tokens_on_user_id", using: :btree
end
+ create_table "plans", id: :serial, force: :cascade do |t|
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.string "name"
+ t.string "title"
+ t.integer "active_pipelines_limit"
+ t.integer "pipeline_size_limit"
+ t.index ["name"], name: "index_plans_on_name", using: :btree
+ end
+
create_table "pool_repositories", force: :cascade do |t|
t.integer "shard_id", null: false
t.string "disk_path"
@@ -1681,6 +2384,11 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["name"], name: "index_programming_languages_on_name", unique: true, using: :btree
end
+ create_table "project_alerting_settings", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
+ t.string "encrypted_token", null: false
+ t.string "encrypted_token_iv", null: false
+ end
+
create_table "project_authorizations", id: false, force: :cascade do |t|
t.integer "user_id", null: false
t.integer "project_id", null: false
@@ -1741,6 +2449,14 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.string "organization_name"
end
+ create_table "project_feature_usages", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
+ t.datetime "jira_dvcs_cloud_last_sync_at"
+ t.datetime "jira_dvcs_server_last_sync_at"
+ t.index ["jira_dvcs_cloud_last_sync_at", "project_id"], name: "idx_proj_feat_usg_on_jira_dvcs_cloud_last_sync_at_and_proj_id", where: "(jira_dvcs_cloud_last_sync_at IS NOT NULL)", using: :btree
+ t.index ["jira_dvcs_server_last_sync_at", "project_id"], name: "idx_proj_feat_usg_on_jira_dvcs_server_last_sync_at_and_proj_id", where: "(jira_dvcs_server_last_sync_at IS NOT NULL)", using: :btree
+ t.index ["project_id"], name: "index_project_feature_usages_on_project_id", using: :btree
+ end
+
create_table "project_features", id: :serial, force: :cascade do |t|
t.integer "project_id", null: false
t.integer "merge_requests_access_level"
@@ -1775,6 +2491,12 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["project_id"], name: "index_project_import_data_on_project_id", using: :btree
end
+ create_table "project_incident_management_settings", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
+ t.boolean "create_issue", default: false, null: false
+ t.boolean "send_email", default: true, null: false
+ t.text "issue_template_key"
+ end
+
create_table "project_metrics_settings", primary_key: "project_id", id: :integer, default: nil, force: :cascade do |t|
t.string "external_dashboard_url", null: false
end
@@ -1784,7 +2506,16 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.string "status"
t.string "jid"
t.text "last_error"
+ t.datetime_with_timezone "last_successful_update_at"
+ t.datetime_with_timezone "last_update_at"
+ t.datetime "last_update_scheduled_at"
+ t.datetime "last_update_started_at"
+ t.datetime "next_execution_timestamp"
+ t.integer "retry_count", default: 0, null: false
t.index ["jid"], name: "index_project_mirror_data_on_jid", using: :btree
+ t.index ["last_successful_update_at"], name: "index_project_mirror_data_on_last_successful_update_at", using: :btree
+ t.index ["last_update_at", "retry_count"], name: "index_project_mirror_data_on_last_update_at_and_retry_count", using: :btree
+ t.index ["next_execution_timestamp", "retry_count"], name: "index_mirror_data_on_next_execution_and_retry_count", using: :btree
t.index ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree
t.index ["status"], name: "index_project_mirror_data_on_status", using: :btree
end
@@ -1798,6 +2529,26 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["shard_id"], name: "index_project_repositories_on_shard_id", using: :btree
end
+ create_table "project_repository_states", id: :serial, force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.binary "repository_verification_checksum"
+ t.binary "wiki_verification_checksum"
+ t.string "last_repository_verification_failure"
+ t.string "last_wiki_verification_failure"
+ t.datetime_with_timezone "repository_retry_at"
+ t.datetime_with_timezone "wiki_retry_at"
+ t.integer "repository_retry_count"
+ t.integer "wiki_retry_count"
+ t.datetime_with_timezone "last_repository_verification_ran_at"
+ t.datetime_with_timezone "last_wiki_verification_ran_at"
+ t.index ["last_repository_verification_failure"], name: "idx_repository_states_on_repository_failure_partial", where: "(last_repository_verification_failure IS NOT NULL)", using: :btree
+ t.index ["last_wiki_verification_failure"], name: "idx_repository_states_on_wiki_failure_partial", where: "(last_wiki_verification_failure IS NOT NULL)", using: :btree
+ t.index ["project_id", "last_repository_verification_ran_at"], name: "idx_repository_states_on_last_repository_verification_ran_at", where: "((repository_verification_checksum IS NOT NULL) AND (last_repository_verification_failure IS NULL))", using: :btree
+ t.index ["project_id", "last_wiki_verification_ran_at"], name: "idx_repository_states_on_last_wiki_verification_ran_at", where: "((wiki_verification_checksum IS NOT NULL) AND (last_wiki_verification_failure IS NULL))", using: :btree
+ t.index ["project_id"], name: "idx_repository_states_outdated_checksums", where: "(((repository_verification_checksum IS NULL) AND (last_repository_verification_failure IS NULL)) OR ((wiki_verification_checksum IS NULL) AND (last_wiki_verification_failure IS NULL)))", using: :btree
+ t.index ["project_id"], name: "index_project_repository_states_on_project_id", unique: true, using: :btree
+ end
+
create_table "project_statistics", id: :serial, force: :cascade do |t|
t.integer "project_id", null: false
t.integer "namespace_id", null: false
@@ -1808,10 +2559,20 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.bigint "build_artifacts_size", default: 0, null: false
t.bigint "packages_size", default: 0, null: false
t.bigint "wiki_size"
+ t.bigint "shared_runners_seconds", default: 0, null: false
+ t.datetime "shared_runners_seconds_last_reset"
t.index ["namespace_id"], name: "index_project_statistics_on_namespace_id", using: :btree
t.index ["project_id"], name: "index_project_statistics_on_project_id", unique: true, using: :btree
end
+ create_table "project_tracing_settings", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.string "external_url", null: false
+ t.index ["project_id"], name: "index_project_tracing_settings_on_project_id", unique: true, using: :btree
+ end
+
create_table "projects", id: :serial, force: :cascade do |t|
t.string "name"
t.string "path"
@@ -1859,7 +2620,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.boolean "resolve_outdated_diff_discussions"
t.boolean "repository_read_only"
t.boolean "merge_requests_ff_only_enabled", default: false
- t.boolean "merge_requests_rebase_enabled", default: false, null: false
+ t.boolean "merge_requests_rebase_enabled", default: false
t.integer "jobs_cache_index"
t.boolean "pages_https_only", default: true
t.boolean "remote_mirror_available_overridden"
@@ -1868,14 +2629,40 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.string "bfg_object_map"
t.boolean "detected_repository_languages"
t.string "external_authorization_classification_label"
+ t.boolean "disable_overriding_approvers_per_merge_request"
+ t.string "external_webhook_token"
+ t.text "issues_template"
+ t.boolean "merge_requests_author_approval"
+ t.boolean "merge_requests_disable_committers_approval"
+ t.boolean "merge_requests_require_code_owner_approval"
+ t.text "merge_requests_template"
+ t.datetime "mirror_last_successful_update_at"
+ t.datetime "mirror_last_update_at"
+ t.boolean "mirror_overwrites_diverged_branches"
+ t.integer "mirror_user_id"
+ t.boolean "only_mirror_protected_branches"
+ t.boolean "packages_enabled"
+ t.boolean "pull_mirror_available_overridden"
+ t.bigint "repository_size_limit"
+ t.boolean "require_password_to_approve"
+ t.boolean "mirror", default: false, null: false
+ t.boolean "mirror_trigger_builds", default: false, null: false
+ t.boolean "reset_approvals_on_push", default: true
+ t.boolean "service_desk_enabled", default: true
+ t.integer "approvals_before_merge", default: 0, null: false
+ t.index ["archived", "pending_delete", "merge_requests_require_code_owner_approval"], name: "projects_requiring_code_owner_approval", where: "((pending_delete = false) AND (archived = false) AND (merge_requests_require_code_owner_approval = true))", using: :btree
t.index ["created_at"], name: "index_projects_on_created_at", using: :btree
t.index ["creator_id"], name: "index_projects_on_creator_id", using: :btree
t.index ["description"], name: "index_projects_on_description_trigram", using: :gin, opclasses: {"description"=>"gin_trgm_ops"}
+ t.index ["id", "repository_storage", "last_repository_updated_at"], name: "idx_projects_on_repository_storage_last_repository_updated_at", using: :btree
t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))", using: :btree
+ t.index ["id"], name: "index_projects_on_mirror_and_mirror_trigger_builds_both_true", where: "((mirror IS TRUE) AND (mirror_trigger_builds IS TRUE))", using: :btree
t.index ["last_activity_at"], name: "index_projects_on_last_activity_at", using: :btree
t.index ["last_repository_check_at"], name: "index_projects_on_last_repository_check_at", where: "(last_repository_check_at IS NOT NULL)", using: :btree
t.index ["last_repository_check_failed"], name: "index_projects_on_last_repository_check_failed", using: :btree
t.index ["last_repository_updated_at"], name: "index_projects_on_last_repository_updated_at", using: :btree
+ t.index ["mirror_last_successful_update_at"], name: "index_projects_on_mirror_last_successful_update_at", using: :btree
+ t.index ["mirror_user_id"], name: "index_projects_on_mirror_user_id", using: :btree
t.index ["name"], name: "index_projects_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
t.index ["namespace_id"], name: "index_projects_on_namespace_id", using: :btree
t.index ["path"], name: "index_projects_on_path", using: :btree
@@ -1890,6 +2677,30 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree
end
+ create_table "prometheus_alert_events", force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.integer "prometheus_alert_id", null: false
+ t.datetime_with_timezone "started_at", null: false
+ t.datetime_with_timezone "ended_at"
+ t.integer "status", limit: 2
+ t.string "payload_key"
+ t.index ["project_id", "status"], name: "index_prometheus_alert_events_on_project_id_and_status", using: :btree
+ t.index ["prometheus_alert_id", "payload_key"], name: "index_prometheus_alert_event_scoped_payload_key", unique: true, using: :btree
+ end
+
+ create_table "prometheus_alerts", id: :serial, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.float "threshold", null: false
+ t.integer "operator", null: false
+ t.integer "environment_id", null: false
+ t.integer "project_id", null: false
+ t.integer "prometheus_metric_id", null: false
+ t.index ["environment_id"], name: "index_prometheus_alerts_on_environment_id", using: :btree
+ t.index ["project_id", "prometheus_metric_id", "environment_id"], name: "index_prometheus_alerts_metric_environment", unique: true, using: :btree
+ t.index ["prometheus_metric_id"], name: "index_prometheus_alerts_on_prometheus_metric_id", using: :btree
+ end
+
create_table "prometheus_metrics", id: :serial, force: :cascade do |t|
t.integer "project_id"
t.string "title", null: false
@@ -1910,18 +2721,36 @@ ActiveRecord::Schema.define(version: 20190611161641) do
create_table "protected_branch_merge_access_levels", id: :serial, force: :cascade do |t|
t.integer "protected_branch_id", null: false
- t.integer "access_level", default: 40, null: false
+ t.integer "access_level", default: 40
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.integer "group_id"
+ t.integer "user_id"
+ t.index ["group_id"], name: "index_protected_branch_merge_access_levels_on_group_id", using: :btree
t.index ["protected_branch_id"], name: "index_protected_branch_merge_access", using: :btree
+ t.index ["user_id"], name: "index_protected_branch_merge_access_levels_on_user_id", using: :btree
end
create_table "protected_branch_push_access_levels", id: :serial, force: :cascade do |t|
t.integer "protected_branch_id", null: false
- t.integer "access_level", default: 40, null: false
+ t.integer "access_level", default: 40
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
+ t.integer "group_id"
+ t.integer "user_id"
+ t.index ["group_id"], name: "index_protected_branch_push_access_levels_on_group_id", using: :btree
t.index ["protected_branch_id"], name: "index_protected_branch_push_access", using: :btree
+ t.index ["user_id"], name: "index_protected_branch_push_access_levels_on_user_id", using: :btree
+ end
+
+ create_table "protected_branch_unprotect_access_levels", id: :serial, force: :cascade do |t|
+ t.integer "protected_branch_id", null: false
+ t.integer "access_level", default: 40
+ t.integer "user_id"
+ t.integer "group_id"
+ t.index ["group_id"], name: "index_protected_branch_unprotect_access_levels_on_group_id", using: :btree
+ t.index ["protected_branch_id"], name: "index_protected_branch_unprotect_access", using: :btree
+ t.index ["user_id"], name: "index_protected_branch_unprotect_access_levels_on_user_id", using: :btree
end
create_table "protected_branches", id: :serial, force: :cascade do |t|
@@ -1932,6 +2761,27 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["project_id"], name: "index_protected_branches_on_project_id", using: :btree
end
+ create_table "protected_environment_deploy_access_levels", id: :serial, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "access_level", default: 40
+ t.integer "protected_environment_id", null: false
+ t.integer "user_id"
+ t.integer "group_id"
+ t.index ["group_id"], name: "index_protected_environment_deploy_access_levels_on_group_id", using: :btree
+ t.index ["protected_environment_id"], name: "index_protected_environment_deploy_access", using: :btree
+ t.index ["user_id"], name: "index_protected_environment_deploy_access_levels_on_user_id", using: :btree
+ end
+
+ create_table "protected_environments", id: :serial, force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.string "name", null: false
+ t.index ["project_id", "name"], name: "index_protected_environments_on_project_id_and_name", unique: true, using: :btree
+ t.index ["project_id"], name: "index_protected_environments_on_project_id", using: :btree
+ end
+
create_table "protected_tag_create_access_levels", id: :serial, force: :cascade do |t|
t.integer "protected_tag_id", null: false
t.integer "access_level", default: 40
@@ -1965,6 +2815,29 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["event_id"], name: "index_push_event_payloads_on_event_id", unique: true, using: :btree
end
+ create_table "push_rules", id: :serial, force: :cascade do |t|
+ t.string "force_push_regex"
+ t.string "delete_branch_regex"
+ t.string "commit_message_regex"
+ t.boolean "deny_delete_tag"
+ t.integer "project_id"
+ t.datetime "created_at"
+ t.datetime "updated_at"
+ t.string "author_email_regex"
+ t.boolean "member_check", default: false, null: false
+ t.string "file_name_regex"
+ t.boolean "is_sample", default: false
+ t.integer "max_file_size", default: 0, null: false
+ t.boolean "prevent_secrets", default: false, null: false
+ t.string "branch_name_regex"
+ t.boolean "reject_unsigned_commits"
+ t.boolean "commit_committer_check"
+ t.boolean "regexp_uses_re2", default: true
+ t.string "commit_message_negative_regex"
+ t.index ["is_sample"], name: "index_push_rules_on_is_sample", where: "is_sample", using: :btree
+ t.index ["project_id"], name: "index_push_rules_on_project_id", using: :btree
+ end
+
create_table "redirect_routes", id: :serial, force: :cascade do |t|
t.integer "source_id", null: false
t.string "source_type", null: false
@@ -2039,12 +2912,24 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.integer "cached_markdown_version"
t.text "reference"
t.text "reference_html"
+ t.integer "epic_id"
+ t.index ["epic_id"], name: "index_resource_label_events_on_epic_id", using: :btree
t.index ["issue_id"], name: "index_resource_label_events_on_issue_id", using: :btree
t.index ["label_id"], name: "index_resource_label_events_on_label_id", using: :btree
t.index ["merge_request_id"], name: "index_resource_label_events_on_merge_request_id", using: :btree
t.index ["user_id"], name: "index_resource_label_events_on_user_id", using: :btree
end
+ create_table "reviews", force: :cascade do |t|
+ t.integer "author_id"
+ t.integer "merge_request_id", null: false
+ t.integer "project_id", null: false
+ t.datetime_with_timezone "created_at", null: false
+ t.index ["author_id"], name: "index_reviews_on_author_id", using: :btree
+ t.index ["merge_request_id"], name: "index_reviews_on_merge_request_id", using: :btree
+ t.index ["project_id"], name: "index_reviews_on_project_id", using: :btree
+ end
+
create_table "routes", id: :serial, force: :cascade do |t|
t.integer "source_id", null: false
t.string "source_type", null: false
@@ -2057,6 +2942,24 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["source_type", "source_id"], name: "index_routes_on_source_type_and_source_id", unique: true, using: :btree
end
+ create_table "saml_providers", id: :serial, force: :cascade do |t|
+ t.integer "group_id", null: false
+ t.boolean "enabled", null: false
+ t.string "certificate_fingerprint", null: false
+ t.string "sso_url", null: false
+ t.boolean "enforced_sso", default: false, null: false
+ t.boolean "enforced_group_managed_accounts", default: false, null: false
+ t.index ["group_id"], name: "index_saml_providers_on_group_id", using: :btree
+ end
+
+ create_table "scim_oauth_access_tokens", id: :serial, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "group_id", null: false
+ t.string "token_encrypted", null: false
+ t.index ["group_id", "token_encrypted"], name: "index_scim_oauth_access_tokens_on_group_id_and_token_encrypted", unique: true, using: :btree
+ end
+
create_table "sent_notifications", id: :serial, force: :cascade do |t|
t.integer "project_id"
t.integer "noteable_id"
@@ -2104,6 +3007,26 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["name"], name: "index_shards_on_name", unique: true, using: :btree
end
+ create_table "slack_integrations", id: :serial, force: :cascade do |t|
+ t.integer "service_id", null: false
+ t.string "team_id", null: false
+ t.string "team_name", null: false
+ t.string "alias", null: false
+ t.string "user_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["service_id"], name: "index_slack_integrations_on_service_id", using: :btree
+ t.index ["team_id", "alias"], name: "index_slack_integrations_on_team_id_and_alias", unique: true, using: :btree
+ end
+
+ create_table "smartcard_identities", force: :cascade do |t|
+ t.integer "user_id", null: false
+ t.string "subject", null: false
+ t.string "issuer", null: false
+ t.index ["subject", "issuer"], name: "index_smartcard_identities_on_subject_and_issuer", unique: true, using: :btree
+ t.index ["user_id"], name: "index_smartcard_identities_on_user_id", using: :btree
+ end
+
create_table "snippets", id: :serial, force: :cascade do |t|
t.string "title"
t.text "content"
@@ -2127,6 +3050,19 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["visibility_level"], name: "index_snippets_on_visibility_level", using: :btree
end
+ create_table "software_license_policies", id: :serial, force: :cascade do |t|
+ t.integer "project_id", null: false
+ t.integer "software_license_id", null: false
+ t.integer "approval_status", default: 0, null: false
+ t.index ["project_id", "software_license_id"], name: "index_software_license_policies_unique_per_project", unique: true, using: :btree
+ t.index ["software_license_id"], name: "index_software_license_policies_on_software_license_id", using: :btree
+ end
+
+ create_table "software_licenses", id: :serial, force: :cascade do |t|
+ t.string "name", null: false
+ t.index ["name"], name: "index_software_licenses_on_name", using: :btree
+ end
+
create_table "spam_logs", id: :serial, force: :cascade do |t|
t.integer "user_id"
t.string "source_ip"
@@ -2326,6 +3262,10 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.string "timezone"
t.boolean "time_display_relative"
t.boolean "time_format_in_24h"
+ t.integer "epic_notes_filter", limit: 2, default: 0, null: false
+ t.string "epics_sort"
+ t.integer "roadmap_epics_state"
+ t.string "roadmaps_sort"
t.index ["user_id"], name: "index_user_preferences_on_user_id", unique: true, using: :btree
end
@@ -2415,24 +3355,48 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.boolean "private_profile"
t.boolean "include_private_contributions"
t.string "commit_email"
+ t.boolean "auditor", default: false, null: false
+ t.datetime "admin_email_unsubscribed_at"
+ t.boolean "email_opted_in"
+ t.datetime "email_opted_in_at"
+ t.string "email_opted_in_ip"
+ t.integer "email_opted_in_source_id"
+ t.integer "group_view"
+ t.integer "managing_group_id"
+ t.text "note"
+ t.integer "roadmap_layout", limit: 2
+ t.integer "bot_type", limit: 2
t.index ["accepted_term_id"], name: "index_users_on_accepted_term_id", using: :btree
t.index ["admin"], name: "index_users_on_admin", using: :btree
+ t.index ["bot_type"], name: "index_users_on_bot_type", using: :btree
t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true, using: :btree
t.index ["created_at"], name: "index_users_on_created_at", using: :btree
t.index ["email"], name: "index_users_on_email", unique: true, using: :btree
t.index ["email"], name: "index_users_on_email_trigram", using: :gin, opclasses: {"email"=>"gin_trgm_ops"}
t.index ["feed_token"], name: "index_users_on_feed_token", using: :btree
t.index ["ghost"], name: "index_users_on_ghost", using: :btree
+ t.index ["group_view"], name: "index_users_on_group_view", using: :btree
t.index ["incoming_email_token"], name: "index_users_on_incoming_email_token", using: :btree
+ t.index ["managing_group_id"], name: "index_users_on_managing_group_id", using: :btree
t.index ["name"], name: "index_users_on_name", using: :btree
t.index ["name"], name: "index_users_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
t.index ["public_email"], name: "index_users_on_public_email", where: "((public_email)::text <> ''::text)", using: :btree
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree
t.index ["state"], name: "index_users_on_state", using: :btree
+ t.index ["state"], name: "index_users_on_state_and_internal", where: "((ghost <> true) AND (bot_type IS NULL))", using: :btree
t.index ["username"], name: "index_users_on_username", using: :btree
t.index ["username"], name: "index_users_on_username_trigram", using: :gin, opclasses: {"username"=>"gin_trgm_ops"}
end
+ create_table "users_ops_dashboard_projects", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "user_id", null: false
+ t.integer "project_id", null: false
+ t.index ["project_id"], name: "index_users_ops_dashboard_projects_on_project_id", using: :btree
+ t.index ["user_id", "project_id"], name: "index_users_ops_dashboard_projects_on_user_id_and_project_id", unique: true, using: :btree
+ end
+
create_table "users_star_projects", id: :serial, force: :cascade do |t|
t.integer "project_id", null: false
t.integer "user_id", null: false
@@ -2442,6 +3406,88 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.index ["user_id", "project_id"], name: "index_users_star_projects_on_user_id_and_project_id", unique: true, using: :btree
end
+ create_table "vulnerability_feedback", id: :serial, force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "feedback_type", limit: 2, null: false
+ t.integer "category", limit: 2, null: false
+ t.integer "project_id", null: false
+ t.integer "author_id", null: false
+ t.integer "pipeline_id"
+ t.integer "issue_id"
+ t.string "project_fingerprint", limit: 40, null: false
+ t.integer "merge_request_id"
+ t.integer "comment_author_id"
+ t.text "comment"
+ t.datetime_with_timezone "comment_timestamp"
+ t.index ["author_id"], name: "index_vulnerability_feedback_on_author_id", using: :btree
+ t.index ["comment_author_id"], name: "index_vulnerability_feedback_on_comment_author_id", using: :btree
+ t.index ["issue_id"], name: "index_vulnerability_feedback_on_issue_id", using: :btree
+ t.index ["merge_request_id"], name: "index_vulnerability_feedback_on_merge_request_id", using: :btree
+ t.index ["pipeline_id"], name: "index_vulnerability_feedback_on_pipeline_id", using: :btree
+ t.index ["project_id", "category", "feedback_type", "project_fingerprint"], name: "vulnerability_feedback_unique_idx", unique: true, using: :btree
+ end
+
+ create_table "vulnerability_identifiers", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.binary "fingerprint", null: false
+ t.string "external_type", null: false
+ t.string "external_id", null: false
+ t.string "name", null: false
+ t.text "url"
+ t.index ["project_id", "fingerprint"], name: "index_vulnerability_identifiers_on_project_id_and_fingerprint", unique: true, using: :btree
+ end
+
+ create_table "vulnerability_occurrence_identifiers", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.bigint "occurrence_id", null: false
+ t.bigint "identifier_id", null: false
+ t.index ["identifier_id"], name: "index_vulnerability_occurrence_identifiers_on_identifier_id", using: :btree
+ t.index ["occurrence_id", "identifier_id"], name: "index_vulnerability_occurrence_identifiers_on_unique_keys", unique: true, using: :btree
+ end
+
+ create_table "vulnerability_occurrence_pipelines", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.bigint "occurrence_id", null: false
+ t.integer "pipeline_id", null: false
+ t.index ["occurrence_id", "pipeline_id"], name: "vulnerability_occurrence_pipelines_on_unique_keys", unique: true, using: :btree
+ t.index ["pipeline_id"], name: "index_vulnerability_occurrence_pipelines_on_pipeline_id", using: :btree
+ end
+
+ create_table "vulnerability_occurrences", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "severity", limit: 2, null: false
+ t.integer "confidence", limit: 2, null: false
+ t.integer "report_type", limit: 2, null: false
+ t.integer "project_id", null: false
+ t.bigint "scanner_id", null: false
+ t.bigint "primary_identifier_id", null: false
+ t.binary "project_fingerprint", null: false
+ t.binary "location_fingerprint", null: false
+ t.string "uuid", limit: 36, null: false
+ t.string "name", null: false
+ t.string "metadata_version", null: false
+ t.text "raw_metadata", null: false
+ t.index ["primary_identifier_id"], name: "index_vulnerability_occurrences_on_primary_identifier_id", using: :btree
+ t.index ["project_id", "primary_identifier_id", "location_fingerprint", "scanner_id"], name: "index_vulnerability_occurrences_on_unique_keys", unique: true, using: :btree
+ t.index ["scanner_id"], name: "index_vulnerability_occurrences_on_scanner_id", using: :btree
+ t.index ["uuid"], name: "index_vulnerability_occurrences_on_uuid", unique: true, using: :btree
+ end
+
+ create_table "vulnerability_scanners", force: :cascade do |t|
+ t.datetime_with_timezone "created_at", null: false
+ t.datetime_with_timezone "updated_at", null: false
+ t.integer "project_id", null: false
+ t.string "external_id", null: false
+ t.string "name", null: false
+ t.index ["project_id", "external_id"], name: "index_vulnerability_scanners_on_project_id_and_external_id", unique: true, using: :btree
+ end
+
create_table "web_hook_logs", id: :serial, force: :cascade do |t|
t.integer "web_hook_id", null: false
t.string "trigger"
@@ -2482,16 +3528,39 @@ ActiveRecord::Schema.define(version: 20190611161641) do
t.string "encrypted_token_iv"
t.string "encrypted_url"
t.string "encrypted_url_iv"
+ t.integer "group_id"
t.index ["project_id"], name: "index_web_hooks_on_project_id", using: :btree
t.index ["type"], name: "index_web_hooks_on_type", using: :btree
end
+ add_foreign_key "application_settings", "namespaces", column: "custom_project_templates_group_id", on_delete: :nullify
+ add_foreign_key "application_settings", "projects", column: "file_template_project_id", name: "fk_ec757bd087", on_delete: :nullify
add_foreign_key "application_settings", "users", column: "usage_stats_set_by_user_id", name: "fk_964370041d", on_delete: :nullify
+ add_foreign_key "approval_merge_request_rule_sources", "approval_merge_request_rules", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rule_sources", "approval_project_rules", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules", "merge_requests", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules_approved_approvers", "approval_merge_request_rules", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules_approved_approvers", "users", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules_groups", "approval_merge_request_rules", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules_groups", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules_users", "approval_merge_request_rules", on_delete: :cascade
+ add_foreign_key "approval_merge_request_rules_users", "users", on_delete: :cascade
+ add_foreign_key "approval_project_rules", "projects", on_delete: :cascade
+ add_foreign_key "approval_project_rules_groups", "approval_project_rules", on_delete: :cascade
+ add_foreign_key "approval_project_rules_groups", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "approval_project_rules_users", "approval_project_rules", on_delete: :cascade
+ add_foreign_key "approval_project_rules_users", "users", on_delete: :cascade
+ add_foreign_key "approvals", "merge_requests", name: "fk_310d714958", on_delete: :cascade
+ add_foreign_key "approver_groups", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "badges", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "badges", "projects", on_delete: :cascade
+ add_foreign_key "board_assignees", "boards", on_delete: :cascade
+ add_foreign_key "board_assignees", "users", column: "assignee_id", on_delete: :cascade
add_foreign_key "board_group_recent_visits", "boards", on_delete: :cascade
add_foreign_key "board_group_recent_visits", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "board_group_recent_visits", "users", on_delete: :cascade
+ add_foreign_key "board_labels", "boards", on_delete: :cascade
+ add_foreign_key "board_labels", "labels", on_delete: :cascade
add_foreign_key "board_project_recent_visits", "boards", on_delete: :cascade
add_foreign_key "board_project_recent_visits", "projects", on_delete: :cascade
add_foreign_key "board_project_recent_visits", "users", on_delete: :cascade
@@ -2505,6 +3574,7 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "ci_build_trace_sections", "projects", on_delete: :cascade
add_foreign_key "ci_builds", "ci_pipelines", column: "auto_canceled_by_id", name: "fk_a2141b1522", on_delete: :nullify
add_foreign_key "ci_builds", "ci_pipelines", column: "commit_id", name: "fk_d3130c9a7f", on_delete: :cascade
+ add_foreign_key "ci_builds", "ci_pipelines", column: "upstream_pipeline_id", name: "fk_87f4cefcda", on_delete: :cascade
add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
add_foreign_key "ci_builds_metadata", "ci_builds", column: "build_id", on_delete: :cascade
@@ -2526,6 +3596,11 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "ci_runner_namespaces", "ci_runners", column: "runner_id", on_delete: :cascade
add_foreign_key "ci_runner_namespaces", "namespaces", on_delete: :cascade
add_foreign_key "ci_runner_projects", "projects", name: "fk_4478a6f1e4", on_delete: :cascade
+ add_foreign_key "ci_sources_pipelines", "ci_builds", column: "source_job_id", name: "fk_be5624bf37", on_delete: :cascade
+ add_foreign_key "ci_sources_pipelines", "ci_pipelines", column: "pipeline_id", name: "fk_e1bad85861", on_delete: :cascade
+ add_foreign_key "ci_sources_pipelines", "ci_pipelines", column: "source_pipeline_id", name: "fk_d4e29af7d7", on_delete: :cascade
+ add_foreign_key "ci_sources_pipelines", "projects", column: "source_project_id", name: "fk_acd9737679", on_delete: :cascade
+ add_foreign_key "ci_sources_pipelines", "projects", name: "fk_1e53c97c0a", on_delete: :cascade
add_foreign_key "ci_stages", "ci_pipelines", column: "pipeline_id", name: "fk_fb57e6cc56", on_delete: :cascade
add_foreign_key "ci_stages", "projects", name: "fk_2360681d1d", on_delete: :cascade
add_foreign_key "ci_trigger_requests", "ci_triggers", column: "trigger_id", name: "fk_b8ec8b7245", on_delete: :cascade
@@ -2552,9 +3627,28 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "clusters_kubernetes_namespaces", "clusters", on_delete: :cascade
add_foreign_key "clusters_kubernetes_namespaces", "projects", on_delete: :nullify
add_foreign_key "container_repositories", "projects"
+ add_foreign_key "dependency_proxy_blobs", "namespaces", column: "group_id", name: "fk_db58bbc5d7", on_delete: :cascade
+ add_foreign_key "dependency_proxy_group_settings", "namespaces", column: "group_id", name: "fk_616ddd680a", on_delete: :cascade
add_foreign_key "deploy_keys_projects", "projects", name: "fk_58a901ca7e", on_delete: :cascade
add_foreign_key "deployments", "projects", name: "fk_b9a3851b82", on_delete: :cascade
+ add_foreign_key "design_management_designs", "issues", on_delete: :cascade
+ add_foreign_key "design_management_designs", "projects", on_delete: :cascade
+ add_foreign_key "design_management_designs_versions", "design_management_designs", column: "design_id", name: "fk_03c671965c", on_delete: :cascade
+ add_foreign_key "design_management_designs_versions", "design_management_versions", column: "version_id", name: "fk_f4d25ba00c", on_delete: :cascade
+ add_foreign_key "draft_notes", "merge_requests", on_delete: :cascade
+ add_foreign_key "draft_notes", "users", column: "author_id", on_delete: :cascade
+ add_foreign_key "elasticsearch_indexed_namespaces", "namespaces", on_delete: :cascade
+ add_foreign_key "elasticsearch_indexed_projects", "projects", on_delete: :cascade
add_foreign_key "environments", "projects", name: "fk_d1c8c1da6a", on_delete: :cascade
+ add_foreign_key "epic_issues", "epics", on_delete: :cascade
+ add_foreign_key "epic_issues", "issues", on_delete: :cascade
+ add_foreign_key "epic_metrics", "epics", on_delete: :cascade
+ add_foreign_key "epics", "epics", column: "parent_id", name: "fk_25b99c1be3", on_delete: :cascade
+ add_foreign_key "epics", "milestones", on_delete: :nullify
+ add_foreign_key "epics", "namespaces", column: "group_id", name: "fk_f081aa4489", on_delete: :cascade
+ add_foreign_key "epics", "users", column: "assignee_id", name: "fk_dccd3f98fc", on_delete: :nullify
+ add_foreign_key "epics", "users", column: "author_id", name: "fk_3654b61b03", on_delete: :cascade
+ add_foreign_key "epics", "users", column: "closed_by_id", name: "fk_aa5798e761", on_delete: :nullify
add_foreign_key "events", "projects", on_delete: :cascade
add_foreign_key "events", "users", column: "author_id", name: "fk_edfd187b6f", on_delete: :cascade
add_foreign_key "fork_network_members", "fork_networks", on_delete: :cascade
@@ -2562,18 +3656,47 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "fork_network_members", "projects", on_delete: :cascade
add_foreign_key "fork_networks", "projects", column: "root_project_id", name: "fk_e7b436b2b5", on_delete: :nullify
add_foreign_key "forked_project_links", "projects", column: "forked_to_project_id", name: "fk_434510edb0", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_cache_invalidation_events", column: "cache_invalidation_event_id", name: "fk_42c3b54bed", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_hashed_storage_migrated_events", column: "hashed_storage_migrated_event_id", name: "fk_27548c6db3", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_job_artifact_deleted_events", column: "job_artifact_deleted_event_id", name: "fk_176d3fbb5d", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_lfs_object_deleted_events", column: "lfs_object_deleted_event_id", name: "fk_d5af95fcd9", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_repositories_changed_events", column: "repositories_changed_event_id", name: "fk_4a99ebfd60", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_repository_created_events", column: "repository_created_event_id", name: "fk_9b9afb1916", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_repository_deleted_events", column: "repository_deleted_event_id", name: "fk_c4b1c1f66e", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_repository_renamed_events", column: "repository_renamed_event_id", name: "fk_86c84214ec", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_repository_updated_events", column: "repository_updated_event_id", name: "fk_78a6492f68", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_reset_checksum_events", column: "reset_checksum_event_id", name: "fk_cff7185ad2", on_delete: :cascade
+ add_foreign_key "geo_event_log", "geo_upload_deleted_events", column: "upload_deleted_event_id", name: "fk_c1f241c70d", on_delete: :cascade
+ add_foreign_key "geo_hashed_storage_attachments_events", "projects", on_delete: :cascade
+ add_foreign_key "geo_hashed_storage_migrated_events", "projects", on_delete: :cascade
+ add_foreign_key "geo_node_namespace_links", "geo_nodes", on_delete: :cascade
+ add_foreign_key "geo_node_namespace_links", "namespaces", on_delete: :cascade
+ add_foreign_key "geo_node_statuses", "geo_nodes", on_delete: :cascade
+ add_foreign_key "geo_repositories_changed_events", "geo_nodes", on_delete: :cascade
+ add_foreign_key "geo_repository_created_events", "projects", on_delete: :cascade
+ add_foreign_key "geo_repository_renamed_events", "projects", on_delete: :cascade
+ add_foreign_key "geo_repository_updated_events", "projects", on_delete: :cascade
+ add_foreign_key "geo_reset_checksum_events", "projects", on_delete: :cascade
+ add_foreign_key "gitlab_subscriptions", "namespaces", name: "fk_e2595d00a1", on_delete: :cascade
+ add_foreign_key "gitlab_subscriptions", "plans", column: "hosted_plan_id", name: "fk_bd0c4019c3", on_delete: :cascade
add_foreign_key "gpg_key_subkeys", "gpg_keys", on_delete: :cascade
add_foreign_key "gpg_keys", "users", on_delete: :cascade
add_foreign_key "gpg_signatures", "gpg_key_subkeys", on_delete: :nullify
add_foreign_key "gpg_signatures", "gpg_keys", on_delete: :nullify
add_foreign_key "gpg_signatures", "projects", on_delete: :cascade
add_foreign_key "group_custom_attributes", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "identities", "saml_providers", name: "fk_aade90f0fc", on_delete: :cascade
add_foreign_key "import_export_uploads", "projects", on_delete: :cascade
+ add_foreign_key "index_statuses", "projects", name: "fk_74b2492545", on_delete: :cascade
+ add_foreign_key "insights", "namespaces", on_delete: :cascade
+ add_foreign_key "insights", "projects", on_delete: :cascade
add_foreign_key "internal_ids", "namespaces", name: "fk_162941d509", on_delete: :cascade
add_foreign_key "internal_ids", "projects", on_delete: :cascade
add_foreign_key "ip_restrictions", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "issue_assignees", "issues", name: "fk_b7d881734a", on_delete: :cascade
add_foreign_key "issue_assignees", "users", name: "fk_5e0c8d9154", on_delete: :cascade
+ add_foreign_key "issue_links", "issues", column: "source_id", name: "fk_c900194ff2", on_delete: :cascade
+ add_foreign_key "issue_links", "issues", column: "target_id", name: "fk_e71bb44f1f", on_delete: :cascade
add_foreign_key "issue_metrics", "issues", on_delete: :cascade
add_foreign_key "issue_tracker_data", "services", on_delete: :cascade
add_foreign_key "issues", "issues", column: "moved_to_id", name: "fk_a194299be1", on_delete: :nullify
@@ -2582,6 +3705,8 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "issues", "users", column: "author_id", name: "fk_05f1e72feb", on_delete: :nullify
add_foreign_key "issues", "users", column: "closed_by_id", name: "fk_c63cbf6c25", on_delete: :nullify
add_foreign_key "issues", "users", column: "updated_by_id", name: "fk_ffed080f01", on_delete: :nullify
+ add_foreign_key "jira_connect_subscriptions", "jira_connect_installations", name: "fk_f1d617343f", on_delete: :cascade
+ add_foreign_key "jira_connect_subscriptions", "namespaces", name: "fk_a3c10bcf7d", on_delete: :cascade
add_foreign_key "jira_tracker_data", "services", on_delete: :cascade
add_foreign_key "label_links", "labels", name: "fk_d97dd08678", on_delete: :cascade
add_foreign_key "label_priorities", "labels", on_delete: :cascade
@@ -2592,6 +3717,8 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "lfs_file_locks", "users", on_delete: :cascade
add_foreign_key "lists", "boards", name: "fk_0d3f677137", on_delete: :cascade
add_foreign_key "lists", "labels", name: "fk_7a5553d60f", on_delete: :cascade
+ add_foreign_key "lists", "milestones", on_delete: :cascade
+ add_foreign_key "lists", "users", name: "fk_d6cf4279f7", on_delete: :cascade
add_foreign_key "members", "users", name: "fk_2e88fb7ce9", on_delete: :cascade
add_foreign_key "merge_request_assignees", "merge_requests", on_delete: :cascade
add_foreign_key "merge_request_assignees", "users", on_delete: :cascade
@@ -2621,15 +3748,29 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "merge_trains", "users", on_delete: :cascade
add_foreign_key "milestones", "namespaces", column: "group_id", name: "fk_95650a40d4", on_delete: :cascade
add_foreign_key "milestones", "projects", name: "fk_9bd0a0c791", on_delete: :cascade
+ add_foreign_key "namespace_statistics", "namespaces", on_delete: :cascade
+ add_foreign_key "namespaces", "namespaces", column: "custom_project_templates_group_id", name: "fk_e7a0b20a6b", on_delete: :nullify
+ add_foreign_key "namespaces", "plans", name: "fk_fdd12e5b80", on_delete: :nullify
+ add_foreign_key "namespaces", "projects", column: "file_template_project_id", name: "fk_319256d87a", on_delete: :nullify
add_foreign_key "note_diff_files", "notes", column: "diff_note_id", on_delete: :cascade
add_foreign_key "notes", "projects", name: "fk_99e097b079", on_delete: :cascade
+ add_foreign_key "notes", "reviews", name: "fk_2e82291620", on_delete: :nullify
add_foreign_key "notification_settings", "users", name: "fk_0c95e91db7", on_delete: :cascade
add_foreign_key "oauth_openid_requests", "oauth_access_grants", column: "access_grant_id", name: "fk_oauth_openid_requests_oauth_access_grants_access_grant_id"
+ add_foreign_key "operations_feature_flag_scopes", "operations_feature_flags", column: "feature_flag_id", on_delete: :cascade
+ add_foreign_key "operations_feature_flags", "projects", on_delete: :cascade
+ add_foreign_key "operations_feature_flags_clients", "projects", on_delete: :cascade
+ add_foreign_key "packages_maven_metadata", "packages_packages", column: "package_id", name: "fk_be88aed360", on_delete: :cascade
+ add_foreign_key "packages_package_files", "packages_packages", column: "package_id", name: "fk_86f0f182f8", on_delete: :cascade
+ add_foreign_key "packages_packages", "projects", on_delete: :cascade
add_foreign_key "pages_domain_acme_orders", "pages_domains", on_delete: :cascade
add_foreign_key "pages_domains", "projects", name: "fk_ea2f6dfc6f", on_delete: :cascade
+ add_foreign_key "path_locks", "projects", name: "fk_5265c98f24", on_delete: :cascade
+ add_foreign_key "path_locks", "users"
add_foreign_key "personal_access_tokens", "users"
add_foreign_key "pool_repositories", "projects", column: "source_project_id", on_delete: :nullify
add_foreign_key "pool_repositories", "shards", on_delete: :restrict
+ add_foreign_key "project_alerting_settings", "projects", on_delete: :cascade
add_foreign_key "project_authorizations", "projects", on_delete: :cascade
add_foreign_key "project_authorizations", "users", on_delete: :cascade
add_foreign_key "project_auto_devops", "projects", on_delete: :cascade
@@ -2639,35 +3780,66 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "project_deploy_tokens", "deploy_tokens", on_delete: :cascade
add_foreign_key "project_deploy_tokens", "projects", on_delete: :cascade
add_foreign_key "project_error_tracking_settings", "projects", on_delete: :cascade
+ add_foreign_key "project_feature_usages", "projects", on_delete: :cascade
add_foreign_key "project_features", "projects", name: "fk_18513d9b92", on_delete: :cascade
add_foreign_key "project_group_links", "projects", name: "fk_daa8cee94c", on_delete: :cascade
add_foreign_key "project_import_data", "projects", name: "fk_ffb9ee3a10", on_delete: :cascade
+ add_foreign_key "project_incident_management_settings", "projects", on_delete: :cascade
add_foreign_key "project_metrics_settings", "projects", on_delete: :cascade
add_foreign_key "project_mirror_data", "projects", on_delete: :cascade
add_foreign_key "project_repositories", "projects", on_delete: :cascade
add_foreign_key "project_repositories", "shards", on_delete: :restrict
+ add_foreign_key "project_repository_states", "projects", on_delete: :cascade
add_foreign_key "project_statistics", "projects", on_delete: :cascade
+ add_foreign_key "project_tracing_settings", "projects", on_delete: :cascade
add_foreign_key "projects", "pool_repositories", name: "fk_6e5c14658a", on_delete: :nullify
+ add_foreign_key "prometheus_alert_events", "projects", on_delete: :cascade
+ add_foreign_key "prometheus_alert_events", "prometheus_alerts", on_delete: :cascade
+ add_foreign_key "prometheus_alerts", "environments", on_delete: :cascade
+ add_foreign_key "prometheus_alerts", "projects", on_delete: :cascade
+ add_foreign_key "prometheus_alerts", "prometheus_metrics", on_delete: :cascade
add_foreign_key "prometheus_metrics", "projects", on_delete: :cascade
+ add_foreign_key "protected_branch_merge_access_levels", "namespaces", column: "group_id", name: "fk_98f3d044fe", on_delete: :cascade
add_foreign_key "protected_branch_merge_access_levels", "protected_branches", name: "fk_8a3072ccb3", on_delete: :cascade
+ add_foreign_key "protected_branch_merge_access_levels", "users"
+ add_foreign_key "protected_branch_push_access_levels", "namespaces", column: "group_id", name: "fk_7111b68cdb", on_delete: :cascade
add_foreign_key "protected_branch_push_access_levels", "protected_branches", name: "fk_9ffc86a3d9", on_delete: :cascade
+ add_foreign_key "protected_branch_push_access_levels", "users"
+ add_foreign_key "protected_branch_unprotect_access_levels", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "protected_branch_unprotect_access_levels", "protected_branches", on_delete: :cascade
+ add_foreign_key "protected_branch_unprotect_access_levels", "users", on_delete: :cascade
add_foreign_key "protected_branches", "projects", name: "fk_7a9c6d93e7", on_delete: :cascade
- add_foreign_key "protected_tag_create_access_levels", "namespaces", column: "group_id"
+ add_foreign_key "protected_environment_deploy_access_levels", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "protected_environment_deploy_access_levels", "protected_environments", on_delete: :cascade
+ add_foreign_key "protected_environment_deploy_access_levels", "users", on_delete: :cascade
+ add_foreign_key "protected_environments", "projects", on_delete: :cascade
+ add_foreign_key "protected_tag_create_access_levels", "namespaces", column: "group_id", name: "fk_b4eb82fe3c", on_delete: :cascade
add_foreign_key "protected_tag_create_access_levels", "protected_tags", name: "fk_f7dfda8c51", on_delete: :cascade
add_foreign_key "protected_tag_create_access_levels", "users"
add_foreign_key "protected_tags", "projects", name: "fk_8e4af87648", on_delete: :cascade
add_foreign_key "push_event_payloads", "events", name: "fk_36c74129da", on_delete: :cascade
+ add_foreign_key "push_rules", "projects", name: "fk_83b29894de", on_delete: :cascade
add_foreign_key "release_links", "releases", on_delete: :cascade
add_foreign_key "releases", "projects", name: "fk_47fe2a0596", on_delete: :cascade
add_foreign_key "releases", "users", column: "author_id", name: "fk_8e4456f90f", on_delete: :nullify
add_foreign_key "remote_mirrors", "projects", on_delete: :cascade
add_foreign_key "repository_languages", "projects", on_delete: :cascade
+ add_foreign_key "resource_label_events", "epics", on_delete: :cascade
add_foreign_key "resource_label_events", "issues", on_delete: :cascade
add_foreign_key "resource_label_events", "labels", on_delete: :nullify
add_foreign_key "resource_label_events", "merge_requests", on_delete: :cascade
add_foreign_key "resource_label_events", "users", on_delete: :nullify
+ add_foreign_key "reviews", "merge_requests", on_delete: :cascade
+ add_foreign_key "reviews", "projects", on_delete: :cascade
+ add_foreign_key "reviews", "users", column: "author_id", on_delete: :nullify
+ add_foreign_key "saml_providers", "namespaces", column: "group_id", on_delete: :cascade
+ add_foreign_key "scim_oauth_access_tokens", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "services", "projects", name: "fk_71cce407f9", on_delete: :cascade
+ add_foreign_key "slack_integrations", "services", on_delete: :cascade
+ add_foreign_key "smartcard_identities", "users", on_delete: :cascade
add_foreign_key "snippets", "projects", name: "fk_be41fd4bb7", on_delete: :cascade
+ add_foreign_key "software_license_policies", "projects", on_delete: :cascade
+ add_foreign_key "software_license_policies", "software_licenses", on_delete: :cascade
add_foreign_key "subscriptions", "projects", on_delete: :cascade
add_foreign_key "suggestions", "notes", on_delete: :cascade
add_foreign_key "system_note_metadata", "notes", name: "fk_d83a918cb1", on_delete: :cascade
@@ -2690,7 +3862,25 @@ ActiveRecord::Schema.define(version: 20190611161641) do
add_foreign_key "user_statuses", "users", on_delete: :cascade
add_foreign_key "user_synced_attributes_metadata", "users", on_delete: :cascade
add_foreign_key "users", "application_setting_terms", column: "accepted_term_id", name: "fk_789cd90b35", on_delete: :cascade
+ add_foreign_key "users", "namespaces", column: "managing_group_id", name: "fk_a4b8fefe3e", on_delete: :nullify
+ add_foreign_key "users_ops_dashboard_projects", "projects", on_delete: :cascade
+ add_foreign_key "users_ops_dashboard_projects", "users", on_delete: :cascade
add_foreign_key "users_star_projects", "projects", name: "fk_22cd27ddfc", on_delete: :cascade
+ add_foreign_key "vulnerability_feedback", "ci_pipelines", column: "pipeline_id", on_delete: :nullify
+ add_foreign_key "vulnerability_feedback", "issues", on_delete: :nullify
+ add_foreign_key "vulnerability_feedback", "merge_requests", name: "fk_563ff1912e", on_delete: :nullify
+ add_foreign_key "vulnerability_feedback", "projects", on_delete: :cascade
+ add_foreign_key "vulnerability_feedback", "users", column: "author_id", on_delete: :cascade
+ add_foreign_key "vulnerability_feedback", "users", column: "comment_author_id", name: "fk_94f7c8a81e", on_delete: :nullify
+ add_foreign_key "vulnerability_identifiers", "projects", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrence_identifiers", "vulnerability_identifiers", column: "identifier_id", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrence_identifiers", "vulnerability_occurrences", column: "occurrence_id", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrence_pipelines", "ci_pipelines", column: "pipeline_id", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrence_pipelines", "vulnerability_occurrences", column: "occurrence_id", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrences", "projects", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrences", "vulnerability_identifiers", column: "primary_identifier_id", on_delete: :cascade
+ add_foreign_key "vulnerability_occurrences", "vulnerability_scanners", column: "scanner_id", on_delete: :cascade
+ add_foreign_key "vulnerability_scanners", "projects", on_delete: :cascade
add_foreign_key "web_hook_logs", "web_hooks", on_delete: :cascade
add_foreign_key "web_hooks", "projects", name: "fk_0c8ca6d9d1", on_delete: :cascade
end
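
For reference: the backported schema above leans heavily on partial indexes (note the `where:` clauses on, for example, `idx_repository_states_outdated_checksums`). A minimal sketch of how such a partial index is typically created with GitLab's standard migration helpers; the class name is hypothetical and the index already exists in the schema above, so this only illustrates the pattern:

```ruby
# frozen_string_literal: true

# Illustrative only: the add_concurrent_index/where: pattern behind the
# partial indexes in the schema above.
class AddOutdatedChecksumsIndexExample < ActiveRecord::Migration[5.1]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INDEX_NAME = 'idx_repository_states_outdated_checksums'

  disable_ddl_transaction!

  def up
    # Index only the rows that still need verification, keeping the index
    # small and the "outdated checksums" lookup cheap.
    add_concurrent_index :project_repository_states, :project_id,
      name: INDEX_NAME,
      where: '(repository_verification_checksum IS NULL AND last_repository_verification_failure IS NULL) ' \
             'OR (wiki_verification_checksum IS NULL AND last_wiki_verification_failure IS NULL)'
  end

  def down
    remove_concurrent_index_by_name :project_repository_states, INDEX_NAME
  end
end
```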
diff --git a/doc/development/import_export.md b/doc/development/import_export.md
index fd067b80c16..64c91f151c5 100644
--- a/doc/development/import_export.md
+++ b/doc/development/import_export.md
@@ -147,7 +147,6 @@ The `ModelConfigurationSpec` checks and confirms the addition of new models:
If you think this model should be included in the export, please add it to `#{Gitlab::ImportExport.config_file}`.
Definitely add it to `#{File.expand_path(ce_models_yml)}`
- #{"or `#{File.expand_path(ee_models_yml)}` if the model/associations are EE-specific\n" if ee_models_hash.any?}
to signal that you've handled this error and to prevent it from showing up in the future.
MSG
```
@@ -253,7 +252,7 @@ Model relationships to be included in the project import/export:
```yaml
project_tree:
- labels:
- :priorities
+ - :priorities
- milestones:
- events:
- :push_event_payload
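
The YAML fix above matters because the children of `labels:` must form a sequence; without the `- ` marker the document does not parse into the intended tree. A quick illustrative check in Ruby (standalone, not part of this patch):

```ruby
require 'yaml'

# Psych deserializes plain scalars starting with ':' as Ruby symbols,
# so Symbol must be permitted for safe_load.
tree = YAML.safe_load(<<~YAML, [Symbol])
  project_tree:
    - labels:
        - :priorities
    - milestones:
        - events:
            - :push_event_payload
YAML

tree
# => {"project_tree"=>[{"labels"=>[:priorities]},
#                      {"milestones"=>[{"events"=>[:push_event_payload]}]}]}
```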
diff --git a/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb b/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb
deleted file mode 100644
index da8265a3a5f..00000000000
--- a/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-class Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys
- class GpgKey < ActiveRecord::Base
- self.table_name = 'gpg_keys'
-
- include EachBatch
- include ShaAttribute
-
- sha_attribute :primary_keyid
- sha_attribute :fingerprint
-
- has_many :subkeys, class_name: 'GpgKeySubkey'
- end
-
- class GpgKeySubkey < ActiveRecord::Base
- self.table_name = 'gpg_key_subkeys'
-
- include ShaAttribute
-
- sha_attribute :keyid
- sha_attribute :fingerprint
- end
-
- def perform(gpg_key_id)
- gpg_key = GpgKey.find_by(id: gpg_key_id)
-
- return if gpg_key.nil?
- return if gpg_key.subkeys.any?
-
- create_subkeys(gpg_key)
- update_signatures(gpg_key)
- end
-
- private
-
- def create_subkeys(gpg_key)
- gpg_subkeys = Gitlab::Gpg.subkeys_from_key(gpg_key.key)
-
- gpg_subkeys[gpg_key.primary_keyid.upcase]&.each do |subkey_data|
- gpg_key.subkeys.build(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
- end
-
- # Improve latency by doing all INSERTs in a single call
- GpgKey.transaction do
- gpg_key.save!
- end
- end
-
- def update_signatures(gpg_key)
- return unless gpg_key.subkeys.exists?
-
- InvalidGpgSignatureUpdateWorker.perform_async(gpg_key.id)
- end
-end
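
For context on how classes like the removed `CreateGpgKeySubkeysFromGpgKeys` were driven: each record was enqueued as its own `BackgroundMigrationWorker` job. A minimal sketch of that scheduling pattern, assuming the worker API these classes target; the migration class and model stub here are hypothetical:

```ruby
# frozen_string_literal: true

# Hypothetical scheduling sketch: one background-migration job per GPG key.
class ScheduleCreateGpgKeySubkeysExample < ActiveRecord::Migration[5.1]
  DOWNTIME = false
  MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'

  # Lightweight model so the migration does not depend on app code.
  class GpgKey < ActiveRecord::Base
    self.table_name = 'gpg_keys'
  end

  def up
    GpgKey.select(:id).find_each do |key|
      # Each job is a no-op when the key is gone or already has subkeys
      # (see #perform above), so re-running is safe.
      BackgroundMigrationWorker.perform_async(MIGRATION, [key.id])
    end
  end

  def down
    # Subkeys created by the jobs are intentionally kept.
  end
end
```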
diff --git a/lib/gitlab/background_migration/delete_diff_files.rb b/lib/gitlab/background_migration/delete_diff_files.rb
deleted file mode 100644
index 664ead1af44..00000000000
--- a/lib/gitlab/background_migration/delete_diff_files.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class DeleteDiffFiles
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
-
- belongs_to :merge_request
- has_many :merge_request_diff_files
- end
-
- class MergeRequestDiffFile < ActiveRecord::Base
- self.table_name = 'merge_request_diff_files'
- end
-
- DEAD_TUPLES_THRESHOLD = 50_000
- VACUUM_WAIT_TIME = 5.minutes
-
- def perform(ids)
- @ids = ids
-
- # We should reschedule until dead tuples drop to a desirable
- # level (e.g. < 50_000). That may take more than one reschedule.
- #
- if should_wait_deadtuple_vacuum?
- reschedule
- return
- end
-
- prune_diff_files
- end
-
- def should_wait_deadtuple_vacuum?
- return false unless Gitlab::Database.postgresql?
-
- diff_files_dead_tuples_count >= DEAD_TUPLES_THRESHOLD
- end
-
- private
-
- def reschedule
- BackgroundMigrationWorker.perform_in(VACUUM_WAIT_TIME, self.class.name.demodulize, [@ids])
- end
-
- def diffs_collection
- MergeRequestDiff.where(id: @ids)
- end
-
- def diff_files_dead_tuples_count
- dead_tuple =
- execute_statement("SELECT n_dead_tup FROM pg_stat_all_tables "\
- "WHERE relname = 'merge_request_diff_files'")[0]
-
- dead_tuple&.fetch('n_dead_tup', 0).to_i
- end
-
- def prune_diff_files
- removed = 0
- updated = 0
-
- MergeRequestDiff.transaction do
- updated = diffs_collection.update_all(state: 'without_files')
- removed = MergeRequestDiffFile.where(merge_request_diff_id: @ids).delete_all
- end
-
- log_info("Removed #{removed} merge_request_diff_files rows, "\
- "updated #{updated} merge_request_diffs rows")
- end
-
- def execute_statement(sql)
- ActiveRecord::Base.connection.execute(sql)
- end
-
- def log_info(message)
- Rails.logger.info("BackgroundMigration::DeleteDiffFiles - #{message}")
- end
- end
- end
-end
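
The removed `DeleteDiffFiles` class guards bulk deletes behind a dead-tuple check so autovacuum can keep up. The same check can be reproduced standalone; a sketch assuming PostgreSQL and an open ActiveRecord connection (`dead_tuples_for` and `ids` are names introduced here for illustration):

```ruby
# Standalone version of the dead-tuple guard used above (PostgreSQL only).
def dead_tuples_for(table_name)
  conn = ActiveRecord::Base.connection
  row = conn.execute(
    "SELECT n_dead_tup FROM pg_stat_all_tables " \
    "WHERE relname = #{conn.quote(table_name)}"
  ).first

  row ? row['n_dead_tup'].to_i : 0
end

# Postpone the heavy delete while the table still has too many dead rows;
# `ids` stands in for the batch being processed.
if dead_tuples_for('merge_request_diff_files') >= 50_000
  BackgroundMigrationWorker.perform_in(5.minutes, 'DeleteDiffFiles', [ids])
end
```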
diff --git a/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb b/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb
deleted file mode 100644
index 58df74cfa9b..00000000000
--- a/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb
+++ /dev/null
@@ -1,149 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Metrics/MethodLength
-# rubocop:disable Metrics/AbcSize
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class DeserializeMergeRequestDiffsAndCommits
- attr_reader :diff_ids, :commit_rows, :file_rows
-
- class Error < StandardError
- def backtrace
- cause.backtrace
- end
- end
-
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
- end
-
- BUFFER_ROWS = 1000
- DIFF_FILE_BUFFER_ROWS = 100
-
- def perform(start_id, stop_id)
- merge_request_diffs = MergeRequestDiff
- .select(:id, :st_commits, :st_diffs)
- .where('st_commits IS NOT NULL OR st_diffs IS NOT NULL')
- .where(id: start_id..stop_id)
-
- reset_buffers!
-
- merge_request_diffs.each do |merge_request_diff|
- commits, files = single_diff_rows(merge_request_diff)
-
- diff_ids << merge_request_diff.id
- commit_rows.concat(commits)
- file_rows.concat(files)
-
- if diff_ids.length > BUFFER_ROWS ||
- commit_rows.length > BUFFER_ROWS ||
- file_rows.length > DIFF_FILE_BUFFER_ROWS
-
- flush_buffers!
- end
- end
-
- flush_buffers!
- rescue => e
- Rails.logger.info("#{self.class.name}: failed for IDs #{merge_request_diffs.map(&:id)} with #{e.class.name}")
-
- raise Error.new(e.inspect)
- end
-
- private
-
- def reset_buffers!
- @diff_ids = []
- @commit_rows = []
- @file_rows = []
- end
-
- def flush_buffers!
- if diff_ids.any?
- commit_rows.each_slice(BUFFER_ROWS).each do |commit_rows_slice|
- bulk_insert('merge_request_diff_commits', commit_rows_slice)
- end
-
- file_rows.each_slice(DIFF_FILE_BUFFER_ROWS).each do |file_rows_slice|
- bulk_insert('merge_request_diff_files', file_rows_slice)
- end
-
- MergeRequestDiff.where(id: diff_ids).update_all(st_commits: nil, st_diffs: nil)
- end
-
- reset_buffers!
- end
-
- def bulk_insert(table, rows)
- Gitlab::Database.bulk_insert(table, rows)
- rescue ActiveRecord::RecordNotUnique
- ids = rows.map { |row| row[:merge_request_diff_id] }.uniq.sort
-
- Rails.logger.info("#{self.class.name}: rows inserted twice for IDs #{ids}")
- end
-
- def single_diff_rows(merge_request_diff)
- sha_attribute = Gitlab::Database::ShaAttribute.new
- commits = YAML.load(merge_request_diff.st_commits) rescue []
- commits ||= []
-
- commit_rows = commits.map.with_index do |commit, index|
- commit_hash = commit.to_hash.with_indifferent_access.except(:parent_ids)
- sha = commit_hash.delete(:id)
-
- commit_hash.merge(
- merge_request_diff_id: merge_request_diff.id,
- relative_order: index,
- sha: sha_attribute.serialize(sha)
- )
- end
-
- diffs = YAML.load(merge_request_diff.st_diffs) rescue []
- diffs = [] unless valid_raw_diffs?(diffs)
-
- file_rows = diffs.map.with_index do |diff, index|
- diff_hash = diff.to_hash.with_indifferent_access.merge(
- binary: false,
- merge_request_diff_id: merge_request_diff.id,
- relative_order: index
- )
-
- diff_hash.tap do |hash|
- diff_text = hash[:diff]
-
- hash[:too_large] = !!hash[:too_large]
-
- hash[:a_mode] ||= guess_mode(hash[:new_file], hash[:diff])
- hash[:b_mode] ||= guess_mode(hash[:deleted_file], hash[:diff])
-
- # Compatibility with old diffs created with Psych.
- if diff_text.encoding == Encoding::BINARY && !diff_text.ascii_only?
- hash[:binary] = true
- hash[:diff] = [diff_text].pack('m0')
- end
- end
- end
-
- [commit_rows, file_rows]
- end
-
- # This doesn't have to be 100% accurate, because it's only used for
- # display - it won't change file modes in the repository. Submodules are
- # created as mode 160000 (gitlink), regular files as 100644.
- def guess_mode(file_missing, diff)
- return '0' if file_missing
-
- diff.include?('Subproject commit') ? '160000' : '100644'
- end
-
- # Unlike MergeRequestDiff#valid_raw_diff?, don't count Rugged objects as
- # valid, because we don't render them usefully anyway.
- def valid_raw_diffs?(diffs)
- return false unless diffs.respond_to?(:each)
-
- diffs.all? { |diff| diff.is_a?(Hash) }
- end
- end
- end
-end
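
The deleted class above is a good record of the buffered bulk-insert pattern: rows are accumulated per diff and flushed in bounded slices, so no single INSERT statement grows without limit and progress is committed as the batch advances. A minimal plain-Ruby sketch of that strategy, with a hypothetical flush_rows helper standing in for Gitlab::Database.bulk_insert:

    BUFFER_ROWS = 1_000

    def flush_rows(rows)
      # In the migration this is one multi-row INSERT; here we only report
      # how many rows would have been written.
      puts "flushing #{rows.length} rows" unless rows.empty?
    end

    buffer = []

    1.upto(2_500) do |id|
      buffer << { id: id }

      if buffer.length > BUFFER_ROWS
        buffer.each_slice(BUFFER_ROWS) { |slice| flush_rows(slice) }
        buffer = []
      end
    end

    flush_rows(buffer) # final partial flush, mirroring flush_buffers! after the loop
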
diff --git a/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check.rb b/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check.rb
new file mode 100644
index 00000000000..e948cedaad5
--- /dev/null
+++ b/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # rubocop: disable Style/Documentation
+ class MergeRequestAssigneesMigrationProgressCheck
+ include Gitlab::Utils::StrongMemoize
+
+ RESCHEDULE_DELAY = 3.hours
+ WORKER = 'PopulateMergeRequestAssigneesTable'.freeze
+ DeadJobsError = Class.new(StandardError)
+
+ def perform
+ raise DeadJobsError, "Only dead background jobs in the queue for #{WORKER}" if !ongoing? && dead_jobs?
+
+ if ongoing?
+ BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, self.class.name)
+ else
+ Feature.enable(:multiple_merge_request_assignees)
+ end
+ end
+
+ private
+
+ def dead_jobs?
+ strong_memoize(:dead_jobs) do
+ migration_klass.dead_jobs?(WORKER)
+ end
+ end
+
+ def ongoing?
+ strong_memoize(:ongoing) do
+ migration_klass.exists?(WORKER) || migration_klass.retrying_jobs?(WORKER)
+ end
+ end
+
+ def migration_klass
+ Gitlab::BackgroundMigration
+ end
+ end
+ # rubocop: enable Style/Documentation
+ end
+end
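
The new class is a check-and-reschedule loop: while jobs for the tracked worker are still scheduled or retrying, the check re-enqueues itself after RESCHEDULE_DELAY; once the queue drains cleanly it enables the feature flag, and if only dead jobs remain it raises so the failure is visible. A self-contained sketch of that decision logic, with the queue state reduced to a plain Hash (the real class queries Gitlab::BackgroundMigration):

    DeadJobsError = Class.new(StandardError)

    def check(queue)
      ongoing = queue[:scheduled] || queue[:retrying]

      raise DeadJobsError, 'only dead jobs left' if !ongoing && queue[:dead]

      ongoing ? 'reschedule the check' : 'enable the feature flag'
    end

    puts check(scheduled: true,  retrying: false, dead: false) # reschedule the check
    puts check(scheduled: false, retrying: false, dead: false) # enable the feature flag
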
diff --git a/lib/gitlab/background_migration/populate_external_pipeline_source.rb b/lib/gitlab/background_migration/populate_external_pipeline_source.rb
deleted file mode 100644
index 036fe641757..00000000000
--- a/lib/gitlab/background_migration/populate_external_pipeline_source.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class PopulateExternalPipelineSource
- module Migratable
- class Pipeline < ActiveRecord::Base
- self.table_name = 'ci_pipelines'
-
- def self.sources
- {
- unknown: nil,
- push: 1,
- web: 2,
- trigger: 3,
- schedule: 4,
- api: 5,
- external: 6
- }
- end
- end
-
- class CommitStatus < ActiveRecord::Base
- self.table_name = 'ci_builds'
- self.inheritance_column = :_type_disabled
-
- scope :has_pipeline, -> { where('ci_builds.commit_id=ci_pipelines.id') }
- scope :of_type, -> (type) { where('type=?', type) }
- end
- end
-
- def perform(start_id, stop_id)
- external_pipelines(start_id, stop_id)
- .update_all(source: Migratable::Pipeline.sources[:external])
- end
-
- private
-
- def external_pipelines(start_id, stop_id)
- Migratable::Pipeline.where(id: (start_id..stop_id))
- .where(
- 'EXISTS (?) AND NOT EXISTS (?)',
- Migratable::CommitStatus.of_type('GenericCommitStatus').has_pipeline.select(1),
- Migratable::CommitStatus.of_type('Ci::Build').has_pipeline.select(1)
- )
- end
- end
- end
-end
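
The relation built in external_pipelines expands its two scoped relations into correlated subqueries: a pipeline qualifies when it has at least one GenericCommitStatus and no Ci::Build rows. The generated SQL is approximately the following (simplified, with an illustrative ID range):

    SELECT ci_pipelines.* FROM ci_pipelines
    WHERE ci_pipelines.id BETWEEN 1 AND 100
      AND EXISTS (SELECT 1 FROM ci_builds
                  WHERE type = 'GenericCommitStatus'
                    AND ci_builds.commit_id = ci_pipelines.id)
      AND NOT EXISTS (SELECT 1 FROM ci_builds
                      WHERE type = 'Ci::Build'
                        AND ci_builds.commit_id = ci_pipelines.id)
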
diff --git a/lib/gitlab/background_migration/populate_import_state.rb b/lib/gitlab/background_migration/populate_import_state.rb
deleted file mode 100644
index 695a2a713c5..00000000000
--- a/lib/gitlab/background_migration/populate_import_state.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This background migration creates all the records on the
- # import state table for projects that are considered imports or forks
- class PopulateImportState
- def perform(start_id, end_id)
- move_attributes_data_to_import_state(start_id, end_id)
- rescue ActiveRecord::RecordNotUnique
- retry
- end
-
- def move_attributes_data_to_import_state(start_id, end_id)
- Rails.logger.info("#{self.class.name} - Moving import attributes data to project mirror data table: #{start_id} - #{end_id}")
-
- ActiveRecord::Base.connection.execute <<~SQL
- INSERT INTO project_mirror_data (project_id, status, jid, last_error)
- SELECT id, import_status, import_jid, import_error
- FROM projects
- WHERE projects.import_status != 'none'
- AND projects.id BETWEEN #{start_id} AND #{end_id}
- AND NOT EXISTS (
- SELECT id
- FROM project_mirror_data
- WHERE project_id = projects.id
- )
- SQL
-
- ActiveRecord::Base.connection.execute <<~SQL
- UPDATE projects
- SET import_status = 'none'
- WHERE import_status != 'none'
- AND id BETWEEN #{start_id} AND #{end_id}
- SQL
- end
- end
- end
-end
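
Two details make this migration safe to re-run: the NOT EXISTS guard means the INSERT skips projects that already have an import state row, and rescuing ActiveRecord::RecordNotUnique with retry covers a concurrent worker racing past that guard. A toy sketch of the rescue/retry idiom, with DuplicateRow standing in for the ActiveRecord error:

    DuplicateRow = Class.new(StandardError)

    def insert_batch(conflict: true)
      # The first attempt hits a simulated unique-key violation; the retry
      # re-runs the idempotent INSERT and succeeds.
      raise DuplicateRow if conflict

      puts 'rows inserted; existing rows skipped by NOT EXISTS'
    rescue DuplicateRow
      conflict = false
      retry
    end

    insert_batch
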
diff --git a/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb b/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb
deleted file mode 100644
index d89ce358bb9..00000000000
--- a/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class PopulateMergeRequestMetricsWithEventsData
- def perform(min_merge_request_id, max_merge_request_id)
- insert_metrics_for_range(min_merge_request_id, max_merge_request_id)
- update_metrics_with_events_data(min_merge_request_id, max_merge_request_id)
- end
-
- # Inserts merge_request_metrics records for merge_requests without it for
- # a given merge request batch.
- def insert_metrics_for_range(min, max)
- metrics_not_exists_clause =
- <<-SQL.strip_heredoc
- NOT EXISTS (SELECT 1 FROM merge_request_metrics
- WHERE merge_request_metrics.merge_request_id = merge_requests.id)
- SQL
-
- MergeRequest.where(metrics_not_exists_clause).where(id: min..max).each_batch do |batch|
- select_sql = batch.select(:id, :created_at, :updated_at).to_sql
-
- execute("INSERT INTO merge_request_metrics (merge_request_id, created_at, updated_at) #{select_sql}")
- end
- end
-
- def update_metrics_with_events_data(min, max)
- if Gitlab::Database.postgresql?
- # Uses WITH syntax in order to update merged and closed events with a single UPDATE.
- # WITH is not supported by MySQL.
- update_events_for_range(min, max)
- else
- update_merged_events_for_range(min, max)
- update_closed_events_for_range(min, max)
- end
- end
-
- private
-
- # Updates merge_request_metrics latest_closed_at, latest_closed_by_id and merged_by_id
- # based on the latest event records on events table for a given merge request batch.
- def update_events_for_range(min, max)
- sql = <<-SQL.strip_heredoc
- WITH events_for_update AS (
- SELECT DISTINCT ON (target_id, action) target_id, action, author_id, updated_at
- FROM events
- WHERE target_id BETWEEN #{min} AND #{max}
- AND target_type = 'MergeRequest'
- AND action IN (#{Event::CLOSED},#{Event::MERGED})
- ORDER BY target_id, action, id DESC
- )
- UPDATE merge_request_metrics met
- SET latest_closed_at = latest_closed.updated_at,
- latest_closed_by_id = latest_closed.author_id,
- merged_by_id = latest_merged.author_id
- FROM (SELECT * FROM events_for_update WHERE action = #{Event::CLOSED}) AS latest_closed
- FULL OUTER JOIN
- (SELECT * FROM events_for_update WHERE action = #{Event::MERGED}) AS latest_merged
- USING (target_id)
- WHERE target_id = merge_request_id;
- SQL
-
- execute(sql)
- end
-
- # Updates merge_request_metrics latest_closed_at, latest_closed_by_id based on the latest closed
- # records on events table for a given merge request batch.
- def update_closed_events_for_range(min, max)
- sql =
- <<-SQL.strip_heredoc
- UPDATE merge_request_metrics metrics,
- (#{select_events(min, max, Event::CLOSED)}) closed_events
- SET metrics.latest_closed_by_id = closed_events.author_id,
- metrics.latest_closed_at = closed_events.updated_at #{where_matches_closed_events};
- SQL
-
- execute(sql)
- end
-
- # Updates merge_request_metrics merged_by_id based on the latest merged
- # records on events table for a given merge request batch.
- def update_merged_events_for_range(min, max)
- sql =
- <<-SQL.strip_heredoc
- UPDATE merge_request_metrics metrics,
- (#{select_events(min, max, Event::MERGED)}) merged_events
- SET metrics.merged_by_id = merged_events.author_id #{where_matches_merged_events};
- SQL
-
- execute(sql)
- end
-
- def execute(sql)
- @connection ||= ActiveRecord::Base.connection
- @connection.execute(sql)
- end
-
- def select_events(min, max, action)
- select_max_event_id = <<-SQL.strip_heredoc
- SELECT max(id)
- FROM events
- WHERE action = #{action}
- AND target_type = 'MergeRequest'
- AND target_id BETWEEN #{min} AND #{max}
- GROUP BY target_id
- SQL
-
- <<-SQL.strip_heredoc
- SELECT author_id, updated_at, target_id
- FROM events
- WHERE id IN(#{select_max_event_id})
- SQL
- end
-
- def where_matches_closed_events
- <<-SQL.strip_heredoc
- WHERE metrics.merge_request_id = closed_events.target_id
- AND metrics.latest_closed_at IS NULL
- AND metrics.latest_closed_by_id IS NULL
- SQL
- end
-
- def where_matches_merged_events
- <<-SQL.strip_heredoc
- WHERE metrics.merge_request_id = merged_events.target_id
- AND metrics.merged_by_id IS NULL
- SQL
- end
- end
- end
-end
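
The PostgreSQL branch relies on DISTINCT ON, which keeps exactly one row per combination of the listed expressions, chosen by the ORDER BY. In isolation the semantics look like this (illustrative query against the same events table):

    SELECT DISTINCT ON (target_id, action) target_id, action, author_id
    FROM events
    ORDER BY target_id, action, id DESC;
    -- one row per (target_id, action): the one with the highest id,
    -- i.e. the latest closed or merged event for each merge request
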
diff --git a/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved.rb b/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved.rb
deleted file mode 100644
index 37592d67dd9..00000000000
--- a/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class PopulateMergeRequestMetricsWithEventsDataImproved
- CLOSED_EVENT_ACTION = 3
- MERGED_EVENT_ACTION = 7
-
- def perform(min_merge_request_id, max_merge_request_id)
- insert_metrics_for_range(min_merge_request_id, max_merge_request_id)
- update_metrics_with_events_data(min_merge_request_id, max_merge_request_id)
- end
-
- # Inserts merge_request_metrics records for merge_requests without it for
- # a given merge request batch.
- def insert_metrics_for_range(min, max)
- metrics_not_exists_clause =
- <<-SQL.strip_heredoc
- NOT EXISTS (SELECT 1 FROM merge_request_metrics
- WHERE merge_request_metrics.merge_request_id = merge_requests.id)
- SQL
-
- MergeRequest.where(metrics_not_exists_clause).where(id: min..max).each_batch do |batch|
- select_sql = batch.select(:id, :created_at, :updated_at).to_sql
-
- execute("INSERT INTO merge_request_metrics (merge_request_id, created_at, updated_at) #{select_sql}")
- end
- end
-
- def update_metrics_with_events_data(min, max)
- if Gitlab::Database.postgresql?
- psql_update_metrics_with_events_data(min, max)
- else
- mysql_update_metrics_with_events_data(min, max)
- end
- end
-
- def psql_update_metrics_with_events_data(min, max)
- update_sql = <<-SQL.strip_heredoc
- UPDATE merge_request_metrics
- SET (latest_closed_at,
- latest_closed_by_id) =
- ( SELECT updated_at,
- author_id
- FROM events
- WHERE target_id = merge_request_id
- AND target_type = 'MergeRequest'
- AND action = #{CLOSED_EVENT_ACTION}
- ORDER BY id DESC
- LIMIT 1 ),
- merged_by_id =
- ( SELECT author_id
- FROM events
- WHERE target_id = merge_request_id
- AND target_type = 'MergeRequest'
- AND action = #{MERGED_EVENT_ACTION}
- ORDER BY id DESC
- LIMIT 1 )
- WHERE merge_request_id BETWEEN #{min} AND #{max}
- SQL
-
- execute(update_sql)
- end
-
- def mysql_update_metrics_with_events_data(min, max)
- closed_updated_at_subquery = mysql_events_select(:updated_at, CLOSED_EVENT_ACTION)
- closed_author_id_subquery = mysql_events_select(:author_id, CLOSED_EVENT_ACTION)
- merged_author_id_subquery = mysql_events_select(:author_id, MERGED_EVENT_ACTION)
-
- update_sql = <<-SQL.strip_heredoc
- UPDATE merge_request_metrics
- SET latest_closed_at = (#{closed_updated_at_subquery}),
- latest_closed_by_id = (#{closed_author_id_subquery}),
- merged_by_id = (#{merged_author_id_subquery})
- WHERE merge_request_id BETWEEN #{min} AND #{max}
- SQL
-
- execute(update_sql)
- end
-
- def mysql_events_select(column, action)
- <<-SQL.strip_heredoc
- SELECT #{column} FROM events
- WHERE target_id = merge_request_id
- AND target_type = 'MergeRequest'
- AND action = #{action}
- ORDER BY id DESC
- LIMIT 1
- SQL
- end
-
- def execute(sql)
- @connection ||= ActiveRecord::Base.connection
- @connection.execute(sql)
- end
- end
- end
-end
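
The improved variant drops the CTE in favor of PostgreSQL's multi-column assignment, SET (a, b) = (row subquery), plus per-row correlated subqueries, so a single UPDATE covers the whole ID range without a join. A stripped-down example of the syntax, reusing the table names above:

    UPDATE merge_request_metrics
    SET (latest_closed_at, latest_closed_by_id) =
        (SELECT e.updated_at, e.author_id
         FROM events e
         WHERE e.target_id = merge_request_metrics.merge_request_id
         ORDER BY e.id DESC
         LIMIT 1);
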
diff --git a/lib/gitlab/background_migration/redact_links.rb b/lib/gitlab/background_migration/redact_links.rb
deleted file mode 100644
index 92256e59a6c..00000000000
--- a/lib/gitlab/background_migration/redact_links.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-require_relative 'redact_links/redactable'
-
-module Gitlab
- module BackgroundMigration
- class RedactLinks
- class Note < ActiveRecord::Base
- include EachBatch
- include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
-
- self.table_name = 'notes'
- self.inheritance_column = :_type_disabled
- end
-
- class Issue < ActiveRecord::Base
- include EachBatch
- include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
-
- self.table_name = 'issues'
- self.inheritance_column = :_type_disabled
- end
-
- class MergeRequest < ActiveRecord::Base
- include EachBatch
- include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
-
- self.table_name = 'merge_requests'
- self.inheritance_column = :_type_disabled
- end
-
- class Snippet < ActiveRecord::Base
- include EachBatch
- include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
-
- self.table_name = 'snippets'
- self.inheritance_column = :_type_disabled
- end
-
- def perform(model_name, field, start_id, stop_id)
- link_pattern = "%/sent_notifications/" + ("_" * 32) + "/unsubscribe%"
- model = "Gitlab::BackgroundMigration::RedactLinks::#{model_name}".constantize
-
- model.where("#{field} like ?", link_pattern).where(id: start_id..stop_id).each do |resource|
- resource.redact_field!(field)
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/redact_links/redactable.rb b/lib/gitlab/background_migration/redact_links/redactable.rb
deleted file mode 100644
index baab34221f1..00000000000
--- a/lib/gitlab/background_migration/redact_links/redactable.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class RedactLinks
- module Redactable
- extend ActiveSupport::Concern
-
- def redact_field!(field)
- self[field].gsub!(%r{/sent_notifications/\h{32}/unsubscribe}, '/sent_notifications/REDACTED/unsubscribe')
-
- if self.changed?
- self.update_columns(field => self[field],
- "#{field}_html" => nil)
- end
- end
- end
- end
- end
-end
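
The gsub! pattern is the heart of the redaction: \h{32} matches the 32-hex-digit sent_notifications token, and nulling the corresponding *_html column invalidates the cached rendered Markdown so it is regenerated from the redacted source. The substitution in isolation, runnable as-is:

    text = +'see /sent_notifications/0123456789abcdef0123456789abcdef/unsubscribe'

    text.gsub!(%r{/sent_notifications/\h{32}/unsubscribe},
               '/sent_notifications/REDACTED/unsubscribe')

    puts text # => see /sent_notifications/REDACTED/unsubscribe
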
diff --git a/lib/gitlab/background_migration/rollback_import_state_data.rb b/lib/gitlab/background_migration/rollback_import_state_data.rb
deleted file mode 100644
index a7c986747d8..00000000000
--- a/lib/gitlab/background_migration/rollback_import_state_data.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This background migration migrates all the data of import_state
- # back to the projects table for projects that are considered imports or forks
- class RollbackImportStateData
- def perform(start_id, end_id)
- move_attributes_data_to_project(start_id, end_id)
- end
-
- def move_attributes_data_to_project(start_id, end_id)
- Rails.logger.info("#{self.class.name} - Moving import attributes data to projects table: #{start_id} - #{end_id}")
-
- if Gitlab::Database.mysql?
- ActiveRecord::Base.connection.execute <<~SQL
- UPDATE projects, project_mirror_data
- SET
- projects.import_status = project_mirror_data.status,
- projects.import_jid = project_mirror_data.jid,
- projects.import_error = project_mirror_data.last_error
- WHERE project_mirror_data.project_id = projects.id
- AND project_mirror_data.id BETWEEN #{start_id} AND #{end_id}
- SQL
- else
- ActiveRecord::Base.connection.execute <<~SQL
- UPDATE projects
- SET
- import_status = project_mirror_data.status,
- import_jid = project_mirror_data.jid,
- import_error = project_mirror_data.last_error
- FROM project_mirror_data
- WHERE project_mirror_data.project_id = projects.id
- AND project_mirror_data.id BETWEEN #{start_id} AND #{end_id}
- SQL
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/schedule_diff_files_deletion.rb b/lib/gitlab/background_migration/schedule_diff_files_deletion.rb
deleted file mode 100644
index 609cf19187c..00000000000
--- a/lib/gitlab/background_migration/schedule_diff_files_deletion.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class ScheduleDiffFilesDeletion
- class MergeRequestDiff < ActiveRecord::Base
- self.table_name = 'merge_request_diffs'
-
- belongs_to :merge_request
-
- include EachBatch
- end
-
- DIFF_BATCH_SIZE = 5_000
- INTERVAL = 5.minutes
- MIGRATION = 'DeleteDiffFiles'
-
- def perform
- diffs = MergeRequestDiff
- .from("(#{diffs_collection.to_sql}) merge_request_diffs")
- .where('merge_request_diffs.id != merge_request_diffs.latest_merge_request_diff_id')
- .select(:id)
-
- diffs.each_batch(of: DIFF_BATCH_SIZE) do |relation, index|
- ids = relation.pluck(:id)
-
- BackgroundMigrationWorker.perform_in(index * INTERVAL, MIGRATION, [ids])
- end
- end
-
- private
-
- def diffs_collection
- MergeRequestDiff
- .joins(:merge_request)
- .where("merge_requests.state = 'merged'")
- .where('merge_requests.latest_merge_request_diff_id IS NOT NULL')
- .where("merge_request_diffs.state NOT IN ('without_files', 'empty')")
- .select('merge_requests.latest_merge_request_diff_id, merge_request_diffs.id')
- end
- end
- end
-end
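
Note the scheduling fan-out in perform: each_batch yields a 1-based batch index, so batch n is enqueued roughly n * INTERVAL in the future, spacing the DeleteDiffFiles jobs five minutes apart instead of flooding the queue at once. A sketch with fake batches:

    INTERVAL = 5 * 60 # seconds, mirroring the 5.minutes above

    batches = { 1 => [101, 102], 2 => [103, 104] }

    batches.each do |index, ids|
      puts "enqueue DeleteDiffFiles(#{ids.inspect}) in #{index * INTERVAL}s"
    end
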
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index cc61bb7fa02..1b5cd0fbb07 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -149,7 +149,7 @@ module Gitlab
# column - The name of the column to create the foreign key on.
# on_delete - The action to perform when associated data is removed,
# defaults to "CASCADE".
- def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade)
+ def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade, name: nil)
# Transactions would result in ALTER TABLE locks being held for the
# duration of the transaction, defeating the purpose of this method.
if transaction_open?
@@ -167,14 +167,18 @@ module Gitlab
return
end
- return add_foreign_key(source, target,
- column: column,
- on_delete: on_delete)
+ key_options = { column: column, on_delete: on_delete }
+
+ # The MySQL adapter tries to create a foreign key without a name when
+ # `:name` is nil, instead of generating a name for us.
+ key_options[:name] = name if name
+
+ return add_foreign_key(source, target, key_options)
else
on_delete = 'SET NULL' if on_delete == :nullify
end
- key_name = concurrent_foreign_key_name(source, column)
+ key_name = name || concurrent_foreign_key_name(source, column)
unless foreign_key_exists?(source, target, column: column)
Rails.logger.warn "Foreign key not created because it exists already " \
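
With the new :name keyword, a migration can pin the constraint name instead of taking the digest produced by concurrent_foreign_key_name. A usage sketch; the widgets table and migration class are illustrative, only the keyword itself comes from the change above:

    class AddWidgetsProjectIdFk < ActiveRecord::Migration[5.1]
      include Gitlab::Database::MigrationHelpers

      DOWNTIME = false

      disable_ddl_transaction!

      def up
        add_concurrent_foreign_key :widgets, :projects,
                                   column: :project_id,
                                   on_delete: :cascade,
                                   name: :fk_widgets_project_id
      end

      def down
        remove_foreign_key :widgets, name: :fk_widgets_project_id
      end
    end
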
diff --git a/lib/gitlab/import_export/config.rb b/lib/gitlab/import_export/config.rb
new file mode 100644
index 00000000000..f6cd4eb5e0c
--- /dev/null
+++ b/lib/gitlab/import_export/config.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module ImportExport
+ class Config
+      # Returns a Hash of the YAML file, including EE-specific data if EE is
+ # used.
+ def to_h
+ hash = parse_yaml
+ ee_hash = hash['ee']
+
+ if merge? && ee_hash
+ ee_hash.each do |key, value|
+ if key == 'project_tree'
+ merge_project_tree(value, hash[key])
+ else
+ merge_attributes_list(value, hash[key])
+ end
+ end
+ end
+
+ # We don't want to expose this section after this point, as it is no
+ # longer needed.
+ hash.delete('ee')
+
+ hash
+ end
+
+ # Merges a project relationships tree into the target tree.
+ #
+ # @param [Array<Hash|Symbol>] source_values
+ # @param [Array<Hash|Symbol>] target_values
+ def merge_project_tree(source_values, target_values)
+ source_values.each do |value|
+ if value.is_a?(Hash)
+ # Examples:
+ #
+ # { 'project_tree' => [{ 'labels' => [...] }] }
+ # { 'notes' => [:author, { 'events' => [:push_event_payload] }] }
+ value.each do |key, val|
+ target = target_values
+ .find { |h| h.is_a?(Hash) && h[key] }
+
+ if target
+ merge_project_tree(val, target[key])
+ else
+ target_values << { key => val.dup }
+ end
+ end
+ else
+          # Examples: :priorities, :author, etc.
+ target_values << value
+ end
+ end
+ end
+
+ # Merges a Hash containing a flat list of attributes, such as the entries
+      # in an `excluded_attributes` section.
+ #
+ # @param [Hash] source_values
+ # @param [Hash] target_values
+ def merge_attributes_list(source_values, target_values)
+ source_values.each do |key, values|
+ target_values[key] ||= []
+ target_values[key].concat(values)
+ end
+ end
+
+ def merge?
+ Gitlab.ee?
+ end
+
+ def parse_yaml
+ YAML.load_file(Gitlab::ImportExport.config_file)
+ end
+ end
+ end
+end
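
In use, callers never see the ee key: on CE the section is deleted untouched, and on EE (where merge? is true) it is folded into project_tree and the attribute lists before being removed. A short usage sketch:

    config = Gitlab::ImportExport::Config.new.to_h

    config.key?('ee')      # => false on both CE and EE
    config['project_tree'] # => the CE tree, plus the EE relations when Gitlab.ee?
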
diff --git a/lib/gitlab/import_export/import_export.yml b/lib/gitlab/import_export/import_export.yml
index 71c44af9254..a0fb051e806 100644
--- a/lib/gitlab/import_export/import_export.yml
+++ b/lib/gitlab/import_export/import_export.yml
@@ -1,7 +1,11 @@
# Model relationships to be included in the project import/export
+#
+# This list _must_ only contain relationships that are available to both CE and
+# EE. EE-specific relationships must be defined in the `ee` section further
+# below.
project_tree:
- labels:
- :priorities
+ - :priorities
- milestones:
- events:
- :push_event_payload
@@ -15,18 +19,18 @@ project_tree:
- :push_event_payload
- label_links:
- label:
- :priorities
+ - :priorities
- milestone:
- events:
- :push_event_payload
- resource_label_events:
- label:
- :priorities
+ - :priorities
- :issue_assignees
- snippets:
- :award_emoji
- notes:
- :author
+ - :author
- releases:
- :links
- project_members:
@@ -46,13 +50,13 @@ project_tree:
- :timelogs
- label_links:
- label:
- :priorities
+ - :priorities
- milestone:
- events:
- :push_event_payload
- resource_label_events:
- label:
- :priorities
+ - :priorities
- ci_pipelines:
- notes:
- :author
@@ -121,12 +125,22 @@ excluded_attributes:
- :bfg_object_map
- :detected_repository_languages
- :tag_list
+ - :mirror_user_id
+ - :mirror_trigger_builds
+ - :only_mirror_protected_branches
+ - :pull_mirror_available_overridden
+ - :mirror_overwrites_diverged_branches
+ - :packages_enabled
+ - :mirror_last_update_at
+ - :mirror_last_successful_update_at
namespaces:
- :runners_token
- :runners_token_encrypted
project_import_state:
- :last_error
- :jid
+ - :last_update_at
+ - :last_successful_update_at
prometheus_metrics:
- :common
- :identifier
@@ -201,3 +215,12 @@ methods:
- :action
project_badges:
- :type
+
+# EE-specific relationships and settings to include. All of this will be merged
+# into the previous structures if EE is used.
+ee:
+ project_tree:
+ - protected_branches:
+ - :unprotect_access_levels
+ - protected_environments:
+ - :deploy_access_levels
diff --git a/lib/gitlab/import_export/reader.rb b/lib/gitlab/import_export/reader.rb
index bc0d18e03fa..8bdf6ca491d 100644
--- a/lib/gitlab/import_export/reader.rb
+++ b/lib/gitlab/import_export/reader.rb
@@ -7,7 +7,7 @@ module Gitlab
def initialize(shared:)
@shared = shared
- config_hash = YAML.load_file(Gitlab::ImportExport.config_file).deep_symbolize_keys
+ config_hash = ImportExport::Config.new.to_h.deep_symbolize_keys
@tree = config_hash[:project_tree]
@attributes_finder = Gitlab::ImportExport::AttributesFinder.new(included_attributes: config_hash[:included_attributes],
excluded_attributes: config_hash[:excluded_attributes],
diff --git a/lib/tasks/gitlab/import_export.rake b/lib/tasks/gitlab/import_export.rake
index 900dbf7be24..5365bd3920f 100644
--- a/lib/tasks/gitlab/import_export.rake
+++ b/lib/tasks/gitlab/import_export.rake
@@ -7,7 +7,7 @@ namespace :gitlab do
desc "GitLab | Display exported DB structure"
task data: :environment do
- puts YAML.load_file(Gitlab::ImportExport.config_file)['project_tree'].to_yaml(SortKeys: true)
+ puts Gitlab::ImportExport::Config.new.to_h['project_tree'].to_yaml(SortKeys: true)
end
desc 'GitLab | Bumps the Import/Export version in fixtures and project templates'
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 33254d607c9..6cfec5f4017 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -9,9 +9,13 @@ describe 'Database schema' do
# Use if you are certain that this column should not have a foreign key
IGNORED_FK_COLUMNS = {
abuse_reports: %w[reporter_id user_id],
- application_settings: %w[performance_bar_allowed_group_id],
+ application_settings: %w[performance_bar_allowed_group_id slack_app_id snowplow_site_id],
+ approvers: %w[target_id user_id],
+ approvals: %w[user_id],
+ approver_groups: %w[target_id],
audit_events: %w[author_id entity_id],
award_emoji: %w[awardable_id user_id],
+ boards: %w[milestone_id],
chat_names: %w[chat_id service_id team_id user_id],
chat_teams: %w[team_id],
ci_builds: %w[erased_by_id runner_id trigger_request_id user_id],
@@ -21,15 +25,25 @@ describe 'Database schema' do
cluster_providers_gcp: %w[gcp_project_id operation_id],
deploy_keys_projects: %w[deploy_key_id],
deployments: %w[deployable_id environment_id user_id],
+ draft_notes: %w[discussion_id],
emails: %w[user_id],
events: %w[target_id],
+ epics: %w[updated_by_id last_edited_by_id start_date_sourcing_milestone_id due_date_sourcing_milestone_id],
forked_project_links: %w[forked_from_project_id],
+ geo_event_log: %w[hashed_storage_attachments_event_id],
+ geo_job_artifact_deleted_events: %w[job_artifact_id],
+ geo_lfs_object_deleted_events: %w[lfs_object_id],
+ geo_node_statuses: %w[last_event_id cursor_last_event_id],
+ geo_nodes: %w[oauth_application_id],
+ geo_repository_deleted_events: %w[project_id],
+ geo_upload_deleted_events: %w[upload_id model_id],
identities: %w[user_id],
issues: %w[last_edited_by_id state_id],
jira_tracker_data: %w[jira_issue_transition_id],
keys: %w[user_id],
label_links: %w[target_id],
lfs_objects_projects: %w[lfs_object_id project_id],
+ ldap_group_links: %w[group_id],
members: %w[source_id created_by_id],
merge_requests: %w[last_edited_by_id state_id],
namespaces: %w[owner_id parent_id],
@@ -40,7 +54,7 @@ describe 'Database schema' do
oauth_applications: %w[owner_id],
project_group_links: %w[group_id],
project_statistics: %w[namespace_id],
- projects: %w[creator_id namespace_id ci_id],
+ projects: %w[creator_id namespace_id ci_id mirror_user_id],
redirect_routes: %w[source_id],
repository_languages: %w[programming_language_id],
routes: %w[source_id],
@@ -48,14 +62,17 @@ describe 'Database schema' do
snippets: %w[author_id],
spam_logs: %w[user_id],
subscriptions: %w[user_id subscribable_id],
+ slack_integrations: %w[team_id user_id],
taggings: %w[tag_id taggable_id tagger_id],
timelogs: %w[user_id],
todos: %w[target_id commit_id],
uploads: %w[model_id],
user_agent_details: %w[subject_id],
- users: %w[color_scheme_id created_by_id theme_id],
+ users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id],
users_star_projects: %w[user_id],
- web_hooks: %w[service_id],
+ vulnerability_identifiers: %w[external_id],
+ vulnerability_scanners: %w[external_id],
+ web_hooks: %w[service_id group_id],
suggestions: %w[commit_id]
}.with_indifferent_access.freeze
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 9d74a96ab3d..c71a778fc84 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -10,7 +10,7 @@ describe 'Import/Export - project export integration test', :js do
let(:user) { create(:admin) }
let(:export_path) { "#{Dir.tmpdir}/import_file_spec" }
- let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
+ let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:sensitive_words) { %w[pass secret token key encrypted html] }
let(:safe_list) do
diff --git a/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb
deleted file mode 100644
index f974dc8fda2..00000000000
--- a/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys, :migration, schema: 20171005130944 do
- context 'when GpgKey exists' do
- let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User3.public_key) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- before do
- GpgKeySubkey.destroy_all # rubocop: disable DestroyAll
- end
-
- it 'generate the subkeys' do
- expect do
- described_class.new.perform(gpg_key.id)
- end.to change { gpg_key.subkeys.count }.from(0).to(2)
- end
-
- it 'schedules the signature update worker' do
- expect(InvalidGpgSignatureUpdateWorker).to receive(:perform_async).with(gpg_key.id)
-
- described_class.new.perform(gpg_key.id)
- end
- end
-
- context 'when GpgKey does not exist' do
- it 'does not do anything' do
- expect(Gitlab::Gpg).not_to receive(:subkeys_from_key)
- expect(InvalidGpgSignatureUpdateWorker).not_to receive(:perform_async)
-
- described_class.new.perform(123)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
deleted file mode 100644
index 0a5b99d27e7..00000000000
--- a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, :sidekiq, schema: 20180619121030 do
- describe '#perform' do
- before do
- # This migration was created before we introduced ProjectCiCdSetting#default_git_depth
- allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth=).and_return(0)
- allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth).and_return(nil)
- end
-
- context 'when diff files can be deleted' do
- let(:merge_request) { create(:merge_request, :merged) }
- let!(:merge_request_diff) do
- merge_request.create_merge_request_diff
- merge_request.merge_request_diffs.first
- end
-
- let(:perform) do
- described_class.new.perform(MergeRequestDiff.pluck(:id))
- end
-
- it 'deletes all merge request diff files' do
- expect { perform }
- .to change { merge_request_diff.merge_request_diff_files.count }
- .from(20).to(0)
- end
-
- it 'updates state to without_files' do
- expect { perform }
- .to change { merge_request_diff.reload.state }
- .from('collected').to('without_files')
- end
-
- it 'rollsback if something goes wrong' do
- expect(described_class::MergeRequestDiffFile).to receive_message_chain(:where, :delete_all)
- .and_raise
-
- expect { perform }
- .to raise_error
-
- merge_request_diff.reload
-
- expect(merge_request_diff.state).to eq('collected')
- expect(merge_request_diff.merge_request_diff_files.count).to eq(20)
- end
- end
-
- it 'reschedules itself when should_wait_deadtuple_vacuum' do
- merge_request = create(:merge_request, :merged)
- first_diff = merge_request.merge_request_diff
- second_diff = merge_request.create_merge_request_diff
-
- Sidekiq::Testing.fake! do
- worker = described_class.new
- allow(worker).to receive(:should_wait_deadtuple_vacuum?) { true }
-
- worker.perform([first_diff.id, second_diff.id])
-
- expect(described_class.name.demodulize).to be_scheduled_delayed_migration(5.minutes, [first_diff.id, second_diff.id])
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
-
- describe '#should_wait_deadtuple_vacuum?' do
- it 'returns true when hitting merge_request_diff_files hits DEAD_TUPLES_THRESHOLD', :postgresql do
- worker = described_class.new
- threshold_query_result = [{ "n_dead_tup" => described_class::DEAD_TUPLES_THRESHOLD.to_s }]
- normal_query_result = [{ "n_dead_tup" => '3' }]
-
- allow(worker)
- .to receive(:execute_statement)
- .with(/SELECT n_dead_tup */)
- .and_return(threshold_query_result, normal_query_result)
-
- expect(worker.should_wait_deadtuple_vacuum?).to be(true)
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
deleted file mode 100644
index d3f7f1ded16..00000000000
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ /dev/null
@@ -1,326 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :migration, schema: 20171114162227 do
- include GitHelpers
-
- let(:merge_request_diffs) { table(:merge_request_diffs) }
- let(:merge_requests) { table(:merge_requests) }
-
- describe '#perform' do
- let(:project) { create(:project, :repository) }
- let(:merge_request) { merge_requests.create!(iid: 1, target_project_id: project.id, source_project_id: project.id, target_branch: 'feature', source_branch: 'master').becomes(MergeRequest) }
- let(:merge_request_diff) { MergeRequest.find(merge_request.id).create_merge_request_diff }
- let(:updated_merge_request_diff) { MergeRequestDiff.find(merge_request_diff.id) }
- let(:rugged) { rugged_repo(project.repository) }
-
- before do
- allow_any_instance_of(MergeRequestDiff)
- .to receive(:commits_count=).and_return(nil)
- end
-
- def diffs_to_hashes(diffs)
- diffs.as_json(only: Gitlab::Git::Diff::SERIALIZE_KEYS).map(&:with_indifferent_access)
- end
-
- def quote_yaml(value)
- MergeRequestDiff.connection.quote(YAML.dump(value))
- end
-
- def convert_to_yaml(merge_request_diff_id, commits, diffs)
- MergeRequestDiff.where(id: merge_request_diff_id).update_all(
- "st_commits = #{quote_yaml(commits)}, st_diffs = #{quote_yaml(diffs)}"
- )
- end
-
- shared_examples 'updated MR diff' do
- before do
- convert_to_yaml(merge_request_diff.id, commits, diffs)
-
- MergeRequestDiffCommit.delete_all
- MergeRequestDiffFile.delete_all
-
- subject.perform(merge_request_diff.id, merge_request_diff.id)
- end
-
- it 'creates correct entries in the merge_request_diff_commits table' do
- expect(updated_merge_request_diff.merge_request_diff_commits.count).to eq(expected_commits.count)
- expect(updated_merge_request_diff.commits.map(&:to_hash)).to eq(expected_commits)
- end
-
- it 'creates correct entries in the merge_request_diff_files table' do
- expect(updated_merge_request_diff.merge_request_diff_files.count).to eq(expected_diffs.count)
- expect(diffs_to_hashes(updated_merge_request_diff.raw_diffs)).to eq(expected_diffs)
- end
-
- it 'sets the st_commits and st_diffs columns to nil' do
- expect(updated_merge_request_diff.st_commits_before_type_cast).to be_nil
- expect(updated_merge_request_diff.st_diffs_before_type_cast).to be_nil
- end
- end
-
- context 'when the diff IDs passed do not exist' do
- it 'does not raise' do
- expect { subject.perform(0, 0) }.not_to raise_exception
- end
- end
-
- context 'when the merge request diff has no serialised commits or diffs' do
- before do
- merge_request_diff.update(st_commits: nil, st_diffs: nil)
- end
-
- it 'does not raise' do
- expect { subject.perform(merge_request_diff.id, merge_request_diff.id) }
- .not_to raise_exception
- end
- end
-
- context 'processing multiple merge request diffs' do
- let(:start_id) { described_class::MergeRequestDiff.minimum(:id) }
- let(:stop_id) { described_class::MergeRequestDiff.maximum(:id) }
-
- before do
- merge_request.create_merge_request_diff
-
- convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
- convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
-
- MergeRequestDiffCommit.delete_all
- MergeRequestDiffFile.delete_all
- end
-
- context 'when BUFFER_ROWS is exceeded' do
- before do
- stub_const("#{described_class}::BUFFER_ROWS", 1)
-
- allow(Gitlab::Database).to receive(:bulk_insert).and_call_original
- end
-
- it 'inserts commit rows in chunks of BUFFER_ROWS' do
- # There are 29 commits in each diff, so we should have slices of 20 + 9 + 20 + 9.
- stub_const("#{described_class}::BUFFER_ROWS", 20)
-
- expect(Gitlab::Database).to receive(:bulk_insert)
- .with('merge_request_diff_commits', anything)
- .exactly(4)
- .times
- .and_call_original
-
- subject.perform(start_id, stop_id)
- end
-
- it 'inserts diff rows in chunks of DIFF_FILE_BUFFER_ROWS' do
- # There are 20 files in each diff, so we should have slices of 20 + 20.
- stub_const("#{described_class}::DIFF_FILE_BUFFER_ROWS", 20)
-
- expect(Gitlab::Database).to receive(:bulk_insert)
- .with('merge_request_diff_files', anything)
- .exactly(2)
- .times
- .and_call_original
-
- subject.perform(start_id, stop_id)
- end
- end
-
- context 'when BUFFER_ROWS is not exceeded' do
- it 'only updates once' do
- expect(Gitlab::Database).to receive(:bulk_insert)
- .with('merge_request_diff_commits', anything)
- .once
- .and_call_original
-
- expect(Gitlab::Database).to receive(:bulk_insert)
- .with('merge_request_diff_files', anything)
- .once
- .and_call_original
-
- subject.perform(start_id, stop_id)
- end
- end
-
- context 'when some rows were already inserted due to a previous failure' do
- before do
- subject.perform(start_id, stop_id)
-
- convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
- convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
- end
-
- it 'does not raise' do
- expect { subject.perform(start_id, stop_id) }.not_to raise_exception
- end
-
- it 'logs a message' do
- expect(Rails.logger).to receive(:info)
- .with(
- a_string_matching(described_class.name).and(matching([start_id, stop_id].inspect))
- )
- .twice
-
- subject.perform(start_id, stop_id)
- end
-
- it 'ends up with the correct rows' do
- expect(updated_merge_request_diff.commits.count).to eq(29)
- expect(updated_merge_request_diff.raw_diffs.count).to eq(20)
- end
- end
-
- context 'when the merge request diff update fails' do
- let(:exception) { ActiveRecord::RecordNotFound }
-
- let(:perform_ignoring_exceptions) do
- subject.perform(start_id, stop_id)
- rescue described_class::Error
- end
-
- before do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:update_all).and_raise(exception)
- end
-
- it 'raises an error' do
- expect { subject.perform(start_id, stop_id) }
- .to raise_exception(described_class::Error)
- end
-
- it 'logs the error' do
- expect(Rails.logger).to receive(:info).with(
- a_string_matching(described_class.name)
- .and(matching([start_id, stop_id].inspect))
- .and(matching(exception.name))
- )
-
- perform_ignoring_exceptions
- end
-
- it 'still adds diff commits' do
- expect { perform_ignoring_exceptions }
- .to change { MergeRequestDiffCommit.count }
- end
-
- it 'still adds diff files' do
- expect { perform_ignoring_exceptions }
- .to change { MergeRequestDiffFile.count }
- end
- end
- end
-
- context 'when the merge request diff has valid commits and diffs' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:expected_commits) { commits }
- let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
- let(:expected_diffs) { diffs }
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diff has diffs but no commits' do
- let(:commits) { nil }
- let(:expected_commits) { [] }
- let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
- let(:expected_diffs) { diffs }
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diffs do not have too_large set' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:expected_commits) { commits }
- let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
-
- let(:diffs) do
- expected_diffs.map { |diff| diff.except(:too_large) }
- end
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diffs do not have a_mode and b_mode set' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:expected_commits) { commits }
- let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
-
- let(:diffs) do
- expected_diffs.map { |diff| diff.except(:a_mode, :b_mode) }
- end
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diffs have binary content' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:expected_commits) { commits }
- let(:expected_diffs) { diffs }
-
- # The start of a PDF created by Illustrator
- let(:binary_string) do
- "\x25\x50\x44\x46\x2d\x31\x2e\x35\x0d\x25\xe2\xe3\xcf\xd3\x0d\x0a".force_encoding(Encoding::BINARY)
- end
-
- let(:diffs) do
- [
- {
- 'diff' => binary_string,
- 'new_path' => 'path',
- 'old_path' => 'path',
- 'a_mode' => '100644',
- 'b_mode' => '100644',
- 'new_file' => false,
- 'renamed_file' => false,
- 'deleted_file' => false,
- 'too_large' => false
- }
- ]
- end
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diff has commits, but no diffs' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:expected_commits) { commits }
- let(:diffs) { [] }
- let(:expected_diffs) { diffs }
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diffs have invalid content' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:expected_commits) { commits }
- let(:diffs) { ['--broken-diff'] }
- let(:expected_diffs) { [] }
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diffs are Rugged::Patch instances' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
- let(:expected_commits) { commits }
- let(:diffs) { rugged_diff(first_commit.sha).patches }
- let(:expected_diffs) { [] }
-
- include_examples 'updated MR diff'
- end
-
- context 'when the merge request diffs are Rugged::Diff::Delta instances' do
- let(:commits) { merge_request_diff.commits.map(&:to_hash) }
- let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
- let(:expected_commits) { commits }
- let(:diffs) { rugged_diff(first_commit.sha).deltas }
- let(:expected_diffs) { [] }
-
- include_examples 'updated MR diff'
- end
-
- def rugged_diff(commit_sha)
- rugged_commit = rugged.lookup(commit_sha)
- rugged_commit.parents[0].diff(rugged_commit)
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
new file mode 100644
index 00000000000..eecd290e3ca
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressCheck do
+ context 'rescheduling' do
+    context 'when there are ongoing jobs and no dead jobs' do
+ it 'reschedules check' do
+ allow(Gitlab::BackgroundMigration).to receive(:exists?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(true)
+
+ allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
+
+ described_class.new.perform
+ end
+ end
+
+    context 'when there are ongoing jobs and dead jobs' do
+ it 'reschedules check' do
+ allow(Gitlab::BackgroundMigration).to receive(:exists?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(true)
+
+ allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(true)
+
+ expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
+
+ described_class.new.perform
+ end
+ end
+
+    context 'when there are retrying jobs and none scheduled' do
+ it 'reschedules check' do
+ allow(Gitlab::BackgroundMigration).to receive(:exists?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(true)
+
+ expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
+
+ described_class.new.perform
+ end
+ end
+ end
+
+  context 'when there are no scheduled, retrying, or dead jobs' do
+ it 'enables feature' do
+ allow(Gitlab::BackgroundMigration).to receive(:exists?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ expect(Feature).to receive(:enable).with(:multiple_merge_request_assignees)
+
+ described_class.new.perform
+ end
+ end
+
+ context 'when there are only dead jobs' do
+    it 'raises a DeadJobsError' do
+ allow(Gitlab::BackgroundMigration).to receive(:exists?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(false)
+
+ allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
+ .with('PopulateMergeRequestAssigneesTable')
+ .and_return(true)
+
+ expect { described_class.new.perform }
+ .to raise_error(described_class::DeadJobsError,
+ "Only dead background jobs in the queue for #{described_class::WORKER}")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_external_pipeline_source_spec.rb b/spec/lib/gitlab/background_migration/populate_external_pipeline_source_spec.rb
deleted file mode 100644
index c6bc3db88a3..00000000000
--- a/spec/lib/gitlab/background_migration/populate_external_pipeline_source_spec.rb
+++ /dev/null
@@ -1,72 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::PopulateExternalPipelineSource, :migration, schema: 20180916011959 do
- let(:migration) { described_class.new }
-
- before do
- # This migration was created before we introduced metadata configs
- stub_feature_flags(ci_build_metadata_config: false)
- # This migration was created before we introduced ProjectCiCdSetting#default_git_depth
- allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth).and_return(nil)
- allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth=).and_return(0)
- end
-
- let!(:internal_pipeline) { create(:ci_pipeline, source: :web) }
- let(:pipelines) { [internal_pipeline, unknown_pipeline].map(&:id) }
-
- let!(:unknown_pipeline) do
- build(:ci_pipeline, source: :unknown)
- .tap { |pipeline| pipeline.save(validate: false) }
- end
-
- subject { migration.perform(pipelines.min, pipelines.max) }
-
- shared_examples 'no changes' do
- it 'does not change the pipeline source' do
- expect { subject }.not_to change { unknown_pipeline.reload.source }
- end
- end
-
- context 'when unknown pipeline is external' do
- before do
- create(:generic_commit_status, pipeline: unknown_pipeline)
- end
-
- it 'populates the pipeline source' do
- subject
-
- expect(unknown_pipeline.reload.source).to eq('external')
- end
-
- it 'can be repeated without effect' do
- subject
-
- expect { subject }.not_to change { unknown_pipeline.reload.source }
- end
- end
-
- context 'when unknown pipeline has just a build' do
- before do
- create(:ci_build, pipeline: unknown_pipeline)
- end
-
- it_behaves_like 'no changes'
- end
-
- context 'when unknown pipeline has no statuses' do
- it_behaves_like 'no changes'
- end
-
- context 'when unknown pipeline has a build and a status' do
- before do
- create(:generic_commit_status, pipeline: unknown_pipeline)
- create(:ci_build, pipeline: unknown_pipeline)
- end
-
- it_behaves_like 'no changes'
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/populate_import_state_spec.rb b/spec/lib/gitlab/background_migration/populate_import_state_spec.rb
deleted file mode 100644
index fcb869022de..00000000000
--- a/spec/lib/gitlab/background_migration/populate_import_state_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateImportState, :migration, schema: 20180502134117 do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:import_state) { table(:project_mirror_data) }
-
- before do
- namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
-
- projects.create!(id: 1, namespace_id: 1, name: 'gitlab1',
- path: 'gitlab1', import_error: "foo", import_status: :started,
- import_url: generate(:url))
- projects.create!(id: 2, namespace_id: 1, name: 'gitlab2', path: 'gitlab2',
- import_status: :none, import_url: generate(:url))
- projects.create!(id: 3, namespace_id: 1, name: 'gitlab3',
- path: 'gitlab3', import_error: "bar", import_status: :failed,
- import_url: generate(:url))
-
- allow(BackgroundMigrationWorker).to receive(:perform_in)
- end
-
- it "creates new import_state records with project's import data" do
- expect(projects.where.not(import_status: :none).count).to eq(2)
-
- expect do
- migration.perform(1, 3)
- end.to change { import_state.all.count }.from(0).to(2)
-
- expect(import_state.first.last_error).to eq("foo")
- expect(import_state.last.last_error).to eq("bar")
- expect(import_state.first.status).to eq("started")
- expect(import_state.last.status).to eq("failed")
- expect(projects.first.import_status).to eq("none")
- expect(projects.last.import_status).to eq("none")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved_spec.rb
deleted file mode 100644
index d1d64574627..00000000000
--- a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_improved_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'rails_helper'
-
-describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsDataImproved, :migration, schema: 20181204154019 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
- let(:events) { table(:events) }
-
- let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
-
- let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
-
- def create_merge_request(id, params = {})
- params.merge!(id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}")
-
- merge_requests.create(params)
- end
-
- def create_merge_request_event(id, params = {})
- params.merge!(id: id,
- project_id: project.id,
- author_id: user.id,
- target_type: 'MergeRequest')
-
- events.create(params)
- end
-
- describe '#perform' do
- it 'creates and updates closed and merged events' do
- timestamp = Time.new('2018-01-01 12:00:00').utc
-
- create_merge_request(1)
- create_merge_request_event(1, target_id: 1, action: 3, updated_at: timestamp)
- create_merge_request_event(2, target_id: 1, action: 3, updated_at: timestamp + 10.seconds)
-
- create_merge_request_event(3, target_id: 1, action: 7, updated_at: timestamp)
- create_merge_request_event(4, target_id: 1, action: 7, updated_at: timestamp + 10.seconds)
-
- subject.perform(1, 1)
-
- merge_request = MergeRequest.first
-
- expect(merge_request.metrics).to have_attributes(latest_closed_by_id: user.id,
- latest_closed_at: timestamp + 10.seconds,
- merged_by_id: user.id)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
deleted file mode 100644
index ff1bd9f7850..00000000000
--- a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-require 'rails_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData, :migration, schema: 20171128214150 do
- # commits_count attribute is added in a next migration
- before do
- allow_any_instance_of(MergeRequestDiff)
- .to receive(:commits_count=).and_return(nil)
- end
-
- describe '#perform' do
- let(:mr_with_event) { create(:merge_request) }
- let!(:merged_event) { create(:event, :merged, target: mr_with_event) }
- let!(:closed_event) { create(:event, :closed, target: mr_with_event) }
-
- before do
- # Make sure no metrics are created and kept through after_* callbacks.
- mr_with_event.metrics.destroy!
- end
-
- it 'inserts metrics and updates closed and merged events' do
- subject.perform(mr_with_event.id, mr_with_event.id)
-
- mr_with_event.reload
-
- expect(mr_with_event.metrics).to have_attributes(latest_closed_by_id: closed_event.author_id,
- merged_by_id: merged_event.author_id)
- expect(mr_with_event.metrics.latest_closed_at.to_s).to eq(closed_event.updated_at.to_s)
- end
- end
-
- describe '#insert_metrics_for_range' do
- let!(:mrs_without_metrics) { create_list(:merge_request, 3) }
- let!(:mrs_with_metrics) { create_list(:merge_request, 2) }
-
- before do
- # Make sure no metrics are created and kept through after_* callbacks.
- mrs_without_metrics.each { |m| m.metrics.destroy! }
- end
-
- it 'inserts merge_request_metrics for merge_requests without one' do
- expect { subject.insert_metrics_for_range(MergeRequest.first.id, MergeRequest.last.id) }
- .to change(MergeRequest::Metrics, :count).from(2).to(5)
-
- mrs_without_metrics.each do |mr_without_metrics|
- expect(mr_without_metrics.reload.metrics).to be_present
- end
- end
-
- it 'does not inserts merge_request_metrics for MRs out of given range' do
- expect { subject.insert_metrics_for_range(mrs_with_metrics.first.id, mrs_with_metrics.last.id) }
- .not_to change(MergeRequest::Metrics, :count).from(2)
- end
- end
-
- describe '#update_metrics_with_events_data' do
- context 'closed events data update' do
- let(:users) { create_list(:user, 3) }
- let(:mrs_with_event) { create_list(:merge_request, 3) }
-
- before do
- create_list(:event, 2, :closed, author: users.first, target: mrs_with_event.first)
- create_list(:event, 3, :closed, author: users.second, target: mrs_with_event.second)
- create(:event, :closed, author: users.third, target: mrs_with_event.third)
- end
-
- it 'migrates multiple MR metrics with closed event data' do
- mr_without_event = create(:merge_request)
- create(:event, :merged)
-
- subject.update_metrics_with_events_data(mrs_with_event.first.id, mrs_with_event.last.id)
-
- mrs_with_event.each do |mr_with_event|
- latest_event = Event.where(action: 3, target: mr_with_event).last
-
- mr_with_event.metrics.reload
-
- expect(mr_with_event.metrics.latest_closed_by).to eq(latest_event.author)
- expect(mr_with_event.metrics.latest_closed_at.to_s).to eq(latest_event.updated_at.to_s)
- end
-
- expect(mr_without_event.metrics.reload).to have_attributes(latest_closed_by_id: nil,
- latest_closed_at: nil)
- end
-
- it 'does not updates metrics out of given range' do
- out_of_range_mr = create(:merge_request)
- create(:event, :closed, author: users.last, target: out_of_range_mr)
-
- expect { subject.perform(mrs_with_event.first.id, mrs_with_event.second.id) }
- .not_to change { out_of_range_mr.metrics.reload.merged_by }
- .from(nil)
- end
- end
-
- context 'merged events data update' do
- let(:users) { create_list(:user, 3) }
- let(:mrs_with_event) { create_list(:merge_request, 3) }
-
- before do
- create_list(:event, 2, :merged, author: users.first, target: mrs_with_event.first)
- create_list(:event, 3, :merged, author: users.second, target: mrs_with_event.second)
- create(:event, :merged, author: users.third, target: mrs_with_event.third)
- end
-
- it 'migrates multiple MR metrics with merged event data' do
- mr_without_event = create(:merge_request)
- create(:event, :merged)
-
- subject.update_metrics_with_events_data(mrs_with_event.first.id, mrs_with_event.last.id)
-
- mrs_with_event.each do |mr_with_event|
- latest_event = Event.where(action: Event::MERGED, target: mr_with_event).last
-
- expect(mr_with_event.metrics.reload.merged_by).to eq(latest_event.author)
- end
-
- expect(mr_without_event.metrics.reload).to have_attributes(merged_by_id: nil)
- end
-
- it 'does not update metrics outside the given range' do
- out_of_range_mr = create(:merge_request)
- create(:event, :merged, author: users.last, target: out_of_range_mr)
-
- expect { subject.perform(mrs_with_event.first.id, mrs_with_event.second.id) }
- .not_to change { out_of_range_mr.metrics.reload.merged_by }
- .from(nil)
- end
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/redact_links_spec.rb b/spec/lib/gitlab/background_migration/redact_links_spec.rb
deleted file mode 100644
index a40e68069cc..00000000000
--- a/spec/lib/gitlab/background_migration/redact_links_spec.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::RedactLinks, :migration, schema: 20181014121030 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:issues) { table(:issues) }
- let(:notes) { table(:notes) }
- let(:snippets) { table(:snippets) }
- let(:users) { table(:users) }
- let(:merge_requests) { table(:merge_requests) }
- let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create(namespace_id: namespace.id, name: 'foo') }
- let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
-
- def create_merge_request(id, params)
- params.merge!(id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}")
-
- merge_requests.create(params)
- end
-
- def create_issue(id, params)
- params.merge!(id: id, title: "issue#{id}", project_id: project.id)
-
- issues.create(params)
- end
-
- def create_note(id, params)
- params[:id] = id
-
- notes.create(params)
- end
-
- def create_snippet(id, params)
- params.merge!(id: id, author_id: user.id)
-
- snippets.create(params)
- end
-
- def create_resource(model, id, params)
- send("create_#{model.name.underscore}", id, params)
- end
-
- shared_examples_for 'redactable resource' do
- it 'updates only matching texts' do
- matching_text = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
- redacted_text = 'some text /sent_notifications/REDACTED/unsubscribe more text'
- create_resource(model, 1, { field => matching_text })
- create_resource(model, 2, { field => 'not matching text' })
- create_resource(model, 3, { field => matching_text })
- create_resource(model, 4, { field => redacted_text })
- create_resource(model, 5, { field => matching_text })
-
- expected = { field => 'some text /sent_notifications/REDACTED/unsubscribe more text',
- "#{field}_html" => nil }
- expect_any_instance_of("Gitlab::BackgroundMigration::RedactLinks::#{model}".constantize).to receive(:update_columns).with(expected).and_call_original
-
- subject.perform(model, field, 2, 4)
-
- expect(model.where(field => matching_text).pluck(:id)).to eq [1, 5]
- expect(model.find(3).reload[field]).to eq redacted_text
- end
- end
-
- context 'resource is Issue' do
- it_behaves_like 'redactable resource' do
- let(:model) { Issue }
- let(:field) { :description }
- end
- end
-
- context 'resource is Merge Request' do
- it_behaves_like 'redactable resource' do
- let(:model) { MergeRequest }
- let(:field) { :description }
- end
- end
-
- context 'resource is Note' do
- it_behaves_like 'redactable resource' do
- let(:model) { Note }
- let(:field) { :note }
- end
- end
-
- context 'resource is Snippet' do
- it_behaves_like 'redactable resource' do
- let(:model) { Snippet }
- let(:field) { :description }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/rollback_import_state_data_spec.rb b/spec/lib/gitlab/background_migration/rollback_import_state_data_spec.rb
deleted file mode 100644
index cef3b6e4568..00000000000
--- a/spec/lib/gitlab/background_migration/rollback_import_state_data_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::RollbackImportStateData, :migration, schema: 20180502134117 do
- let(:migration) { described_class.new }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:import_state) { table(:project_mirror_data) }
-
- before do
- namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
-
- projects.create!(id: 1, namespace_id: 1, name: 'gitlab1', import_url: generate(:url))
- projects.create!(id: 2, namespace_id: 1, name: 'gitlab2', path: 'gitlab2', import_url: generate(:url))
-
- import_state.create!(id: 1, project_id: 1, status: :started, last_error: "foo")
- import_state.create!(id: 2, project_id: 2, status: :failed)
-
- allow(BackgroundMigrationWorker).to receive(:perform_in)
- end
-
- it "creates new import_state records with project's import data" do
- migration.perform(1, 2)
-
- expect(projects.first.import_status).to eq("started")
- expect(projects.second.import_status).to eq("failed")
- expect(projects.first.import_error).to eq("foo")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb b/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
deleted file mode 100644
index ec8ba0ce127..00000000000
--- a/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::ScheduleDiffFilesDeletion, :migration, :sidekiq, schema: 20180619121030 do
- describe '#perform' do
- let(:merge_request_diffs) { table(:merge_request_diffs) }
- let(:merge_requests) { table(:merge_requests) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- before do
- stub_const("#{described_class.name}::DIFF_BATCH_SIZE", 3)
-
- namespaces.create!(id: 1, name: 'gitlab', path: 'gitlab')
- projects.create!(id: 1, namespace_id: 1, name: 'gitlab', path: 'gitlab')
-
- merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master', state: 'merged')
-
- merge_request_diffs.create!(id: 1, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 2, merge_request_id: 1, state: 'empty')
- merge_request_diffs.create!(id: 3, merge_request_id: 1, state: 'without_files')
- merge_request_diffs.create!(id: 4, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 5, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 6, merge_request_id: 1, state: 'collected')
- merge_request_diffs.create!(id: 7, merge_request_id: 1, state: 'collected')
-
- merge_requests.update(1, latest_merge_request_diff_id: 7)
- end
-
- it 'correctly schedules diff file deletion workers' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- described_class.new.perform
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, [1, 4, 5])
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, [6])
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 22e52901758..c8e65a3e59d 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -162,7 +162,6 @@ describe Gitlab::Danger::Helper do
'db/foo' | :database
'qa/foo' | :qa
- 'ee/db/foo' | :database
'ee/qa/foo' | :qa
'changelogs/foo' | :none
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 4e83b27e4a5..1e4c4c38f74 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -214,6 +214,23 @@ describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
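+ # The :name option, when given, should be forwarded verbatim to
+ # add_foreign_key instead of the default generated constraint name.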
+ it 'allows the use of a custom key name' do
+ expect(model).to receive(:add_foreign_key).with(
+ :projects,
+ :users,
+ column: :user_id,
+ on_delete: :cascade,
+ name: :foo
+ )
+
+ model.add_concurrent_foreign_key(
+ :projects,
+ :users,
+ column: :user_id,
+ name: :foo
+ )
+ end
+
it 'does not create a foreign key if it exists already' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true)
expect(model).not_to receive(:add_foreign_key)
@@ -257,6 +274,16 @@ describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
+
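+ # The custom name should also reach the raw-SQL path, where the constraint
+ # is added NOT VALID and then validated in a separate statement.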
+ it 'allows the use of a custom key name' do
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/NOT VALID/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/)
+ expect(model).to receive(:execute).with(/RESET ALL/)
+
+ model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 002359e5cc0..7a250603b6b 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -20,6 +20,9 @@ issues:
- timelogs
- issue_assignees
- closed_by
+- epic_issue
+- epic
+- designs
events:
- author
- project
@@ -38,6 +41,7 @@ notes:
- system_note_metadata
- note_diff_file
- suggestions
+- review
label_links:
- target
- label
@@ -57,6 +61,7 @@ milestone:
- merge_requests
- participants
- events
+- boards
snippets:
- author
- project
@@ -103,6 +108,19 @@ merge_requests:
- merge_request_assignees
- suggestions
- assignees
+- reviews
+- approval_rules
+- approvals
+- approvers
+- approver_users
+- approver_groups
+- approved_by_users
+- draft_notes
+- merge_train
+- blocks_as_blocker
+- blocks_as_blockee
+- blocking_merge_requests
+- blocked_merge_requests
merge_request_diff:
- merge_request
- merge_request_diff_commits
@@ -135,6 +153,16 @@ ci_pipelines:
- deployments
- environments
- chat_data
+- source_pipeline
+- source_bridge
+- source_job
+- sourced_pipelines
+- triggered_by_pipeline
+- triggered_pipelines
+- downstream_bridges
+- job_artifacts
+- vulnerabilities_occurrence_pipelines
+- vulnerabilities
pipeline_variables:
- pipeline
stages:
@@ -184,13 +212,18 @@ protected_branches:
- project
- merge_access_levels
- push_access_levels
+- unprotect_access_levels
protected_tags:
- project
- create_access_levels
merge_access_levels:
- protected_branch
+- user
+- group
push_access_levels:
- protected_branch
+- user
+- group
create_access_levels:
- user
- protected_tag
@@ -325,6 +358,45 @@ project:
- kubernetes_namespaces
- error_tracking_setting
- metrics_setting
+- gitlab_slack_application_service
+- github_service
+- protected_environments
+- mirror_user
+- push_rule
+- jenkins_service
+- jenkins_deprecated_service
+- index_status
+- feature_usage
+- approval_rules
+- approvers
+- approver_users
+- pages_domains
+- audit_events
+- path_locks
+- approver_groups
+- repository_state
+- source_pipelines
+- sourced_pipelines
+- prometheus_metrics
+- vulnerabilities
+- vulnerability_feedback
+- vulnerability_identifiers
+- vulnerability_scanners
+- operations_feature_flags
+- operations_feature_flags_client
+- prometheus_alerts
+- prometheus_alert_events
+- software_license_policies
+- project_registry
+- packages
+- package_files
+- tracing_setting
+- alerting_setting
+- webide_pipelines
+- reviews
+- incident_management_setting
+- merge_trains
+- designs
award_emoji:
- awardable
- user
@@ -332,6 +404,7 @@ priorities:
- label
prometheus_metrics:
- project
+- prometheus_alerts
timelogs:
- issue
- merge_request
@@ -365,3 +438,34 @@ suggestions:
- note
metrics_setting:
- project
+protected_environments:
+- project
+- deploy_access_levels
+deploy_access_levels:
+- protected_environment
+- user
+- group
+unprotect_access_levels:
+- user
+- protected_branch
+- group
+prometheus_alerts:
+- project
+- prometheus_alert_events
+prometheus_alert_events:
+- project
+epic_issues:
+- issue
+- epic
+tracing_setting:
+- project
+reviews:
+- project
+- merge_request
+- author
+- notes
+incident_management_setting:
+- project
+merge_trains:
+- project
+- merge_request
diff --git a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
index ddfbb020a55..fef84c87509 100644
--- a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
@@ -10,7 +10,7 @@ require 'spec_helper'
describe 'Import/Export attribute configuration' do
include ConfigurationHelper
- let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
+ let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:relation_names) do
names = names_from_tree(config_hash['project_tree'])
@@ -23,9 +23,6 @@ describe 'Import/Export attribute configuration' do
let(:safe_attributes_file) { 'spec/lib/gitlab/import_export/safe_model_attributes.yml' }
let(:safe_model_attributes) { YAML.load_file(safe_attributes_file) }
- let(:ee_safe_attributes_file) { 'ee/spec/lib/gitlab/import_export/safe_model_attributes.yml' }
- let(:ee_safe_model_attributes) { File.exist?(ee_safe_attributes_file) ? YAML.load_file(ee_safe_attributes_file) : {} }
-
it 'has no new columns' do
relation_names.each do |relation_name|
relation_class = relation_class_for_name(relation_name)
@@ -34,10 +31,6 @@ describe 'Import/Export attribute configuration' do
current_attributes = parsed_attributes(relation_name, relation_attributes)
safe_attributes = safe_model_attributes[relation_class.to_s].dup || []
- ee_safe_model_attributes[relation_class.to_s].to_a.each do |attribute|
- safe_attributes << attribute
- end
-
expect(safe_attributes).not_to be_nil, "Expected exported class #{relation_class} to exist in safe_model_attributes"
new_attributes = current_attributes - safe_attributes
@@ -51,8 +44,7 @@ describe 'Import/Export attribute configuration' do
It looks like #{relation_class}, which is exported using the project Import/Export, has new attributes: #{new_attributes.join(',')}
Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if you consider this can be exported.
- #{"If the model/associations are EE-specific, use `#{File.expand_path(ee_safe_attributes_file)}`.\n" if ee_safe_model_attributes.any?}
- Otherwise, please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
+ Please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding them to the corresponding
model in the +excluded_attributes+ section.
SAFE_MODEL_ATTRIBUTES: #{File.expand_path(safe_attributes_file)}
diff --git a/spec/lib/gitlab/import_export/config_spec.rb b/spec/lib/gitlab/import_export/config_spec.rb
new file mode 100644
index 00000000000..cf396dba382
--- /dev/null
+++ b/spec/lib/gitlab/import_export/config_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::Config do
+ let(:yaml_file) { described_class.new }
+
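+ # Config#to_h parses import_export.yml; when the EE section applies it is
+ # deep-merged into the CE config, otherwise it is dropped entirely.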
+ describe '#to_h' do
+ context 'when using CE' do
+ before do
+ allow(yaml_file)
+ .to receive(:merge?)
+ .and_return(false)
+ end
+
+ it 'just returns the parsed Hash without the EE section' do
+ expected = YAML.load_file(Gitlab::ImportExport.config_file)
+ expected.delete('ee')
+
+ expect(yaml_file.to_h).to eq(expected)
+ end
+ end
+
+ context 'when using EE' do
+ before do
+ allow(yaml_file)
+ .to receive(:merge?)
+ .and_return(true)
+ end
+
+ it 'merges the EE project tree into the CE project tree' do
+ allow(yaml_file)
+ .to receive(:parse_yaml)
+ .and_return({
+ 'project_tree' => [
+ {
+ 'issues' => [
+ :id,
+ :title,
+ { 'notes' => [:id, :note, { 'author' => [:name] }] }
+ ]
+ }
+ ],
+ 'ee' => {
+ 'project_tree' => [
+ {
+ 'issues' => [
+ :description,
+ { 'notes' => [:date, { 'author' => [:email] }] }
+ ]
+ },
+ { 'foo' => [{ 'bar' => %i[baz] }] }
+ ]
+ }
+ })
+
+ expect(yaml_file.to_h).to eq({
+ 'project_tree' => [
+ {
+ 'issues' => [
+ :id,
+ :title,
+ {
+ 'notes' => [
+ :id,
+ :note,
+ { 'author' => [:name, :email] },
+ :date
+ ]
+ },
+ :description
+ ]
+ },
+ { 'foo' => [{ 'bar' => %i[baz] }] }
+ ]
+ })
+ end
+
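+ # The attribute and method maps below merge per model: EE values are
+ # concatenated onto the CE arrays, and EE-only models become new keys.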
+ it 'merges the excluded attributes list' do
+ allow(yaml_file)
+ .to receive(:parse_yaml)
+ .and_return({
+ 'project_tree' => [],
+ 'excluded_attributes' => {
+ 'project' => %i[id title],
+ 'notes' => %i[id]
+ },
+ 'ee' => {
+ 'project_tree' => [],
+ 'excluded_attributes' => {
+ 'project' => %i[date],
+ 'foo' => %i[bar baz]
+ }
+ }
+ })
+
+ expect(yaml_file.to_h).to eq({
+ 'project_tree' => [],
+ 'excluded_attributes' => {
+ 'project' => %i[id title date],
+ 'notes' => %i[id],
+ 'foo' => %i[bar baz]
+ }
+ })
+ end
+
+ it 'merges the included attributes list' do
+ allow(yaml_file)
+ .to receive(:parse_yaml)
+ .and_return({
+ 'project_tree' => [],
+ 'included_attributes' => {
+ 'project' => %i[id title],
+ 'notes' => %i[id]
+ },
+ 'ee' => {
+ 'project_tree' => [],
+ 'included_attributes' => {
+ 'project' => %i[date],
+ 'foo' => %i[bar baz]
+ }
+ }
+ })
+
+ expect(yaml_file.to_h).to eq({
+ 'project_tree' => [],
+ 'included_attributes' => {
+ 'project' => %i[id title date],
+ 'notes' => %i[id],
+ 'foo' => %i[bar baz]
+ }
+ })
+ end
+
+ it 'merges the methods list' do
+ allow(yaml_file)
+ .to receive(:parse_yaml)
+ .and_return({
+ 'project_tree' => [],
+ 'methods' => {
+ 'project' => %i[id title],
+ 'notes' => %i[id]
+ },
+ 'ee' => {
+ 'project_tree' => [],
+ 'methods' => {
+ 'project' => %i[date],
+ 'foo' => %i[bar baz]
+ }
+ }
+ })
+
+ expect(yaml_file.to_h).to eq({
+ 'project_tree' => [],
+ 'methods' => {
+ 'project' => %i[id title date],
+ 'notes' => %i[id],
+ 'foo' => %i[bar baz]
+ }
+ })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/model_configuration_spec.rb b/spec/lib/gitlab/import_export/model_configuration_spec.rb
index 2e28f978c3a..5ed9fef1597 100644
--- a/spec/lib/gitlab/import_export/model_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/model_configuration_spec.rb
@@ -6,7 +6,7 @@ require 'spec_helper'
describe 'Import/Export model configuration' do
include ConfigurationHelper
- let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
+ let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:model_names) do
names = names_from_tree(config_hash['project_tree'])
@@ -16,26 +16,9 @@ describe 'Import/Export model configuration' do
# - User, Author... Models we do not care about for checking models
names.flatten.uniq - %w(milestones labels user author) + ['project']
end
- let(:ce_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' }
- let(:ce_models_hash) { YAML.load_file(ce_models_yml) }
-
- let(:ee_models_yml) { 'ee/spec/lib/gitlab/import_export/all_models.yml' }
- let(:ee_models_hash) { File.exist?(ee_models_yml) ? YAML.load_file(ee_models_yml) : {} }
-
+ let(:all_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' }
+ let(:all_models_hash) { YAML.load_file(all_models_yml) }
let(:current_models) { setup_models }
- let(:all_models_hash) do
- all_models_hash = ce_models_hash.dup
-
- all_models_hash.each do |model, associations|
- associations.concat(ee_models_hash[model] || [])
- end
-
- ee_models_hash.each do |model, associations|
- all_models_hash[model] ||= associations
- end
-
- all_models_hash
- end
it 'has no new models' do
model_names.each do |model_name|
@@ -59,8 +42,7 @@ describe 'Import/Export model configuration' do
If you think this model should be included in the export, please add it to `#{Gitlab::ImportExport.config_file}`.
- Definitely add it to `#{File.expand_path(ce_models_yml)}`
- #{"or `#{File.expand_path(ee_models_yml)}` if the model/associations are EE-specific\n" if ee_models_hash.any?}
+ Definitely add it to `#{File.expand_path(all_models_yml)}`
to signal that you've handled this error and to prevent it from showing up in the future.
MSG
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 9093d21647a..a406c25b1d8 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -24,6 +24,7 @@ Issue:
- weight
- time_estimate
- relative_position
+- service_desk_reply_to
- last_edited_at
- last_edited_by_id
- discussion_locked
@@ -68,6 +69,7 @@ Note:
- resolved_by_push
- discussion_id
- original_discussion_id
+- review_id
LabelLink:
- id
- label_id
@@ -144,6 +146,8 @@ ProjectMember:
- invite_accepted_at
- requested_at
- expires_at
+- ldap
+- override
User:
- id
- username
@@ -316,6 +320,7 @@ CommitStatus:
- protected
- failure_reason
- scheduled_at
+- upstream_pipeline_id
Ci::Variable:
- id
- project_id
@@ -491,6 +496,17 @@ Project:
- printing_merge_request_link_enabled
- resolve_outdated_diff_discussions
- build_allow_git_fetch
+- merge_requests_template
+- merge_requests_rebase_enabled
+- approvals_before_merge
+- merge_requests_author_approval
+- reset_approvals_on_push
+- disable_overriding_approvers_per_merge_request
+- merge_requests_ff_only_enabled
+- issues_template
+- repository_size_limit
+- sync_time
+- service_desk_enabled
- last_repository_updated_at
- ci_config_path
- delete_error
@@ -498,7 +514,13 @@ Project:
- merge_requests_rebase_enabled
- jobs_cache_index
- external_authorization_classification_label
+- external_webhook_token
- pages_https_only
+- merge_requests_disable_committers_approval
+- merge_requests_require_code_owner_approval
+- require_password_to_approve
+ProjectTracingSetting:
+- external_url
Author:
- name
ProjectFeature:
@@ -519,12 +541,24 @@ ProtectedBranch::MergeAccessLevel:
- access_level
- created_at
- updated_at
+- user_id
+- group_id
ProtectedBranch::PushAccessLevel:
- id
- protected_branch_id
- access_level
- created_at
- updated_at
+- user_id
+- group_id
+ProtectedBranch::UnprotectAccessLevel:
+- id
+- protected_branch_id
+- access_level
+- created_at
+- updated_at
+- user_id
+- group_id
ProtectedTag::CreateAccessLevel:
- id
- protected_tag_id
@@ -587,6 +621,12 @@ PrometheusMetric:
- group
- common
- identifier
+PrometheusAlert:
+- threshold
+- operator
+- environment_id
+- project_id
+- prometheus_metric_id
Badge:
- id
- link_url
@@ -598,6 +638,20 @@ Badge:
- type
ProjectCiCdSetting:
- group_runners_enabled
+ProtectedEnvironment:
+- id
+- project_id
+- name
+- created_at
+- updated_at
+ProtectedEnvironment::DeployAccessLevel:
+- id
+- protected_environment_id
+- access_level
+- created_at
+- updated_at
+- user_id
+- group_id
ResourceLabelEvent:
- id
- action
diff --git a/spec/migrations/active_record/schema_spec.rb b/spec/migrations/active_record/schema_spec.rb
index 9d35b3cd642..fbf5d387d0e 100644
--- a/spec/migrations/active_record/schema_spec.rb
+++ b/spec/migrations/active_record/schema_spec.rb
@@ -5,8 +5,7 @@ require 'spec_helper'
describe ActiveRecord::Schema do
let(:latest_migration_timestamp) do
- migrations_paths = %w[db ee/db]
- .product(%w[migrate post_migrate])
+ migrations_paths = %w[db/migrate db/post_migrate]
.map { |path| Rails.root.join(*path, '*') }
migrations = Dir[*migrations_paths]
diff --git a/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb b/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
new file mode 100644
index 00000000000..cad10ba30ef
--- /dev/null
+++ b/spec/migrations/add_unique_constraint_to_approvals_user_id_and_merge_request_id_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb')
+
+describe AddUniqueConstraintToApprovalsUserIdAndMergeRequestId, :migration do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:approvals) { table(:approvals) }
+
+ describe '#up' do
+ before do
+ namespaces.create(id: 1, name: 'ns', path: 'ns')
+ projects.create(id: 1, namespace_id: 1)
+ merge_requests.create(id: 1, target_branch: 'master', source_branch: 'feature-1', target_project_id: 1)
+ merge_requests.create(id: 2, target_branch: 'master', source_branch: 'feature-2', target_project_id: 1)
+ end
+
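+ # The migration first deduplicates (user_id, merge_request_id) pairs,
+ # keeping the earliest row, and only then adds the unique index.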
+ it 'deletes duplicate records and keeps the first one' do
+ first_approval = approvals.create(id: 1, merge_request_id: 1, user_id: 1)
+ approvals.create(id: 2, merge_request_id: 1, user_id: 1)
+
+ migration.up
+
+ expect(approvals.all.to_a).to contain_exactly(first_approval)
+ end
+
+ it 'does not delete unique records' do
+ unique_approvals = [
+ approvals.create(id: 1, merge_request_id: 1, user_id: 1),
+ approvals.create(id: 2, merge_request_id: 1, user_id: 2),
+ approvals.create(id: 3, merge_request_id: 2, user_id: 1)
+ ]
+
+ migration.up
+
+ expect(approvals.all.to_a).to contain_exactly(*unique_approvals)
+ end
+
+ it 'creates unique index' do
+ migration.up
+
+ expect(migration.index_exists?(:approvals, [:user_id, :merge_request_id], unique: true)).to be_truthy
+ end
+ end
+
+ describe '#down' do
+ it 'removes unique index' do
+ migration.up
+ migration.down
+
+ expect(migration.index_exists?(:approvals, [:user_id, :merge_request_id], unique: true)).to be_falsey
+ end
+ end
+end
diff --git a/spec/migrations/clean_up_for_members_spec.rb b/spec/migrations/clean_up_for_members_spec.rb
deleted file mode 100644
index 1a79f94cf0d..00000000000
--- a/spec/migrations/clean_up_for_members_spec.rb
+++ /dev/null
@@ -1,83 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'migrate', '20171216111734_clean_up_for_members.rb')
-
-describe CleanUpForMembers, :migration do
- before do
- stub_feature_flags(enforced_sso: false)
- end
-
- let(:migration) { described_class.new }
- let(:groups) { table(:namespaces) }
- let!(:group_member) { create_group_member }
- let!(:unbinded_group_member) { create_group_member }
- let!(:invited_group_member) { create_group_member(true) }
- let!(:not_valid_group_member) { create_group_member }
- let!(:project_member) { create_project_member }
- let!(:invited_project_member) { create_project_member(true) }
- let!(:unbinded_project_member) { create_project_member }
- let!(:not_valid_project_member) { create_project_member }
-
- it 'removes members without proper user_id' do
- unbinded_group_member.update_column(:user_id, nil)
- not_valid_group_member.update_column(:user_id, 9999)
- unbinded_project_member.update_column(:user_id, nil)
- not_valid_project_member.update_column(:user_id, 9999)
-
- migrate!
-
- expect(Member.all).not_to include(unbinded_group_member, not_valid_group_member, unbinded_project_member, not_valid_project_member)
- expect(Member.all).to include(group_member, invited_group_member, project_member, invited_project_member)
- end
-
- def create_group_member(invited = false)
- fill_member(GroupMember.new(source_id: create_group.id, source_type: 'Namespace'), invited)
- end
-
- def create_project_member(invited = false)
- fill_member(ProjectMember.new(project: create_project), invited)
- end
-
- def fill_member(member_object, invited)
- member_object.tap do |m|
- m.access_level = 40
- m.notification_level = 3
-
- if invited
- m.user_id = nil
- m.invite_token = 'xxx'
- m.invite_email = 'email@email.com'
- else
- m.user_id = create_user.id
- end
-
- m.save
- end
-
- member_object
- end
-
- def create_group
- name = FFaker::Lorem.characters(10)
-
- groups.create!(type: 'Group', name: name, path: name.downcase.gsub(/\s/, '_'))
- end
-
- def create_project
- name = FFaker::Lorem.characters(10)
- creator = create_user
-
- Project.create(name: name,
- path: name.downcase.gsub(/\s/, '_'),
- namespace: creator.namespace,
- creator: creator)
- end
-
- def create_user
- User.create(email: FFaker::Internet.email,
- password: '12345678',
- name: FFaker::Name.name,
- username: FFaker::Internet.user_name,
- confirmed_at: Time.now,
- confirmation_token: nil)
- end
-end
diff --git a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb b/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb
deleted file mode 100644
index 651341906c2..00000000000
--- a/spec/migrations/cleanup_namespaceless_pending_delete_projects_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespaceless_pending_delete_projects.rb')
-
-describe CleanupNamespacelessPendingDeleteProjects, :migration, schema: 20180222043024 do
- let(:projects) { table(:projects) }
-
- before do
- # Stub after_save callbacks that will fail when Project has no namespace
- allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil)
- allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
- end
-
- describe '#up' do
- it 'only cleans up pending delete projects' do
- projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
- projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ee', namespace_id: 2, pending_delete: true)
- project = Project.new(pending_delete: true, namespace_id: nil)
- project.save(validate: false)
-
- expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]])
-
- described_class.new.up
- end
-
- it 'does nothing when no pending delete projects without namespace found' do
- projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
- projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ee', namespace_id: 2, pending_delete: true)
-
- expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async)
-
- described_class.new.up
- end
- end
-end
diff --git a/spec/migrations/create_missing_namespace_for_internal_users_spec.rb b/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
index ac3a4b1f68f..3fd4c5bc8d6 100644
--- a/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
+++ b/spec/migrations/create_missing_namespace_for_internal_users_spec.rb
@@ -6,37 +6,32 @@ describe CreateMissingNamespaceForInternalUsers, :migration do
let(:namespaces) { table(:namespaces) }
let(:routes) { table(:routes) }
- internal_user_types = [:ghost]
- internal_user_types << :support_bot if ActiveRecord::Base.connection.column_exists?(:users, :support_bot)
-
- internal_user_types.each do |attr|
- context "for #{attr} user" do
- let(:internal_user) do
- users.create!(email: 'test@example.com', projects_limit: 100, username: 'test', attr => true)
- end
+ context "for ghost user" do
+ let(:internal_user) do
+ users.create!(email: 'test@example.com', projects_limit: 100, username: 'test', ghost: true)
+ end
- it 'creates the missing namespace' do
- expect(namespaces.find_by(owner_id: internal_user.id)).to be_nil
+ it 'creates the missing namespace' do
+ expect(namespaces.find_by(owner_id: internal_user.id)).to be_nil
- migrate!
+ migrate!
- namespace = Namespace.find_by(type: nil, owner_id: internal_user.id)
- route = namespace.route
+ namespace = Namespace.find_by(type: nil, owner_id: internal_user.id)
+ route = namespace.route
- expect(namespace.path).to eq(route.path)
- expect(namespace.name).to eq(route.name)
- end
+ expect(namespace.path).to eq(route.path)
+ expect(namespace.name).to eq(route.name)
+ end
- it 'sets notification email' do
- users.update(internal_user.id, notification_email: nil)
+ it 'sets notification email' do
+ users.update(internal_user.id, notification_email: nil)
- expect(users.find(internal_user.id).notification_email).to be_nil
+ expect(users.find(internal_user.id).notification_email).to be_nil
- migrate!
+ migrate!
- user = users.find(internal_user.id)
- expect(user.notification_email).to eq(user.email)
- end
+ user = users.find(internal_user.id)
+ expect(user.notification_email).to eq(user.email)
end
end
end
diff --git a/spec/migrations/delete_inconsistent_internal_id_records_spec.rb b/spec/migrations/delete_inconsistent_internal_id_records_spec.rb
deleted file mode 100644
index 58b8b4a16f0..00000000000
--- a/spec/migrations/delete_inconsistent_internal_id_records_spec.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20180723130817_delete_inconsistent_internal_id_records.rb')
-
-describe DeleteInconsistentInternalIdRecords, :migration do
- let!(:namespace) { table(:namespaces).create!(name: 'test', path: 'test') }
- let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:project3) { table(:projects).create!(namespace_id: namespace.id) }
-
- let(:internal_ids) { table(:internal_ids) }
- let(:internal_id_query) { ->(project) { InternalId.where(usage: InternalId.usages[scope.to_s.tableize], project_id: project.id) } }
-
- let(:create_models) do
- [project1, project2, project3].each do |project|
- 3.times do |i|
- attributes = required_attributes.merge(project_id: project.id,
- iid: i.succ)
-
- table(scope.to_s.pluralize).create!(attributes)
- end
- end
- end
-
- shared_examples_for 'deleting inconsistent internal_id records' do
- before do
- create_models
-
- [project1, project2, project3].each do |project|
- internal_ids.create!(project_id: project.id, usage: InternalId.usages[scope.to_s.tableize], last_value: 3)
- end
-
- internal_id_query.call(project1).first.tap do |iid|
- iid.last_value = iid.last_value - 2
- # This is an inconsistent record
- iid.save!
- end
-
- internal_id_query.call(project3).first.tap do |iid|
- iid.last_value = iid.last_value + 2
- # This is a consistent record
- iid.save!
- end
- end
-
- it "deletes inconsistent records" do
- expect { migrate! }.to change { internal_id_query.call(project1).size }.from(1).to(0)
- end
-
- it "retains consistent records" do
- expect { migrate! }.not_to change { internal_id_query.call(project2).size }
- end
-
- it "retains consistent records, especially those with a greater last_value" do
- expect { migrate! }.not_to change { internal_id_query.call(project3).size }
- end
- end
-
- context 'for issues' do
- let(:scope) { :issue }
- let(:required_attributes) { {} }
-
- it_behaves_like 'deleting inconsistent internal_id records'
- end
-
- context 'for merge_requests' do
- let(:scope) { :merge_request }
-
- let(:create_models) do
- [project1, project2, project3].each do |project|
- 3.times do |i|
- table(:merge_requests).create!(
- target_project_id: project.id,
- source_project_id: project.id,
- target_branch: 'master',
- source_branch: i.to_s,
- iid: i.succ
- )
- end
- end
- end
-
- it_behaves_like 'deleting inconsistent internal_id records'
- end
-
- context 'for deployments' do
- let(:scope) { :deployment }
- let(:deployments) { table(:deployments) }
-
- let(:create_models) do
- 3.times { |i| deployments.create!(project_id: project1.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
- 3.times { |i| deployments.create!(project_id: project2.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
- 3.times { |i| deployments.create!(project_id: project3.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
- end
-
- it_behaves_like 'deleting inconsistent internal_id records'
- end
-
- context 'for milestones (by project)' do
- let(:scope) { :milestone }
- let(:required_attributes) { { title: 'test' } }
-
- it_behaves_like 'deleting inconsistent internal_id records'
- end
-
- context 'for ci_pipelines' do
- let(:scope) { :ci_pipeline }
- let(:required_attributes) { { ref: 'test' } }
-
- it_behaves_like 'deleting inconsistent internal_id records'
- end
-
- context 'for milestones (by group)' do
- # milestones (by group) is a little different from most of the other models
- let(:groups) { table(:namespaces) }
- let(:group1) { groups.create(name: 'Group 1', type: 'Group', path: 'group_1') }
- let(:group2) { groups.create(name: 'Group 2', type: 'Group', path: 'group_2') }
- let(:group3) { groups.create(name: 'Group 3', type: 'Group', path: 'group_3') }
-
- let(:internal_id_query) { ->(group) { InternalId.where(usage: InternalId.usages['milestones'], namespace_id: group.id) } }
-
- before do
- [group1, group2, group3].each do |group|
- 3.times do |i|
- table(:milestones).create!(
- group_id: group.id,
- title: 'test',
- iid: i.succ
- )
- end
-
- internal_ids.create!(namespace_id: group.id, usage: InternalId.usages['milestones'], last_value: 3)
- end
-
- internal_id_query.call(group1).first.tap do |iid|
- iid.last_value = iid.last_value - 2
- # This is an inconsistent record
- iid.save!
- end
-
- internal_id_query.call(group3).first.tap do |iid|
- iid.last_value = iid.last_value + 2
- # This is a consistent record
- iid.save!
- end
- end
-
- it "deletes inconsistent records" do
- expect { migrate! }.to change { internal_id_query.call(group1).size }.from(1).to(0)
- end
-
- it "retains consistent records" do
- expect { migrate! }.not_to change { internal_id_query.call(group2).size }
- end
-
- it "retains consistent records, especially those with a greater last_value" do
- expect { migrate! }.not_to change { internal_id_query.call(group3).size }
- end
- end
-end
diff --git a/spec/migrations/enqueue_delete_diff_files_workers_spec.rb b/spec/migrations/enqueue_delete_diff_files_workers_spec.rb
deleted file mode 100644
index 6bae870920c..00000000000
--- a/spec/migrations/enqueue_delete_diff_files_workers_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20180619121030_enqueue_delete_diff_files_workers.rb')
-
-describe EnqueueDeleteDiffFilesWorkers, :migration, :sidekiq do
- it 'correctly schedules diff files deletion schedulers' do
- Sidekiq::Testing.fake! do
- expect(BackgroundMigrationWorker)
- .to receive(:perform_async)
- .with(described_class::SCHEDULER)
- .and_call_original
-
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
-end
diff --git a/spec/migrations/enqueue_redact_links_spec.rb b/spec/migrations/enqueue_redact_links_spec.rb
deleted file mode 100644
index a5da76977b7..00000000000
--- a/spec/migrations/enqueue_redact_links_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20181014121030_enqueue_redact_links.rb')
-
-describe EnqueueRedactLinks, :migration, :sidekiq do
- let(:merge_requests) { table(:merge_requests) }
- let(:issues) { table(:issues) }
- let(:notes) { table(:notes) }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:snippets) { table(:snippets) }
- let(:users) { table(:users) }
- let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
-
- text = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
- group = namespaces.create!(name: 'gitlab', path: 'gitlab')
- project = projects.create!(namespace_id: group.id)
-
- merge_requests.create!(id: 1, target_project_id: project.id, source_project_id: project.id, target_branch: 'feature', source_branch: 'master', description: text)
- issues.create!(id: 1, description: text)
- notes.create!(id: 1, note: text)
- notes.create!(id: 2, note: text)
- snippets.create!(id: 1, description: text, author_id: user.id)
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "Note", "note", 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, "Note", "note", 2, 2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "Issue", "description", 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "MergeRequest", "description", 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "Snippet", "description", 1, 1)
- expect(BackgroundMigrationWorker.jobs.size).to eq 5
- end
- end
- end
-end
diff --git a/spec/migrations/migrate_import_attributes_data_from_projects_to_project_mirror_data_spec.rb b/spec/migrations/migrate_import_attributes_data_from_projects_to_project_mirror_data_spec.rb
deleted file mode 100644
index 972c6dffc6f..00000000000
--- a/spec/migrations/migrate_import_attributes_data_from_projects_to_project_mirror_data_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb')
-
-describe MigrateImportAttributesDataFromProjectsToProjectMirrorData, :sidekiq, :migration do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:import_state) { table(:project_mirror_data) }
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
-
- projects.create!(id: 1, namespace_id: 1, name: 'gitlab1',
- path: 'gitlab1', import_error: "foo", import_status: :started,
- import_url: generate(:url))
- projects.create!(id: 2, namespace_id: 1, name: 'gitlab2',
- path: 'gitlab2', import_error: "bar", import_status: :failed,
- import_url: generate(:url))
- projects.create!(id: 3, namespace_id: 1, name: 'gitlab3', path: 'gitlab3', import_status: :none, import_url: generate(:url))
- end
-
- it 'schedules delayed background migrations in batches in bulk' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- expect(projects.where.not(import_status: :none).count).to eq(2)
-
- subject.up
-
- expect(BackgroundMigrationWorker.jobs.size).to eq 2
- expect(described_class::UP_MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
- expect(described_class::UP_MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
- end
- end
- end
-
- describe '#down' do
- before do
- import_state.create!(id: 1, project_id: 1, status: :started)
- import_state.create!(id: 2, project_id: 2, status: :started)
- end
-
- it 'schedules delayed background migrations in batches in bulk for rollback' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- expect(import_state.where.not(status: :none).count).to eq(2)
-
- subject.down
-
- expect(BackgroundMigrationWorker.jobs.size).to eq 2
- expect(described_class::DOWN_MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
- expect(described_class::DOWN_MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
- end
- end
- end
- end
-end
diff --git a/spec/migrations/migrate_remaining_mr_metrics_populating_background_migration_spec.rb b/spec/migrations/migrate_remaining_mr_metrics_populating_background_migration_spec.rb
deleted file mode 100644
index 47dab18183c..00000000000
--- a/spec/migrations/migrate_remaining_mr_metrics_populating_background_migration_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20180521162137_migrate_remaining_mr_metrics_populating_background_migration.rb')
-
-describe MigrateRemainingMrMetricsPopulatingBackgroundMigration, :migration, :sidekiq do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:mrs) { table(:merge_requests) }
-
- before do
- namespaces.create!(id: 1, name: 'foo', path: 'foo')
- projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 1)
- projects.create!(id: 456, name: 'gitlab2', path: 'gitlab2', namespace_id: 1)
- projects.create!(id: 789, name: 'gitlab3', path: 'gitlab3', namespace_id: 1)
- mrs.create!(title: 'foo', target_branch: 'target', source_branch: 'source', target_project_id: 123)
- mrs.create!(title: 'bar', target_branch: 'target', source_branch: 'source', target_project_id: 456)
- mrs.create!(title: 'kux', target_branch: 'target', source_branch: 'source', target_project_id: 789)
- end
-
- it 'correctly schedules background migrations' do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(10.minutes, mrs.first.id, mrs.second.id)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(20.minutes, mrs.third.id, mrs.third.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/populate_mr_metrics_with_events_data_spec.rb b/spec/migrations/populate_mr_metrics_with_events_data_spec.rb
deleted file mode 100644
index 291a52b904d..00000000000
--- a/spec/migrations/populate_mr_metrics_with_events_data_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20181204154019_populate_mr_metrics_with_events_data.rb')
-
-describe PopulateMrMetricsWithEventsData, :migration, :sidekiq do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create(namespace_id: namespace.id, name: 'foo') }
- let(:merge_requests) { table(:merge_requests) }
-
- def create_merge_request(id)
- params = {
- id: id,
- target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: 'mr name',
- title: "mr name#{id}"
- }
-
- merge_requests.create!(params)
- end
-
- it 'correctly schedules background migrations' do
- create_merge_request(1)
- create_merge_request(2)
- create_merge_request(3)
-
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(8.minutes, 1, 2)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(16.minutes, 3, 3)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/populate_project_statistics_packages_size_spec.rb b/spec/migrations/populate_project_statistics_packages_size_spec.rb
new file mode 100644
index 00000000000..4ad91342f25
--- /dev/null
+++ b/spec/migrations/populate_project_statistics_packages_size_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20190418132125_populate_project_statistics_packages_size.rb')
+
+describe PopulateProjectStatisticsPackagesSize, :migration do
+ let(:project_statistics) { table(:project_statistics) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:packages) { table(:packages_packages) }
+ let(:package_files) { table(:packages_package_files) }
+
+ let(:file_size) { 1.kilobyte }
+ let(:repo_size) { 2.megabytes }
+ let(:lfs_size) { 3.gigabytes }
+ let(:artifacts_size) { 4.terabytes }
+ let(:storage_size) { repo_size + lfs_size + artifacts_size }
+
+ let(:namespace) { namespaces.create(name: 'foo', path: 'foo') }
+ let(:package) { packages.create!(project_id: project.id, name: 'a package', package_type: 1) }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+
+ let!(:statistics) { project_statistics.create!(project_id: project.id, namespace_id: namespace.id, storage_size: storage_size, repository_size: repo_size, lfs_objects_size: lfs_size, build_artifacts_size: artifacts_size) }
+ let!(:package_file) { package_files.create!(package_id: package.id, file: 'a file.txt', file_name: 'a file.txt', size: file_size) }
+
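+ # packages_size should be backfilled from the sum of the package file
+ # sizes, and storage_size should grow by the same amount.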
+ it 'backfills ProjectStatistics packages_size' do
+ expect { migrate! }
+ .to change { statistics.reload.packages_size }
+ .from(nil).to(file_size)
+ end
+
+ it 'updates ProjectStatistics storage_size' do
+ expect { migrate! }
+ .to change { statistics.reload.storage_size }
+ .by(file_size)
+ end
+end
diff --git a/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb b/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
new file mode 100644
index 00000000000..99dfb165173
--- /dev/null
+++ b/spec/migrations/populate_rule_type_on_approval_merge_request_rules_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20190520201748_populate_rule_type_on_approval_merge_request_rules.rb')
+
+describe PopulateRuleTypeOnApprovalMergeRequestRules, :migration do
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:approval_rules) { table(:approval_merge_request_rules) }
+
+ # We use integers here since at the time of writing CE does not yet have the
+ # appropriate models and enum definitions.
+ let(:regular_rule_type) { 1 }
+ let(:code_owner_rule_type) { 2 }
+
+ before do
+ namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab')
+ projects.create!(id: 101, namespace_id: 11, name: 'gitlab', path: 'gitlab')
+ merge_requests.create!(id: 1, target_project_id: 101, source_project_id: 101, target_branch: 'feature', source_branch: 'master')
+
+ approval_rules.create!(id: 1, merge_request_id: 1, name: "Default", code_owner: false, rule_type: regular_rule_type)
+ approval_rules.create!(id: 2, merge_request_id: 1, name: "Code Owners", code_owner: true, rule_type: regular_rule_type)
+ end
+
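+ # Only rules flagged code_owner: true should be moved to the code_owner
+ # rule_type; regular rules keep their current value.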
+ it 'backfills ApprovalMergeRequestRules code_owner rule_type' do
+ expect(approval_rules.where(rule_type: regular_rule_type).pluck(:id)).to contain_exactly(1, 2)
+ expect(approval_rules.where(rule_type: code_owner_rule_type).pluck(:id)).to be_empty
+
+ migrate!
+
+ expect(approval_rules.where(rule_type: regular_rule_type).pluck(:id)).to contain_exactly(1)
+ expect(approval_rules.where(rule_type: code_owner_rule_type).pluck(:id)).to contain_exactly(2)
+ end
+ end
+end
diff --git a/spec/migrations/remove_orphaned_label_links_spec.rb b/spec/migrations/remove_orphaned_label_links_spec.rb
deleted file mode 100644
index e8c44c141c3..00000000000
--- a/spec/migrations/remove_orphaned_label_links_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20180906051323_remove_orphaned_label_links.rb')
-
-describe RemoveOrphanedLabelLinks, :migration do
- let(:label_links) { table(:label_links) }
- let(:labels) { table(:labels) }
-
- let(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let(:label) { create_label }
-
- before do
- # This migration was created before we introduced ProjectCiCdSetting#default_git_depth
- allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth).and_return(nil)
- allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth=).and_return(0)
- end
-
- context 'add foreign key on label_id' do
- let!(:label_link_with_label) { create_label_link(label_id: label.id) }
- let!(:label_link_without_label) { create_label_link(label_id: nil) }
-
- it 'removes orphaned label links without a corresponding label' do
- expect { migrate! }.to change { LabelLink.count }.from(2).to(1)
- end
-
- it 'does not remove entries with valid label_id' do
- expect { migrate! }.not_to change { label_link_with_label.reload }
- end
- end
-
- def create_label(**opts)
- labels.create!(
- project_id: project.id,
- **opts
- )
- end
-
- def create_label_link(**opts)
- label_links.create!(
- target_id: 1,
- target_type: 'Issue',
- **opts
- )
- end
-end
diff --git a/spec/migrations/remove_soft_removed_objects_spec.rb b/spec/migrations/remove_soft_removed_objects_spec.rb
deleted file mode 100644
index d0bde98b80e..00000000000
--- a/spec/migrations/remove_soft_removed_objects_spec.rb
+++ /dev/null
@@ -1,99 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20171207150343_remove_soft_removed_objects.rb')
-
-describe RemoveSoftRemovedObjects, :migration do
- describe '#up' do
- let!(:groups) do
- table(:namespaces).tap do |t|
- t.inheritance_column = nil
- end
- end
-
- let!(:routes) do
- table(:routes).tap do |t|
- t.inheritance_column = nil
- end
- end
-
- it 'removes various soft removed objects' do
- 5.times do
- create_with_deleted_at(:issue)
- end
-
- regular_issue = create(:issue) # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- run_migration
-
- expect(Issue.count).to eq(1)
- expect(Issue.first).to eq(regular_issue)
- end
-
- it 'removes the temporary indexes once soft removed data has been removed' do
- migration = described_class.new
-
- run_migration
-
- disable_migrations_output do
- expect(migration.temporary_index_exists?(Issue)).to eq(false)
- end
- end
-
- it 'removes routes of soft removed personal namespaces' do
- namespace = create_with_deleted_at(:namespace)
- group = groups.create!(name: 'group', path: 'group_path', type: 'Group')
- routes.create!(source_id: group.id, source_type: 'Group', name: 'group', path: 'group_path')
-
- expect(routes.where(source_id: namespace.id).exists?).to eq(true)
- expect(routes.where(source_id: group.id).exists?).to eq(true)
-
- run_migration
-
- expect(routes.where(source_id: namespace.id).exists?).to eq(false)
- expect(routes.where(source_id: group.id).exists?).to eq(true)
- end
-
- it 'schedules the removal of soft removed groups' do
- group = create_deleted_group
- admin = create(:user, admin: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- expect_any_instance_of(GroupDestroyWorker)
- .to receive(:perform)
- .with(group.id, admin.id)
-
- run_migration
- end
-
- it 'does not remove soft removed groups when no admin user could be found' do
- create_deleted_group
-
- expect_any_instance_of(GroupDestroyWorker)
- .not_to receive(:perform)
-
- run_migration
- end
- end
-
- def run_migration
- disable_migrations_output do
- migrate!
- end
- end
-
- def create_with_deleted_at(*args)
- row = create(*args) # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- # We set "deleted_at" this way so we don't run into any column cache issues.
- row.class.where(id: row.id).update_all(deleted_at: 1.year.ago)
-
- row
- end
-
- def create_deleted_group
- group = groups.create!(name: 'group', path: 'group_path', type: 'Group')
- routes.create!(source_id: group.id, source_type: 'Group', name: 'group', path: 'group_path')
-
- groups.where(id: group.id).update_all(deleted_at: 1.year.ago)
-
- group
- end
-end
diff --git a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
deleted file mode 100644
index c4427910518..00000000000
--- a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys')
-
-describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
- before do
- create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key) # rubocop:disable RSpec/FactoriesInMigrationSpecs
- create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key) # rubocop:disable RSpec/FactoriesInMigrationSpecs
- # Delete all subkeys so they can be recreated
- GpgKeySubkey.destroy_all # rubocop: disable DestroyAll
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_migration(1)
- expect(described_class::MIGRATION).to be_scheduled_migration(2)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
-
- it 'schedules background migrations' do
- perform_enqueued_jobs do
- expect(GpgKeySubkey.count).to eq(0)
-
- migrate!
-
- expect(GpgKeySubkey.count).to eq(3)
- end
- end
-end
diff --git a/spec/migrations/schedule_merge_request_assignees_migration_progress_check_spec.rb b/spec/migrations/schedule_merge_request_assignees_migration_progress_check_spec.rb
new file mode 100644
index 00000000000..bea985fabb1
--- /dev/null
+++ b/spec/migrations/schedule_merge_request_assignees_migration_progress_check_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20190402224749_schedule_merge_request_assignees_migration_progress_check.rb')
+
+describe ScheduleMergeRequestAssigneesMigrationProgressCheck do
+ describe '#up' do
+ it 'schedules MergeRequestAssigneesMigrationProgressCheck background job' do
+ expect(BackgroundMigrationWorker).to receive(:perform_async)
+ .with(described_class::MIGRATION)
+ .and_call_original
+
+ subject.up
+ end
+ end
+end
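
The expectations in this new spec pin down the migration's entire behaviour: #up enqueues a single background job named by MIGRATION. A reconstruction of the likely migration shape; it is inferred from the spec, not copied from db/post_migrate, and the migration superclass version is a guess:

# frozen_string_literal: true

class ScheduleMergeRequestAssigneesMigrationProgressCheck < ActiveRecord::Migration[5.1]
  MIGRATION = 'MergeRequestAssigneesMigrationProgressCheck'

  def up
    # One job that checks whether the earlier merge-request-assignees
    # backfill has finished.
    BackgroundMigrationWorker.perform_async(MIGRATION)
  end

  def down
    # No-op: there is nothing to undo for a scheduled progress check.
  end
end
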
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
deleted file mode 100644
index 9f7e47bae0d..00000000000
--- a/spec/migrations/schedule_merge_request_diff_migrations_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20170703130158_schedule_merge_request_diff_migrations')
-
-describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do
- let(:merge_request_diffs) { table(:merge_request_diffs) }
- let(:merge_requests) { table(:merge_requests) }
- let(:projects) { table(:projects) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
-
- projects.create!(id: 1, name: 'gitlab', path: 'gitlab')
-
- merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master')
-
- merge_request_diffs.create!(id: 1, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: nil)
- merge_request_diffs.create!(id: 2, merge_request_id: 1, st_commits: nil, st_diffs: YAML.dump([]))
- merge_request_diffs.create!(id: 3, merge_request_id: 1, st_commits: nil, st_diffs: nil)
- merge_request_diffs.create!(id: 4, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: YAML.dump([]))
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(15.minutes, 4, 4)
- expect(BackgroundMigrationWorker.jobs.size).to eq 3
- end
- end
- end
-
- it 'migrates the data' do
- perform_enqueued_jobs do
- non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
-
- expect(merge_request_diffs.where(non_empty).count).to eq 3
-
- migrate!
-
- expect(merge_request_diffs.where(non_empty).count).to eq 0
- end
- end
-end
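
The timing assertions in the spec deleted above describe batched, delayed scheduling: with BATCH_SIZE stubbed to 1, every diff row that still carries st_commits or st_diffs data (ids 1, 2, and 4; id 3 has both columns NULL and is skipped) becomes its own job, spaced 5 minutes apart. A sketch of that loop; schedule_in_batches and the migration name are illustrative, while BackgroundMigrationWorker is GitLab's real worker:

require 'active_support/all'

# Illustrative helper, not part of the codebase.
def schedule_in_batches(ids, migration, batch_size:, interval: 5.minutes)
  ids.each_slice(batch_size).with_index(1) do |slice, index|
    # Each batch becomes one delayed job covering an id range.
    BackgroundMigrationWorker.perform_in(index * interval, migration, [slice.first, slice.last])
  end
end

schedule_in_batches([1, 2, 4], 'SomeBackgroundMigration', batch_size: 1)
# => jobs at 5, 10, and 15 minutes for ranges (1,1), (2,2), (4,4)
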
diff --git a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb b/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
deleted file mode 100644
index 5bcb923af7b..00000000000
--- a/spec/migrations/schedule_merge_request_diff_migrations_take_two_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20170926150348_schedule_merge_request_diff_migrations_take_two')
-
-describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
- let(:merge_request_diffs) { table(:merge_request_diffs) }
- let(:merge_requests) { table(:merge_requests) }
- let(:projects) { table(:projects) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
-
- projects.create!(id: 1, name: 'gitlab', path: 'gitlab')
-
- merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master')
-
- merge_request_diffs.create!(id: 1, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: nil)
- merge_request_diffs.create!(id: 2, merge_request_id: 1, st_commits: nil, st_diffs: YAML.dump([]))
- merge_request_diffs.create!(id: 3, merge_request_id: 1, st_commits: nil, st_diffs: nil)
- merge_request_diffs.create!(id: 4, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: YAML.dump([]))
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, 1, 1)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(20.minutes, 2, 2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(30.minutes, 4, 4)
- expect(BackgroundMigrationWorker.jobs.size).to eq 3
- end
- end
- end
-
- it 'migrates the data' do
- perform_enqueued_jobs do
- non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
-
- expect(merge_request_diffs.where(non_empty).count).to eq 3
-
- migrate!
-
- expect(merge_request_diffs.where(non_empty).count).to eq 0
- end
- end
-end
diff --git a/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb b/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb
deleted file mode 100644
index 578440cba20..00000000000
--- a/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb')
-
-describe SchedulePopulateMergeRequestMetricsWithEventsData, :migration, :sidekiq do
- # The commits_count attribute is added in a later migration
- before do
- allow_any_instance_of(MergeRequestDiff)
- .to receive(:commits_count=).and_return(nil)
- end
-
- let!(:mrs) { create_list(:merge_request, 3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- it 'correctly schedules background migrations' do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- Timecop.freeze do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(10.minutes, mrs.first.id, mrs.second.id)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(20.minutes, mrs.third.id, mrs.third.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
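
The same batching arithmetic explains the two expectations above: with BATCH_SIZE stubbed to 2 and three merge requests, the first job covers the first two ids and runs after 10 minutes, and the second covers the last id alone and runs after 20. In terms of the illustrative helper sketched earlier:

# Interval of 10 minutes instead of the 5 used above.
schedule_in_batches(mrs.map(&:id), 'SomeBackgroundMigration',
                    batch_size: 2, interval: 10.minutes)
# => perform_in(10.minutes, migration, [mrs.first.id, mrs.second.id])
# => perform_in(20.minutes, migration, [mrs.third.id, mrs.third.id])
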
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index e9a26400723..11b06ef5019 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -38,7 +38,7 @@ describe Ci::RetryBuildService do
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines artifacts_file_store artifacts_metadata_store
- metadata runner_session trace_chunks
+ metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size].freeze
shared_examples 'build duplication' do