author     Nick Thomas <nick@gitlab.com>  2019-07-01 14:37:18 +0100
committer  Nick Thomas <nick@gitlab.com>  2019-07-02 09:35:03 +0100
commit     351392f4090ee7e8fe5c4f47286afe6da14b1895 (patch)
tree       64fa15613b0e862847eb71395f000e1467d371a0 /spec
parent     876d4151167d237fda853585442d9a8e9681c23e (diff)
download   gitlab-ce-351392f4090ee7e8fe5c4f47286afe6da14b1895.tar.gz
Remove background migrations for old schemas
On the assumption that any background migration whose specs require a schema older than 2018 has been obsoleted by this migration squash, we can remove both the specs and the code for the migrations that now fail in CI because a schema from that date no longer exists. This holds for all of them except the MigrateStageStatus background migration, which is also used by the MigrateBuildStage background migration.
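As an illustration of why these specs break, each removed file pins the test database to a pre-2018 schema through RSpec metadata. The describe line below is taken verbatim from the first removed spec; the comment is an added explanation, not part of the original file:

require 'spec_helper'

describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
  # The :migration tag together with `schema:` rolls the test database back to
  # that exact schema version before the examples run; once the pre-2018
  # migrations are squashed away, CI can no longer reach this version and the
  # spec fails to set up.
end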
Diffstat (limited to 'spec')
-rw-r--r--  spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb              125
-rw-r--r--  spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb            35
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb              433
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb                                92
-rw-r--r--  spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb                    36
-rw-r--r--  spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb                        97
-rw-r--r--  spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb  62
7 files changed, 0 insertions, 880 deletions
diff --git a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb b/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
deleted file mode 100644
index 5076996474f..00000000000
--- a/spec/lib/gitlab/background_migration/create_fork_network_memberships_range_spec.rb
+++ /dev/null
@@ -1,125 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
-
- let(:base1) { projects.create }
- let(:base1_fork1) { projects.create }
- let(:base1_fork2) { projects.create }
-
- let(:base2) { projects.create }
- let(:base2_fork1) { projects.create }
- let(:base2_fork2) { projects.create }
-
- let(:fork_of_fork) { projects.create }
- let(:fork_of_fork2) { projects.create }
- let(:second_level_fork) { projects.create }
- let(:third_level_fork) { projects.create }
-
- let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
- let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
-
- let!(:forked_project_links) { table(:forked_project_links) }
- let!(:fork_networks) { table(:fork_networks) }
- let!(:fork_network_members) { table(:fork_network_members) }
-
- before do
- # The fork-network relation created for the forked project
- fork_networks.create(id: 1, root_project_id: base1.id)
- fork_network_members.create(project_id: base1.id, fork_network_id: 1)
- fork_networks.create(id: 2, root_project_id: base2.id)
- fork_network_members.create(project_id: base2.id, fork_network_id: 2)
-
- # Normal fork links
- forked_project_links.create(id: 1, forked_from_project_id: base1.id, forked_to_project_id: base1_fork1.id)
- forked_project_links.create(id: 2, forked_from_project_id: base1.id, forked_to_project_id: base1_fork2.id)
- forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id)
- forked_project_links.create(id: 4, forked_from_project_id: base2.id, forked_to_project_id: base2_fork2.id)
-
- # Fork links
- forked_project_links.create(id: 5, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork.id)
- forked_project_links.create(id: 6, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork2.id)
-
- # Forks 3 levels down
- forked_project_links.create(id: 7, forked_from_project_id: fork_of_fork.id, forked_to_project_id: second_level_fork.id)
- forked_project_links.create(id: 8, forked_from_project_id: second_level_fork.id, forked_to_project_id: third_level_fork.id)
-
- migration.perform(1, 8)
- end
-
- it 'creates memberships for the direct forks' do
- base1_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1_fork1.id)
- base1_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1_fork2.id)
- base2_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2_fork1.id)
- base2_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2_fork2.id)
-
- expect(base1_fork1_membership.forked_from_project_id).to eq(base1.id)
- expect(base1_fork2_membership.forked_from_project_id).to eq(base1.id)
- expect(base2_fork1_membership.forked_from_project_id).to eq(base2.id)
- expect(base2_fork2_membership.forked_from_project_id).to eq(base2.id)
- end
-
- it 'adds the fork network members for forks of forks' do
- fork_of_fork_membership = fork_network_members.find_by(project_id: fork_of_fork.id,
- fork_network_id: fork_network1.id)
- fork_of_fork2_membership = fork_network_members.find_by(project_id: fork_of_fork2.id,
- fork_network_id: fork_network1.id)
- second_level_fork_membership = fork_network_members.find_by(project_id: second_level_fork.id,
- fork_network_id: fork_network1.id)
- third_level_fork_membership = fork_network_members.find_by(project_id: third_level_fork.id,
- fork_network_id: fork_network1.id)
-
- expect(fork_of_fork_membership.forked_from_project_id).to eq(base1_fork1.id)
- expect(fork_of_fork2_membership.forked_from_project_id).to eq(base1_fork1.id)
- expect(second_level_fork_membership.forked_from_project_id).to eq(fork_of_fork.id)
- expect(third_level_fork_membership.forked_from_project_id).to eq(second_level_fork.id)
- end
-
- it 'reschedules itself when there are missing members' do
- allow(migration).to receive(:missing_members?).and_return(true)
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [1, 3])
-
- migration.perform(1, 3)
- end
-
- it 'can be repeated without effect' do
- expect { migration.perform(1, 7) }.not_to change { fork_network_members.count }
- end
-
- it 'knows it is finished for this range' do
- expect(migration.missing_members?(1, 8)).to be_falsy
- end
-
- it 'does not miss members for forks of forks for which the root was deleted' do
- forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: projects.create.id)
- base1.destroy
-
- expect(migration.missing_members?(7, 10)).to be_falsy
- end
-
- context 'with more forks' do
- before do
- forked_project_links.create(id: 9, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 10, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
- end
-
- it 'only processes a single batch of links at a time' do
- expect(fork_network_members.count).to eq(10)
-
- migration.perform(8, 10)
-
- expect(fork_network_members.count).to eq(12)
- end
-
- it 'knows when not all memberships within a batch have been created' do
- expect(migration.missing_members?(8, 10)).to be_truthy
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb b/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
deleted file mode 100644
index 9bae7e53b71..00000000000
--- a/spec/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange, :migration, schema: 20170907170235 do
- let!(:redirect_routes) { table(:redirect_routes) }
- let!(:routes) { table(:routes) }
-
- before do
- routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
- routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
- routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
- routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
- routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')
-
- # Valid redirects
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
- redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')
-
- # Conflicting redirects
- redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
- redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
- end
-
- # No-op. See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
- it 'NO-OP: does not delete any redirect_routes' do
- expect(redirect_routes.count).to eq(8)
-
- described_class.new.perform(1, 5)
-
- expect(redirect_routes.count).to eq(8)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb b/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
deleted file mode 100644
index 188969951a6..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_events_to_push_event_payloads_spec.rb
+++ /dev/null
@@ -1,433 +0,0 @@
-require 'spec_helper'
-
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
-describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event, :migration, schema: 20170608152748 do
- describe '#commit_title' do
- it 'returns nil when there are no commits' do
- expect(described_class.new.commit_title).to be_nil
- end
-
- it 'returns nil when there are commits without commit messages' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ id: '123' }])
-
- expect(event.commit_title).to be_nil
- end
-
- it 'returns the commit message when it is less than 70 characters long' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: 'Hello world' }])
-
- expect(event.commit_title).to eq('Hello world')
- end
-
- it 'returns the first line of a commit message if multiple lines are present' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: "Hello\n\nworld" }])
-
- expect(event.commit_title).to eq('Hello')
- end
-
- it 'truncates the commit to 70 characters when it is too long' do
- event = described_class.new
-
- allow(event).to receive(:commits).and_return([{ message: 'a' * 100 }])
-
- expect(event.commit_title).to eq(('a' * 67) + '...')
- end
- end
-
- describe '#commit_from_sha' do
- it 'returns nil when pushing to a new ref' do
- event = described_class.new
-
- allow(event).to receive(:create?).and_return(true)
-
- expect(event.commit_from_sha).to be_nil
- end
-
- it 'returns the ID of the first commit when pushing to an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:data).and_return(before: '123')
-
- expect(event.commit_from_sha).to eq('123')
- end
- end
-
- describe '#commit_to_sha' do
- it 'returns nil when removing an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:remove?).and_return(true)
-
- expect(event.commit_to_sha).to be_nil
- end
-
- it 'returns the ID of the last commit when pushing to an existing ref' do
- event = described_class.new
-
- allow(event).to receive(:remove?).and_return(false)
- allow(event).to receive(:data).and_return(after: '123')
-
- expect(event.commit_to_sha).to eq('123')
- end
- end
-
- describe '#data' do
- it 'returns the deserialized data' do
- event = described_class.new(data: { before: '123' })
-
- expect(event.data).to eq(before: '123')
- end
-
- it 'returns an empty hash when no data is present' do
- event = described_class.new
-
- expect(event.data).to eq({})
- end
- end
-
- describe '#commits' do
- it 'returns an Array of commits' do
- event = described_class.new(data: { commits: [{ id: '123' }] })
-
- expect(event.commits).to eq([{ id: '123' }])
- end
-
- it 'returns an empty array when no data is present' do
- event = described_class.new
-
- expect(event.commits).to eq([])
- end
- end
-
- describe '#commit_count' do
- it 'returns the number of commits' do
- event = described_class.new(data: { total_commits_count: 2 })
-
- expect(event.commit_count).to eq(2)
- end
-
- it 'returns 0 when no data is present' do
- event = described_class.new
-
- expect(event.commit_count).to eq(0)
- end
- end
-
- describe '#ref' do
- it 'returns the name of the ref' do
- event = described_class.new(data: { ref: 'refs/heads/master' })
-
- expect(event.ref).to eq('refs/heads/master')
- end
- end
-
- describe '#trimmed_ref_name' do
- it 'returns the trimmed ref name for a branch' do
- event = described_class.new(data: { ref: 'refs/heads/master' })
-
- expect(event.trimmed_ref_name).to eq('master')
- end
-
- it 'returns the trimmed ref name for a tag' do
- event = described_class.new(data: { ref: 'refs/tags/v1.2' })
-
- expect(event.trimmed_ref_name).to eq('v1.2')
- end
- end
-
- describe '#create?' do
- it 'returns true when creating a new ref' do
- event = described_class.new(data: { before: described_class::BLANK_REF })
-
- expect(event.create?).to eq(true)
- end
-
- it 'returns false when pushing to an existing ref' do
- event = described_class.new(data: { before: '123' })
-
- expect(event.create?).to eq(false)
- end
- end
-
- describe '#remove?' do
- it 'returns true when removing an existing ref' do
- event = described_class.new(data: { after: described_class::BLANK_REF })
-
- expect(event.remove?).to eq(true)
- end
-
- it 'returns false when pushing to an existing ref' do
- event = described_class.new(data: { after: '123' })
-
- expect(event.remove?).to eq(false)
- end
- end
-
- describe '#push_action' do
- let(:event) { described_class.new }
-
- it 'returns :created when creating a new ref' do
- allow(event).to receive(:create?).and_return(true)
-
- expect(event.push_action).to eq(:created)
- end
-
- it 'returns :removed when removing an existing ref' do
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:remove?).and_return(true)
-
- expect(event.push_action).to eq(:removed)
- end
-
- it 'returns :pushed when pushing to an existing ref' do
- allow(event).to receive(:create?).and_return(false)
- allow(event).to receive(:remove?).and_return(false)
-
- expect(event.push_action).to eq(:pushed)
- end
- end
-
- describe '#ref_type' do
- let(:event) { described_class.new }
-
- it 'returns :tag for a tag' do
- allow(event).to receive(:ref).and_return('refs/tags/1.2')
-
- expect(event.ref_type).to eq(:tag)
- end
-
- it 'returns :branch for a branch' do
- allow(event).to receive(:ref).and_return('refs/heads/1.2')
-
- expect(event.ref_type).to eq(:branch)
- end
- end
-end
-
-##
-# The background migration relies on a temporary table, hence we're migrating
-# to a specific version of the database where said table is still present.
-#
-describe Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads, :migration, schema: 20170825154015 do
- let(:user_class) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'users'
- end
- end
-
- let(:migration) { described_class.new }
- let(:user_class) { table(:users) }
- let(:author) { build(:user).becomes(user_class).tap(&:save!).becomes(User) }
- let(:namespace) { create(:namespace, owner: author) }
- let(:projects) { table(:projects) }
- let(:project) { projects.create(namespace_id: namespace.id, creator_id: author.id) }
-
- # We can not rely on FactoryBot as the state of Event may change in ways that
- # the background migration does not expect, hence we use the Event class of
- # the migration itself.
- def create_push_event(project, author, data = nil)
- klass = Gitlab::BackgroundMigration::MigrateEventsToPushEventPayloads::Event
-
- klass.create!(
- action: klass::PUSHED,
- project_id: project.id,
- author_id: author.id,
- data: data
- )
- end
-
- describe '#perform' do
- it 'returns if data should not be migrated' do
- allow(migration).to receive(:migrate?).and_return(false)
-
- expect(migration).not_to receive(:find_events)
-
- migration.perform(1, 10)
- end
-
- it 'migrates the range of events if data is to be migrated' do
- event1 = create_push_event(project, author, { commits: [] })
- event2 = create_push_event(project, author, { commits: [] })
-
- allow(migration).to receive(:migrate?).and_return(true)
-
- expect(migration).to receive(:process_event).twice
-
- migration.perform(event1.id, event2.id)
- end
- end
-
- describe '#process_event' do
- it 'processes a regular event' do
- event = double(:event, push_event?: false)
-
- expect(migration).to receive(:replicate_event)
- expect(migration).not_to receive(:create_push_event_payload)
-
- migration.process_event(event)
- end
-
- it 'processes a push event' do
- event = double(:event, push_event?: true)
-
- expect(migration).to receive(:replicate_event)
- expect(migration).to receive(:create_push_event_payload)
-
- migration.process_event(event)
- end
-
- it 'handles an error gracefully' do
- event1 = create_push_event(project, author, { commits: [] })
-
- expect(migration).to receive(:replicate_event).and_call_original
- expect(migration).to receive(:create_push_event_payload).and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key')
-
- migration.process_event(event1)
-
- expect(described_class::EventForMigration.all.count).to eq(0)
- end
- end
-
- describe '#replicate_event' do
- it 'replicates the event to the "events_for_migration" table' do
- event = create_push_event(
- project,
- author,
- data: { commits: [] },
- title: 'bla'
- )
-
- attributes = event
- .attributes.with_indifferent_access.except(:title, :data)
-
- expect(described_class::EventForMigration)
- .to receive(:create!)
- .with(attributes)
-
- migration.replicate_event(event)
- end
- end
-
- describe '#create_push_event_payload' do
- let(:push_data) do
- {
- commits: [],
- ref: 'refs/heads/master',
- before: '156e0e9adc587a383a7eeb5b21ddecb9044768a8',
- after: '0' * 40,
- total_commits_count: 1
- }
- end
-
- let(:event) do
- create_push_event(project, author, push_data)
- end
-
- before do
- # The foreign key in push_event_payloads at this point points to the
- # "events_for_migration" table so we need to make sure a row exists in
- # said table.
- migration.replicate_event(event)
- end
-
- it 'creates a push event payload for an event' do
- payload = migration.create_push_event_payload(event)
-
- expect(PushEventPayload.count).to eq(1)
- expect(payload.valid?).to eq(true)
- end
-
- it 'does not create push event payloads for removed events' do
- allow(event).to receive(:id).and_return(-1)
-
- expect { migration.create_push_event_payload(event) }.to raise_error(ActiveRecord::InvalidForeignKey)
-
- expect(PushEventPayload.count).to eq(0)
- end
-
- it 'encodes and decodes the commit IDs from and to binary data' do
- payload = migration.create_push_event_payload(event)
- packed = migration.pack(push_data[:before])
-
- expect(payload.commit_from).to eq(packed)
- expect(payload.commit_to).to be_nil
- end
- end
-
- describe '#find_events' do
- it 'returns the events for the given ID range' do
- event1 = create_push_event(project, author, { commits: [] })
- event2 = create_push_event(project, author, { commits: [] })
- event3 = create_push_event(project, author, { commits: [] })
- events = migration.find_events(event1.id, event2.id)
-
- expect(events.length).to eq(2)
- expect(events.pluck(:id)).not_to include(event3.id)
- end
- end
-
- describe '#migrate?' do
- it 'returns true when data should be migrated' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::EventForMigration)
- .to receive(:table_exists?).and_return(true)
-
- expect(migration.migrate?).to eq(true)
- end
-
- it 'returns false if the "events" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
-
- it 'returns false if the "push_event_payloads" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
-
- it 'returns false when the "events_for_migration" table does not exist' do
- allow(described_class::Event)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::PushEventPayload)
- .to receive(:table_exists?).and_return(true)
-
- allow(described_class::EventForMigration)
- .to receive(:table_exists?).and_return(false)
-
- expect(migration.migrate?).to eq(false)
- end
- end
-
- describe '#pack' do
- it 'packs a SHA1 into a 20 byte binary string' do
- packed = migration.pack('156e0e9adc587a383a7eeb5b21ddecb9044768a8')
-
- expect(packed.bytesize).to eq(20)
- end
-
- it 'returns nil if the input value is nil' do
- expect(migration.pack(nil)).to be_nil
- end
- end
-end
-# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb b/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb
deleted file mode 100644
index 89b56906ed0..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_stage_status_spec.rb
+++ /dev/null
@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:stages) { table(:ci_stages) }
- let(:jobs) { table(:ci_builds) }
-
- let(:statuses) do
- {
- created: 0,
- pending: 1,
- running: 2,
- success: 3,
- failed: 4,
- canceled: 5,
- skipped: 6,
- manual: 7
- }
- end
-
- before do
- projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
- pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
- stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
- stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil)
- end
-
- context 'when stage status is known' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:running]
- expect(stages.second.status).to eq statuses[:failed]
- end
- end
-
- context 'when stage status is not known' do
- it 'sets a skipped stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:skipped]
- expect(stages.second.status).to eq statuses[:skipped]
- end
- end
-
- context 'when stage status includes status of a retried job' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true)
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:canceled]
- expect(stages.second.status).to eq statuses[:success]
- end
- end
-
- context 'when some job in the stage is blocked / manual' do
- before do
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed')
- create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual')
- create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual')
- end
-
- it 'sets a correct stage status' do
- described_class.new.perform(1, 2)
-
- expect(stages.first.status).to eq statuses[:manual]
- expect(stages.second.status).to eq statuses[:success]
- end
- end
-
- def create_job(project:, pipeline:, stage:, status:, **opts)
- stages = { test: 1, build: 2, deploy: 3 }
-
- jobs.create!(project_id: project, commit_id: pipeline,
- stage_idx: stages[stage.to_sym], stage: stage,
- status: status, **opts)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb b/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
deleted file mode 100644
index dfbf1bb681a..00000000000
--- a/spec/lib/gitlab/background_migration/normalize_ldap_extern_uids_range_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange, :migration, schema: 20170921101004 do
- let!(:identities) { table(:identities) }
-
- before do
- # LDAP identities
- (1..4).each do |i|
- identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
- end
-
- # Non-LDAP identity
- identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)
-
- # Another LDAP identity
- identities.create!(id: 6, provider: 'ldapmain', extern_uid: " uid = foo 6, ou = People, dc = example, dc = com ", user_id: 6)
- end
-
- it 'normalizes the LDAP identities in the range' do
- described_class.new.perform(1, 3)
- expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
- expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
- expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
- expect(identities.find(4).extern_uid).to eq(" uid = foo 4, ou = People, dc = example, dc = com ")
- expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
- expect(identities.find(6).extern_uid).to eq(" uid = foo 6, ou = People, dc = example, dc = com ")
-
- described_class.new.perform(4, 6)
- expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
- expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
- expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
- expect(identities.find(4).extern_uid).to eq("uid=foo 4,ou=people,dc=example,dc=com")
- expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
- expect(identities.find(6).extern_uid).to eq("uid=foo 6,ou=people,dc=example,dc=com")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb b/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
deleted file mode 100644
index 0e73c8c59c9..00000000000
--- a/spec/lib/gitlab/background_migration/populate_fork_networks_range_spec.rb
+++ /dev/null
@@ -1,97 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
- let(:migration) { described_class.new }
- let(:projects) { table(:projects) }
- let(:base1) { projects.create }
-
- let(:base2) { projects.create }
- let(:base2_fork1) { projects.create }
-
- let!(:forked_project_links) { table(:forked_project_links) }
- let!(:fork_networks) { table(:fork_networks) }
- let!(:fork_network_members) { table(:fork_network_members) }
-
- let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
- let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
-
- before do
- # A normal fork link
- forked_project_links.create(id: 1,
- forked_from_project_id: base1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 2,
- forked_from_project_id: base1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 3,
- forked_from_project_id: base2.id,
- forked_to_project_id: base2_fork1.id)
-
- # create a fork of a fork
- forked_project_links.create(id: 4,
- forked_from_project_id: base2_fork1.id,
- forked_to_project_id: projects.create.id)
- forked_project_links.create(id: 5,
- forked_from_project_id: projects.create.id,
- forked_to_project_id: projects.create.id)
-
- # Stub out the calls to the other migrations
- allow(BackgroundMigrationWorker).to receive(:perform_in)
-
- migration.perform(1, 3)
- end
-
- it 'creates the fork network' do
- expect(fork_network1).not_to be_nil
- expect(fork_network2).not_to be_nil
- end
-
- it 'does not create a fork network for a fork-of-fork' do
- # perform the entire batch
- migration.perform(1, 5)
-
- expect(fork_networks.find_by(root_project_id: base2_fork1.id)).to be_nil
- end
-
- it 'creates memberships for the root of fork networks' do
- base1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
- project_id: base1.id)
- base2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
- project_id: base2.id)
-
- expect(base1_membership).not_to be_nil
- expect(base2_membership).not_to be_nil
- end
-
- it 'creates a fork network for the fork of which the source was deleted' do
- fork = projects.create
- forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
-
- migration.perform(5, 8)
-
- expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
- expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
- expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
- end
-
- it 'schedules a job for inserting memberships for forks-of-forks' do
- delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in).with(delay, "CreateForkNetworkMembershipsRange", [1, 3])
-
- migration.perform(1, 3)
- end
-
- it 'only processes a single batch of links at a time' do
- expect(fork_networks.count).to eq(2)
-
- migration.perform(3, 5)
-
- expect(fork_networks.count).to eq(3)
- end
-
- it 'can be repeated without effect' do
- expect { migration.perform(1, 3) }.not_to change { fork_network_members.count }
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
deleted file mode 100644
index 0cb753c5853..00000000000
--- a/spec/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id_spec.rb
+++ /dev/null
@@ -1,62 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
- let(:projects_table) { table(:projects) }
- let(:merge_requests_table) { table(:merge_requests) }
- let(:merge_request_diffs_table) { table(:merge_request_diffs) }
-
- let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
-
- def create_mr!(name, diffs: 0)
- merge_request =
- merge_requests_table.create!(target_project_id: project.id,
- target_branch: 'master',
- source_project_id: project.id,
- source_branch: name,
- title: name)
-
- diffs.times do
- merge_request_diffs_table.create!(merge_request_id: merge_request.id)
- end
-
- merge_request
- end
-
- def diffs_for(merge_request)
- merge_request_diffs_table.where(merge_request_id: merge_request.id)
- end
-
- describe '#perform' do
- it 'ignores MRs without diffs' do
- merge_request_without_diff = create_mr!('without_diff')
- mr_id = merge_request_without_diff.id
-
- expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
-
- expect { subject.perform(mr_id, mr_id) }
- .not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
- end
-
- it 'ignores MRs that have a diff ID already set' do
- merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
- diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
- mr_id = merge_request_with_multiple_diffs.id
-
- merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
-
- expect { subject.perform(mr_id, mr_id) }
- .not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
- end
-
- it 'migrates multiple MR diffs to the correct values' do
- merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
-
- subject.perform(merge_requests.first.id, merge_requests.last.id)
-
- merge_requests.each do |merge_request|
- expect(merge_request.reload.latest_merge_request_diff_id)
- .to eq(diffs_for(merge_request).maximum(:id))
- end
- end
- end
-end