author     GitLab Bot <gitlab-bot@gitlab.com>  2022-03-18 20:02:30 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-03-18 20:02:30 +0000
commit     41fe97390ceddf945f3d967b8fdb3de4c66b7dea (patch)
tree       9c8d89a8624828992f06d892cd2f43818ff5dcc8 /spec/workers
parent     0804d2dc31052fb45a1efecedc8e06ce9bc32862 (diff)
download   gitlab-ce-41fe97390ceddf945f3d967b8fdb3de4c66b7dea.tar.gz

Add latest changes from gitlab-org/gitlab@14-9-stable-ee (tag: v14.9.0-rc42)
Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/bulk_imports/export_request_worker_spec.rb | 25
-rw-r--r--  spec/workers/bulk_imports/pipeline_worker_spec.rb | 28
-rw-r--r--  spec/workers/database/batched_background_migration/ci_database_worker_spec.rb | 7
-rw-r--r--  spec/workers/database/batched_background_migration_worker_spec.rb | 118
-rw-r--r--  spec/workers/deployments/hooks_worker_spec.rb | 4
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 1
-rw-r--r--  spec/workers/loose_foreign_keys/cleanup_worker_spec.rb | 49
-rw-r--r--  spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb | 96
-rw-r--r--  spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb | 17
-rw-r--r--  spec/workers/quality/test_data_cleanup_worker_spec.rb | 44
-rw-r--r--  spec/workers/web_hook_worker_spec.rb | 9
11 files changed, 261 insertions, 137 deletions
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index f838bff528c..4f452e3dd60 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -28,6 +28,31 @@ RSpec.describe BulkImports::ExportRequestWorker do
perform_multiple(job_args)
end
+
+ context 'when network error is raised' do
+ it 'logs export failure and marks entity as failed' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:post).and_raise(BulkImports::NetworkError, 'Export error').twice
+ end
+
+ expect(Gitlab::Import::Logger).to receive(:warn).with(
+ bulk_import_entity_id: entity.id,
+ pipeline_class: 'ExportRequestWorker',
+ exception_class: 'BulkImports::NetworkError',
+ exception_message: 'Export error',
+ correlation_id_value: anything,
+ bulk_import_id: bulk_import.id,
+ bulk_import_entity_type: entity.source_type
+ ).twice
+
+ perform_multiple(job_args)
+
+ failure = entity.failures.last
+
+ expect(failure.pipeline_class).to eq('ExportRequestWorker')
+ expect(failure.exception_message).to eq('Export error')
+ end
+ end
end
end
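
The two new expectations above pin down how the worker reacts to BulkImports::NetworkError: it logs a warning with a fixed set of keys and records a failure on the entity. The worker implementation itself is not part of this diff; a minimal sketch of the rescue path the spec implies, with hypothetical helper names, could look like this:

  # Hedged sketch only: request_export and the exact structure are assumptions,
  # not code from this commit; only the log keys come from the spec above.
  module BulkImports
    class ExportRequestWorker
      include ApplicationWorker

      def perform(entity_id)
        entity = BulkImports::Entity.find(entity_id)

        request_export(entity) # hypothetical: POST to the source instance's export endpoint
      rescue BulkImports::NetworkError => e
        # Log the failure with the keys the spec asserts on...
        Gitlab::Import::Logger.warn(
          bulk_import_entity_id: entity.id,
          pipeline_class: 'ExportRequestWorker',
          exception_class: e.class.name,
          exception_message: e.message,
          correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id,
          bulk_import_id: entity.bulk_import_id,
          bulk_import_entity_type: entity.source_type
        )

        # ...and persist a failure record so entity.failures.last reflects it.
        entity.failures.create!(
          pipeline_class: 'ExportRequestWorker',
          exception_class: e.class.name,
          exception_message: e.message
        )
      end
    end
  end
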
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 2da9195a6ef..cb7e70a6749 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -136,6 +136,34 @@ RSpec.describe BulkImports::PipelineWorker do
expect(pipeline_tracker.jid).to eq('jid')
end
+ context 'when entity is failed' do
+ it 'marks tracker as failed and logs the error' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Pipeline',
+ status_event: 'enqueue'
+ )
+
+ entity.update!(status: -1)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'Pipeline',
+ entity_id: entity.id,
+ message: 'Failed entity status'
+ )
+ end
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ expect(pipeline_tracker.reload.status_name).to eq(:failed)
+ end
+ end
+
context 'when it is a network error' do
it 'reenqueue on retriable network errors' do
pipeline_tracker = create(
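
The new 'when entity is failed' context implies an early guard in the worker: if the tracker's entity has already failed, the tracker is failed too and an error is logged instead of running the pipeline. A rough sketch under that assumption (method and event names are guesses, only the log keys and message come from the spec):

  # Hedged sketch; the real PipelineWorker#perform is not shown in this diff.
  def perform(pipeline_tracker_id, stage, entity_id)
    pipeline_tracker = ::BulkImports::Tracker.find(pipeline_tracker_id)

    if pipeline_tracker.entity.failed?
      # logger is assumed to be a memoized Gitlab::Import::Logger instance.
      logger.error(
        worker: self.class.name,
        pipeline_name: pipeline_tracker.pipeline_name,
        entity_id: pipeline_tracker.entity.id,
        message: 'Failed entity status'
      )

      pipeline_tracker.fail_op! # state-machine event name assumed
    else
      run(pipeline_tracker) # hypothetical helper that executes the pipeline
    end
  end
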
diff --git a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
new file mode 100644
index 00000000000..2663c650986
--- /dev/null
+++ b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state do
+ it_behaves_like 'it runs batched background migration jobs', 'ci'
+end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
index b13d1f5c7aa..a6c7db60abe 100644
--- a/spec/workers/database/batched_background_migration_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -2,120 +2,6 @@
require 'spec_helper'
-RSpec.describe Database::BatchedBackgroundMigrationWorker, '#perform', :clean_gitlab_redis_shared_state do
- include ExclusiveLeaseHelpers
-
- let(:worker) { described_class.new }
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: false)
- end
-
- it 'does nothing' do
- expect(worker).not_to receive(:active_migration)
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(execute_batched_migrations_on_schedule: true)
-
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
- end
-
- context 'when no active migrations exist' do
- it 'does nothing' do
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when active migrations exist' do
- let(:job_interval) { 5.minutes }
- let(:lease_timeout) { 15.minutes }
- let(:lease_key) { 'batched_background_migration_worker' }
- let(:migration) { build(:batched_background_migration, :active, interval: job_interval) }
- let(:interval_variance) { described_class::INTERVAL_VARIANCE }
-
- before do
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
- .and_return(migration)
-
- allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
- allow(migration).to receive(:reload)
- end
-
- context 'when the reloaded migration is no longer active' do
- it 'does not run the migration' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect(migration).to receive(:reload)
- expect(migration).to receive(:active?).and_return(false)
-
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when the interval has not elapsed' do
- it 'does not run the migration' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
-
- expect(worker).not_to receive(:run_active_migration)
-
- worker.perform
- end
- end
-
- context 'when the reloaded migration is still active and the interval has elapsed' do
- it 'runs the migration' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
- expect(instance).to receive(:run_migration_job).with(migration)
- end
-
- expect(worker).to receive(:run_active_migration).and_call_original
-
- worker.perform
- end
- end
-
- context 'when the calculated timeout is less than the minimum allowed' do
- let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT }
- let(:job_interval) { 2.minutes }
-
- it 'sets the lease timeout to the minimum value' do
- expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
- expect(instance).to receive(:run_migration_job).with(migration)
- end
-
- expect(worker).to receive(:run_active_migration).and_call_original
-
- worker.perform
- end
- end
-
- it 'always cleans up the exclusive lease' do
- lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
-
- expect(lease).to receive(:try_obtain).and_return(true)
-
- expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
- expect(lease).to receive(:cancel)
-
- expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
- end
- end
- end
+RSpec.describe Database::BatchedBackgroundMigrationWorker do
+ it_behaves_like 'it runs batched background migration jobs', :main
end
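
Both the main worker spec and the new CI worker spec now lean on a single shared example parameterised by the tracking database (:main here, 'ci' in the new CI spec). The shared example itself is not included in this commit; presumably it wraps the contexts deleted above, roughly along these lines:

  # Hedged skeleton; the real shared example lives under spec/support and
  # is not shown in this diff.
  RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database|
    include ExclusiveLeaseHelpers

    describe '#perform' do
      let(:worker) { described_class.new }

      context 'when the feature flag is disabled' do
        before do
          stub_feature_flags(execute_batched_migrations_on_schedule: false)
        end

        it 'does nothing' do
          expect(worker).not_to receive(:active_migration)
          expect(worker).not_to receive(:run_active_migration)

          worker.perform
        end
      end

      # ...plus the active-migration, lease-timeout and lease-cleanup contexts
      # removed above, written once against the `tracking_database` connection.
    end
  end
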
diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb
index 50ead66cfbf..29b3e8d3ee4 100644
--- a/spec/workers/deployments/hooks_worker_spec.rb
+++ b/spec/workers/deployments/hooks_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Deployments::HooksWorker do
it 'executes project services for deployment_hooks' do
deployment = create(:deployment, :running)
project = deployment.project
- service = create(:integration, type: 'SlackService', project: project, deployment_events: true, active: true)
+ service = create(:integrations_slack, project: project, deployment_events: true)
expect(ProjectServiceWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
@@ -23,7 +23,7 @@ RSpec.describe Deployments::HooksWorker do
it 'does not execute an inactive service' do
deployment = create(:deployment, :running)
project = deployment.project
- create(:integration, type: 'SlackService', project: project, deployment_events: true, active: false)
+ create(:integrations_slack, project: project, deployment_events: true, active: false)
expect(ProjectServiceWorker).not_to receive(:perform_async)
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 1cd5d23d8fc..47205943f70 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -395,6 +395,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Projects::PostCreationWorker' => 3,
'Projects::ScheduleBulkRepositoryShardMovesWorker' => 3,
'Projects::UpdateRepositoryStorageWorker' => 3,
+ 'Projects::RefreshBuildArtifactsSizeStatisticsWorker' => 0,
'Prometheus::CreateDefaultAlertsWorker' => 3,
'PropagateIntegrationGroupWorker' => 3,
'PropagateIntegrationInheritDescendantWorker' => 3,
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index 6f4389a7541..1814abfac1d 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -102,8 +102,22 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
loose_fk_parent_table_2.delete_all
end
+ def perform_for(db:)
+ time = Time.current.midnight
+
+ if db == :main
+ time += 2.minutes
+ elsif db == :ci
+ time += 3.minutes
+ end
+
+ travel_to(time) do
+ described_class.new.perform
+ end
+ end
+
it 'cleans up all rows' do
- described_class.new.perform
+ perform_for(db: :main)
expect(loose_fk_child_table_1_1.count).to eq(0)
expect(loose_fk_child_table_1_2.where(parent_id_with_different_column: nil).count).to eq(4)
@@ -118,7 +132,7 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
it 'cleans up all rows' do
expect(LooseForeignKeys::BatchCleanerService).to receive(:new).exactly(:twice).and_call_original
- described_class.new.perform
+ perform_for(db: :main)
expect(loose_fk_child_table_1_1.count).to eq(0)
expect(loose_fk_child_table_1_2.where(parent_id_with_different_column: nil).count).to eq(4)
@@ -137,25 +151,40 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
end
it 'cleans up 2 rows' do
- expect { described_class.new.perform }.to change { count_deletable_rows }.by(-2)
+ expect { perform_for(db: :main) }.to change { count_deletable_rows }.by(-2)
end
end
describe 'multi-database support' do
- where(:current_minute, :configured_base_models, :expected_connection) do
- 2 | { main: ApplicationRecord, ci: Ci::ApplicationRecord } | ApplicationRecord.connection
- 3 | { main: ApplicationRecord, ci: Ci::ApplicationRecord } | Ci::ApplicationRecord.connection
- 2 | { main: ApplicationRecord } | ApplicationRecord.connection
- 3 | { main: ApplicationRecord } | ApplicationRecord.connection
+ where(:current_minute, :configured_base_models, :expected_connection_model) do
+ 2 | { main: 'ApplicationRecord', ci: 'Ci::ApplicationRecord' } | 'ApplicationRecord'
+ 3 | { main: 'ApplicationRecord', ci: 'Ci::ApplicationRecord' } | 'Ci::ApplicationRecord'
+ 2 | { main: 'ApplicationRecord' } | 'ApplicationRecord'
+ 3 | { main: 'ApplicationRecord' } | 'ApplicationRecord'
end
with_them do
+ let(:database_base_models) { configured_base_models.transform_values(&:constantize) }
+
+ let(:expected_connection) { expected_connection_model.constantize.connection }
+
before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(configured_base_models)
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(database_base_models)
+
+ if database_base_models.has_key?(:ci)
+ Gitlab::Database::SharedModel.using_connection(database_base_models[:ci].connection) do
+ LooseForeignKeys::DeletedRecord.create!(fully_qualified_table_name: 'public._test_loose_fk_parent_table_1', primary_key_value: 999)
+ LooseForeignKeys::DeletedRecord.create!(fully_qualified_table_name: 'public._test_loose_fk_parent_table_1', primary_key_value: 9991)
+ end
+ end
end
it 'uses the correct connection' do
- LooseForeignKeys::DeletedRecord.count.times do
+ record_count = Gitlab::Database::SharedModel.using_connection(expected_connection) do
+ LooseForeignKeys::DeletedRecord.count
+ end
+
+ record_count.times do
expect_next_found_instance_of(LooseForeignKeys::DeletedRecord) do |instance|
expect(instance.class.connection).to eq(expected_connection)
end
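
The new perform_for(db:) helper freezes time at minute 2 for :main and minute 3 for :ci, which only makes sense if the worker picks its connection from the current minute modulo the number of configured databases. A sketch of that selection, stated as an assumption rather than the actual implementation:

  # Hedged sketch of the routing the spec pins down with travel_to.
  def connection_for_current_minute
    base_models = Gitlab::Database.database_base_models.values

    # With { main:, ci: } configured, minute 2 % 2 == 0 selects main and
    # minute 3 % 2 == 1 selects ci; with only :main configured both minutes
    # map to main, matching the where-table in the multi-database context above.
    base_models[Time.current.min % base_models.count].connection
  end
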
diff --git a/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
new file mode 100644
index 00000000000..4a6a525a5a7
--- /dev/null
+++ b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform_work' do
+ before do
+ expect_next_instance_of(Projects::RefreshBuildArtifactsSizeStatisticsService) do |instance|
+ expect(instance).to receive(:execute).and_return(refresh)
+ end
+ end
+
+ context 'when refresh job is present' do
+ let(:refresh) do
+ build(
+ :project_build_artifacts_size_refresh,
+ :running,
+ project_id: 77,
+ last_job_artifact_id: 123
+ )
+ end
+
+ it 'logs refresh information' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, refresh.project_id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:last_job_artifact_id, refresh.last_job_artifact_id)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:last_batch, refresh.destroyed?)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:refresh_started_at, refresh.refresh_started_at)
+
+ worker.perform_work
+ end
+ end
+
+ context 'when refresh job is not present' do
+ let(:refresh) { nil }
+
+ it 'logs refresh information' do
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ worker.perform_work
+ end
+ end
+ end
+
+ describe '#remaining_work_count' do
+ subject { worker.remaining_work_count }
+
+ context 'and there are remaining refresh jobs' do
+ before do
+ create_list(:project_build_artifacts_size_refresh, 2, :pending)
+ end
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'and there are no remaining refresh jobs' do
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#max_running_jobs' do
+ subject { worker.max_running_jobs }
+
+ context 'when all projects_build_artifacts_size_refresh flags are enabled' do
+ it { is_expected.to eq(described_class::MAX_RUNNING_HIGH) }
+ end
+
+ context 'when projects_build_artifacts_size_refresh_high flags is disabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_MEDIUM) }
+ end
+
+ context 'when projects_build_artifacts_size_refresh_high and projects_build_artifacts_size_refresh_medium flags are disabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh_medium: false)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_LOW) }
+ end
+
+ context 'when all projects_build_artifacts_size_refresh flags are disabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_low: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh_medium: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: false)
+ end
+
+ it { is_expected.to eq(0) }
+ end
+ end
+end
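
The #max_running_jobs contexts describe a cascade over three feature flags. Only the flag names and the MAX_RUNNING_* constants appear in the spec; the control flow below is the obvious reading of those contexts, not code from this commit:

  # Hedged sketch of the flag cascade the spec exercises.
  def max_running_jobs
    if ::Feature.enabled?(:projects_build_artifacts_size_refresh_high)
      MAX_RUNNING_HIGH
    elsif ::Feature.enabled?(:projects_build_artifacts_size_refresh_medium)
      MAX_RUNNING_MEDIUM
    elsif ::Feature.enabled?(:projects_build_artifacts_size_refresh_low)
      MAX_RUNNING_LOW
    else
      0
    end
  end
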
diff --git a/spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb b/spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb
new file mode 100644
index 00000000000..b5775f37678
--- /dev/null
+++ b/spec/workers/projects/schedule_refresh_build_artifacts_size_statistics_worker_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ScheduleRefreshBuildArtifactsSizeStatisticsWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ include_examples 'an idempotent worker' do
+ it 'schedules Projects::RefreshBuildArtifactsSizeStatisticsWorker to be performed with capacity' do
+ expect(Projects::RefreshBuildArtifactsSizeStatisticsWorker).to receive(:perform_with_capacity).twice
+
+ subject
+ end
+ end
+ end
+end
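
perform_with_capacity comes from GitLab's LimitedCapacity::Worker concern, and the .twice expectation reflects the 'an idempotent worker' shared examples invoking the job more than once. Under those assumptions the scheduling worker is presumably a thin wrapper:

  # Hedged sketch; queue/cron configuration is omitted and assumed.
  module Projects
    class ScheduleRefreshBuildArtifactsSizeStatisticsWorker
      include ApplicationWorker

      idempotent!

      def perform
        # Delegates the real work to the capacity-limited worker tested above.
        Projects::RefreshBuildArtifactsSizeStatisticsWorker.perform_with_capacity
      end
    end
  end
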
diff --git a/spec/workers/quality/test_data_cleanup_worker_spec.rb b/spec/workers/quality/test_data_cleanup_worker_spec.rb
new file mode 100644
index 00000000000..a17e6e0cb1a
--- /dev/null
+++ b/spec/workers/quality/test_data_cleanup_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Quality::TestDataCleanupWorker do
+ subject { described_class.new }
+
+ shared_examples 'successful deletion' do
+ before do
+ allow(Gitlab).to receive(:staging?).and_return(true)
+ end
+
+ it 'removes test groups' do
+ expect { subject.perform }.to change(Group, :count).by(-test_group_count)
+ end
+ end
+
+ describe "#perform" do
+ context 'with multiple test groups to remove' do
+ let(:test_group_count) { 5 }
+ let!(:groups_to_remove) { create_list(:group, test_group_count, :test_group) }
+ let!(:group_to_keep) { create(:group, path: 'test-group-fulfillment-keep', created_at: 1.day.ago) }
+ let!(:non_test_group) { create(:group) }
+ let(:non_test_owner_group) { create(:group, path: 'test-group-fulfillment1234', created_at: 4.days.ago) }
+
+ before do
+ non_test_owner_group.add_owner(create(:user))
+ end
+
+ it_behaves_like 'successful deletion'
+ end
+
+ context 'with paid groups' do
+ let(:test_group_count) { 1 }
+ let!(:paid_group) { create(:group, :test_group) }
+
+ before do
+ allow(paid_group).to receive(:paid?).and_return(true)
+ end
+
+ it_behaves_like 'successful deletion'
+ end
+ end
+end
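
The fixtures suggest the worker only acts on staging, targets groups created by the :test_group factory trait (a 'test-group-fulfillment…' path older than some TTL) and skips paid groups; the spec also keeps a 4-day-old group with a real owner, so the actual worker presumably inspects the owner as well, which the sketch below omits. The threshold, path prefix and destroy call are assumptions for illustration, not taken from this commit:

  # Hedged sketch inferred from the fixtures above.
  def perform
    return unless Gitlab.staging?

    Group.where('path LIKE ?', 'test-group-fulfillment%')
         .where('created_at < ?', 3.days.ago)
         .find_each do |group|
      next if group.paid?

      Groups::DestroyService.new(group, group.owners.first).execute
    end
  end
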
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
index dbdf7a2b978..e2ff36975c4 100644
--- a/spec/workers/web_hook_worker_spec.rb
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -28,15 +28,6 @@ RSpec.describe WebHookWorker do
.to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(uuid)
end
- it 'retrieves recursion detection data, reinstates it, and cleans it from payload when passed through as data', :request_store, :aggregate_failures do
- uuid = SecureRandom.uuid
- full_data = data.merge({ _gitlab_recursion_detection_request_uuid: uuid })
-
- expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name, anything).to receive(:execute)
- expect { subject.perform(project_hook.id, full_data, hook_name) }
- .to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(uuid)
- end
-
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed