Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb | 63
-rw-r--r--  spec/workers/background_migration_worker_spec.rb | 2
-rw-r--r--  spec/workers/build_finished_worker_spec.rb | 38
-rw-r--r--  spec/workers/build_hooks_worker_spec.rb | 5
-rw-r--r--  spec/workers/bulk_import_worker_spec.rb | 22
-rw-r--r--  spec/workers/bulk_imports/entity_worker_spec.rb | 124
-rw-r--r--  spec/workers/bulk_imports/pipeline_worker_spec.rb | 125
-rw-r--r--  spec/workers/ci/drop_pipeline_worker_spec.rb | 36
-rw-r--r--  spec/workers/ci/initial_pipeline_process_worker_spec.rb | 21
-rw-r--r--  spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb | 53
-rw-r--r--  spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb | 2
-rw-r--r--  spec/workers/ci/test_failure_history_worker_spec.rb | 4
-rw-r--r--  spec/workers/concerns/worker_attributes_spec.rb | 74
-rw-r--r--  spec/workers/concerns/worker_context_spec.rb | 2
-rw-r--r--  spec/workers/container_expiration_policy_worker_spec.rb | 123
-rw-r--r--  spec/workers/database/batched_background_migration_worker_spec.rb | 121
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 2
-rw-r--r--  spec/workers/expire_build_artifacts_worker_spec.rb | 2
-rw-r--r--  spec/workers/expire_job_cache_worker_spec.rb | 21
-rw-r--r--  spec/workers/expire_pipeline_cache_worker_spec.rb | 17
-rw-r--r--  spec/workers/merge_requests/assignees_change_worker_spec.rb | 59
-rw-r--r--  spec/workers/merge_requests/create_pipeline_worker_spec.rb | 61
-rw-r--r--  spec/workers/merge_requests/handle_assignees_change_worker_spec.rb | 62
-rw-r--r--  spec/workers/merge_requests/resolve_todos_worker_spec.rb | 41
-rw-r--r--  spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb | 56
-rw-r--r--  spec/workers/new_issue_worker_spec.rb | 11
-rw-r--r--  spec/workers/new_merge_request_worker_spec.rb | 2
-rw-r--r--  spec/workers/packages/go/sync_packages_worker_spec.rb | 101
-rw-r--r--  spec/workers/packages/maven/metadata/sync_worker_spec.rb | 6
-rw-r--r--  spec/workers/packages/rubygems/extraction_worker_spec.rb | 54
-rw-r--r--  spec/workers/pages_update_configuration_worker_spec.rb | 2
-rw-r--r--  spec/workers/post_receive_spec.rb | 5
-rw-r--r--  spec/workers/projects/post_creation_worker_spec.rb | 86
-rw-r--r--  spec/workers/remove_expired_members_worker_spec.rb | 18
-rw-r--r--  spec/workers/repository_check/dispatch_worker_spec.rb | 7
-rw-r--r--  spec/workers/ssh_keys/expired_notification_worker_spec.rb | 58
-rw-r--r--  spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb | 66
-rw-r--r--  spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb | 15
38 files changed, 1419 insertions, 148 deletions
diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
index a27c431523e..0501fc3b8cf 100644
--- a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
@@ -3,16 +3,67 @@
require 'spec_helper'
RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker do
- let(:start_user_id) { 42 }
- let(:end_user_id) { 4242 }
+ let(:project) { create(:project) }
+ let(:user) { project.namespace.owner }
+ let(:start_user_id) { user.id }
+ let(:end_user_id) { start_user_id }
+ let(:execute_worker) { subject.perform(start_user_id, end_user_id) }
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :periodic_project_authorization_update_via_replica,
+ data_consistency: :delayed
describe '#perform' do
- it 'calls AuthorizedProjectUpdate::RecalculateForUserRangeService' do
- expect_next_instance_of(AuthorizedProjectUpdate::RecalculateForUserRangeService) do |service|
- expect(service).to receive(:execute)
+ context 'when the feature flag `periodic_project_authorization_update_via_replica` is enabled' do
+ before do
+ stub_feature_flags(periodic_project_authorization_update_via_replica: true)
+ end
+
+ context 'checks if project authorization update is required' do
+ it 'checks if a project_authorization refresh is needed for each of the users' do
+ User.where(id: start_user_id..end_user_id).each do |user|
+ expect(AuthorizedProjectUpdate::FindRecordsDueForRefreshService).to(
+ receive(:new).with(user).and_call_original)
+ end
+
+ execute_worker
+ end
+ end
+
+ context 'when there are project authorization records due for either removal or addition for a specific user' do
+ before do
+ user.project_authorizations.delete_all
+ end
+
+ it 'enqueues a new project authorization update job for the user' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to receive(:perform_async).with(user.id)
+
+ execute_worker
+ end
end
- subject.perform(start_user_id, end_user_id)
+ context 'when there are no additions or removals to be made to project authorizations for a specific user' do
+ it 'does not enqueue a new project authorization update job for the user' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to receive(:perform_async)
+
+ execute_worker
+ end
+ end
+ end
+
+ context 'when the feature flag `periodic_project_authorization_update_via_replica` is disabled' do
+ before do
+ stub_feature_flags(periodic_project_authorization_update_via_replica: false)
+ end
+
+ it 'calls AuthorizedProjectUpdate::RecalculateForUserRangeService' do
+ expect_next_instance_of(AuthorizedProjectUpdate::RecalculateForUserRangeService, start_user_id, end_user_id) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ execute_worker
+ end
end
end
end
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 8094efcaf04..4575c270042 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do
it 'sets the class that will be executed as the caller_id' do
expect(Gitlab::BackgroundMigration).to receive(:perform) do
- expect(Labkit::Context.current.to_h).to include('meta.caller_id' => 'Foo')
+ expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo')
end
worker.perform('Foo', [10, 20])
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 6d040f83dc7..5aca5d68677 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -6,10 +6,8 @@ RSpec.describe BuildFinishedWorker do
subject { described_class.new.perform(build.id) }
describe '#perform' do
- let(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
-
context 'when build exists' do
- let!(:build) { create(:ci_build) }
+ let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
before do
expect(Ci::Build).to receive(:find_by).with(id: build.id).and_return(build)
@@ -30,6 +28,30 @@ RSpec.describe BuildFinishedWorker do
subject
end
+
+ context 'when build is failed' do
+ before do
+ build.update!(status: :failed)
+ end
+
+ it 'adds a todo' do
+ expect(::Ci::MergeRequests::AddTodoWhenBuildFailsWorker).to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when build has a chat' do
+ before do
+ build.pipeline.update!(source: :chat)
+ end
+
+ it 'schedules a ChatNotification job' do
+ expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
+
+ subject
+ end
+ end
end
context 'when build does not exist' do
@@ -38,15 +60,5 @@ RSpec.describe BuildFinishedWorker do
.not_to raise_error
end
end
-
- context 'when build has a chat' do
- let(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline, source: :chat)) }
-
- it 'schedules a ChatNotification job' do
- expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index aefbd7e590e..7e469958a84 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -22,4 +22,9 @@ RSpec.describe BuildHooksWorker do
end
end
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :load_balancing_for_build_hooks_worker,
+ data_consistency: :delayed
end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 8cf14ed6f8b..5964ec45563 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -4,10 +4,6 @@ require 'spec_helper'
RSpec.describe BulkImportWorker do
describe '#perform' do
- before do
- stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
- end
-
context 'when no bulk import is found' do
it 'does nothing' do
expect(described_class).not_to receive(:perform_in)
@@ -59,10 +55,26 @@ RSpec.describe BulkImportWorker do
expect(bulk_import.reload.started?).to eq(true)
end
+ it 'creates all the required pipeline trackers' do
+ bulk_import = create(:bulk_import, :created)
+ entity_1 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ expect { subject.perform(bulk_import.id) }
+ .to change(BulkImports::Tracker, :count)
+ .by(BulkImports::Stage.pipelines.size * 2)
+
+ expect(entity_1.trackers).not_to be_empty
+ expect(entity_2.trackers).not_to be_empty
+ end
+
context 'when there are created entities to process' do
it 'marks a batch of entities as started, enqueues BulkImports::EntityWorker and reenqueues' do
+ stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
+
bulk_import = create(:bulk_import, :created)
- (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :created, bulk_import: bulk_import) }
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect(BulkImports::EntityWorker).to receive(:perform_async)
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index cd9a6f605b9..deae15a3ca2 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -3,51 +3,107 @@
require 'spec_helper'
RSpec.describe BulkImports::EntityWorker do
- describe '#execute' do
- let(:bulk_import) { create(:bulk_import) }
-
- context 'when started entity exists' do
- let(:entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
-
- it 'executes BulkImports::Importers::GroupImporter' do
- expect(BulkImports::Importers::GroupImporter).to receive(:new).with(entity).and_call_original
+ let_it_be(:entity) { create(:bulk_import_entity) }
+
+ let_it_be(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Stage0::Pipeline',
+ stage: 0
+ )
+ end
- subject.perform(entity.id)
- end
+ it "enqueues the first stage pipelines' work" do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: nil
+ )
+ end
- it 'sets jid' do
- jid = 'jid'
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .with(
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ entity.id
+ )
- allow(subject).to receive(:jid).and_return(jid)
+ subject.perform(entity.id)
+ end
- subject.perform(entity.id)
+ it 'does not enqueue a new pipeline job if the current stage is still running' do
+ expect(BulkImports::PipelineWorker)
+ .not_to receive(:perform_async)
- expect(entity.reload.jid).to eq(jid)
- end
+ subject.perform(entity.id, 0)
+ end
- context 'when exception occurs' do
- it 'tracks the exception & marks entity as failed' do
- allow(BulkImports::Importers::GroupImporter).to receive(:new) { raise StandardError }
+ it 'enqueues the next stage pipelines when the current stage is finished' do
+ next_stage_pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Stage1::Pipeline',
+ stage: 1
+ )
+
+ pipeline_tracker.fail_op!
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: 0
+ )
+ end
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(kind_of(StandardError), bulk_import_id: bulk_import.id, entity_id: entity.id)
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .with(
+ next_stage_pipeline_tracker.id,
+ next_stage_pipeline_tracker.stage,
+ entity.id
+ )
- subject.perform(entity.id)
+ subject.perform(entity.id, 0)
+ end
- expect(entity.reload.failed?).to eq(true)
- end
- end
+ it 'logs and tracks the raised exceptions' do
+ exception = StandardError.new('Error!')
+
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .and_raise(exception)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: nil
+ )
+
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: nil,
+ error_message: 'Error!'
+ )
end
- context 'when started entity does not exist' do
- it 'does not execute BulkImports::Importers::GroupImporter' do
- entity = create(:bulk_import_entity, bulk_import: bulk_import)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(exception, entity_id: entity.id)
- expect(BulkImports::Importers::GroupImporter).not_to receive(:new)
-
- subject.perform(entity.id)
- end
- end
+ subject.perform(entity.id)
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
new file mode 100644
index 00000000000..27151177634
--- /dev/null
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::PipelineWorker do
+ let(:pipeline_class) do
+ Class.new do
+ def initialize(_); end
+
+ def run; end
+ end
+ end
+
+ let_it_be(:entity) { create(:bulk_import_entity) }
+
+ before do
+ stub_const('FakePipeline', pipeline_class)
+ end
+
+ it 'runs the given pipeline successfully' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ expect(BulkImports::Stage)
+ .to receive(:pipeline_exists?)
+ .with('FakePipeline')
+ .and_return(true)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'FakePipeline',
+ entity_id: entity.id
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ expect(subject).to receive(:jid).and_return('jid')
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:finished)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+
+ context 'when the pipeline cannot be found' do
+ it 'logs the error' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ :started,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ pipeline_tracker_id: pipeline_tracker.id,
+ entity_id: entity.id,
+ message: 'Unstarted pipeline not found'
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
+
+ context 'when the pipeline raises an exception' do
+ it 'logs the error' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'InexistentPipeline'
+ )
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'InexistentPipeline',
+ entity_id: entity.id,
+ message: "'InexistentPipeline' is not a valid BulkImport Pipeline"
+ )
+ end
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(NameError),
+ entity_id: entity.id,
+ pipeline_name: pipeline_tracker.pipeline_name
+ )
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ expect(subject).to receive(:jid).and_return('jid')
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:failed)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+ end
+end
diff --git a/spec/workers/ci/drop_pipeline_worker_spec.rb b/spec/workers/ci/drop_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..5e626112520
--- /dev/null
+++ b/spec/workers/ci/drop_pipeline_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DropPipelineWorker do
+ include AfterNextHelpers
+
+ let(:pipeline) { create(:ci_pipeline, :running) }
+ let(:failure_reason) { :user_blocked }
+
+ describe '#perform' do
+ subject { described_class.new.perform(pipeline.id, failure_reason) }
+
+ it 'delegates to the service' do
+ expect_next(Ci::DropPipelineService).to receive(:execute).with(pipeline, failure_reason)
+
+ subject
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let!(:running_build) { create(:ci_build, :running, pipeline: pipeline) }
+ let!(:success_build) { create(:ci_build, :success, pipeline: pipeline) }
+
+ let(:job_args) { [pipeline.id, failure_reason] }
+
+ it 'executes the service', :aggregate_failures do
+ subject
+
+ expect(running_build.reload).to be_failed
+ expect(running_build.failure_reason).to eq(failure_reason.to_s)
+
+ expect(success_build.reload).to be_success
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/initial_pipeline_process_worker_spec.rb b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
new file mode 100644
index 00000000000..5db9287fe96
--- /dev/null
+++ b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::InitialPipelineProcessWorker do
+ describe '#perform' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :with_job, status: :created) }
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { pipeline.id }
+
+ it 'marks the pipeline as pending' do
+ expect(pipeline).to be_created
+
+ subject
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
new file mode 100644
index 00000000000..4690c73d121
--- /dev/null
+++ b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::MergeRequests::AddTodoWhenBuildFailsWorker do
+ describe '#perform' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline) }
+ let_it_be(:job) { create(:ci_build, project: project, pipeline: pipeline, status: :failed) }
+
+ let(:job_args) { job.id }
+
+ subject(:perform_twice) { perform_multiple(job_args, exec_times: 2) }
+
+ include_examples 'an idempotent worker' do
+ it 'executes todo service' do
+ service = double
+ expect(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).with(project, nil).and_return(service).twice
+ expect(service).to receive(:execute).with(job).twice
+
+ perform_twice
+ end
+ end
+
+ context 'when job does not exist' do
+ let(:job_args) { 0 }
+
+ it 'returns nil' do
+ expect(described_class.new.perform(job_args)).to eq(nil)
+ end
+ end
+
+ context 'when project does not exist' do
+ before do
+ job.update!(project_id: nil)
+ end
+
+ it 'returns nil' do
+ expect(described_class.new.perform(job_args)).to eq(nil)
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ before do
+ job.update_attribute('pipeline_id', nil)
+ end
+
+ it 'returns nil' do
+ expect(described_class.new.perform(job_args)).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
index 2bdd8345374..ad9c08d02cb 100644
--- a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
end
it 'executes a service' do
- expect_next_instance_of(::Ci::PipelineArtifacts::DestroyExpiredArtifactsService) do |instance|
+ expect_next_instance_of(::Ci::PipelineArtifacts::DestroyAllExpiredService) do |instance|
expect(instance).to receive(:execute)
end
diff --git a/spec/workers/ci/test_failure_history_worker_spec.rb b/spec/workers/ci/test_failure_history_worker_spec.rb
index d2896c08209..7530077d4ad 100644
--- a/spec/workers/ci/test_failure_history_worker_spec.rb
+++ b/spec/workers/ci/test_failure_history_worker_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe ::Ci::TestFailureHistoryWorker do
subject
- expect(Ci::TestCase.count).to eq(2)
- expect(Ci::TestCaseFailure.count).to eq(2)
+ expect(Ci::UnitTest.count).to eq(2)
+ expect(Ci::UnitTestFailure.count).to eq(2)
end
end
end
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
new file mode 100644
index 00000000000..a654ecbd3e2
--- /dev/null
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkerAttributes do
+ let(:worker) do
+ Class.new do
+ def self.name
+ "TestWorker"
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ describe '.data_consistency' do
+ context 'with valid data_consistency' do
+ it 'returns correct data_consistency' do
+ worker.data_consistency(:sticky)
+
+ expect(worker.get_data_consistency).to eq(:sticky)
+ end
+ end
+
+ context 'when data_consistency is not provided' do
+ it 'defaults to :always' do
+ expect(worker.get_data_consistency).to eq(:always)
+ end
+ end
+
+ context 'with invalid data_consistency' do
+ it 'raises an exception' do
+ expect { worker.data_consistency(:invalid) }
+ .to raise_error('Invalid data consistency: invalid')
+ end
+ end
+
+ context 'when job is idempotent' do
+ context 'when data_consistency is not :always' do
+ it 'raises an exception' do
+ worker.idempotent!
+
+ expect { worker.data_consistency(:sticky) }
+ .to raise_error("Class can't be marked as idempotent if data_consistency is not set to :always")
+ end
+ end
+
+ context 'when feature_flag is provided' do
+ before do
+ stub_feature_flags(test_feature_flag: false)
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ it 'returns correct feature flag value' do
+ worker.data_consistency(:sticky, feature_flag: :test_feature_flag)
+
+ expect(worker.get_data_consistency_feature_flag_enabled?).not_to be_truthy
+ end
+ end
+ end
+ end
+
+ describe '.idempotent!' do
+ context 'when data consistency is not :always' do
+ it 'raises an exception' do
+ worker.data_consistency(:sticky)
+
+ expect { worker.idempotent! }
+ .to raise_error("Class can't be marked as idempotent if data_consistency is not set to :always")
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb
index 3de37b99aba..ebdb752d900 100644
--- a/spec/workers/concerns/worker_context_spec.rb
+++ b/spec/workers/concerns/worker_context_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe WorkerContext do
describe '#with_context' do
it 'allows modifying context when the job is running' do
worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do
- expect(Labkit::Context.current.to_h).to include('meta.user' => 'jane-doe')
+ expect(Gitlab::ApplicationContext.current).to include('meta.user' => 'jane-doe')
end
end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index d9a4f6396f8..2d5176e874d 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ContainerExpirationPolicyWorker do
describe '#perform' do
subject { worker.perform }
- RSpec.shared_examples 'not executing any policy' do
+ shared_examples 'not executing any policy' do
it 'does not run any policy' do
expect(ContainerExpirationPolicyService).not_to receive(:new)
@@ -19,6 +19,21 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
end
+ shared_examples 'handling a taken exclusive lease' do
+ context 'with exclusive lease taken' do
+ before do
+ stub_exclusive_lease_taken(worker.lease_key, timeout: 5.hours)
+ end
+
+ it 'does not do anything' do
+ expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).not_to receive(:perform_with_capacity)
+ expect(worker).not_to receive(:runnable_policies)
+
+ expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ end
+ end
+ end
+
context 'With no container expiration policies' do
it 'does not execute any policies' do
expect(ContainerRepository).not_to receive(:for_project_id)
@@ -27,66 +42,86 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
end
- context 'with container expiration policies' do
- let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
- let_it_be(:container_repository) { create(:container_repository, project: container_expiration_policy.project) }
+ context 'with throttling enabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: true)
+ end
- context 'with a valid container expiration policy' do
- it 'schedules the next run' do
- expect { subject }.to change { container_expiration_policy.reload.next_run_at }
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
end
- it 'marks the container repository as scheduled for cleanup' do
- expect { subject }.to change { container_repository.reload.cleanup_scheduled? }.from(false).to(true)
- expect(ContainerRepository.cleanup_scheduled.count).to eq(1)
- end
+ context 'with container expiration policies' do
+ let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
+ let_it_be(:container_repository) { create(:container_repository, project: container_expiration_policy.project) }
- it 'calls the limited capacity worker' do
- expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
+ before do
+ expect(worker).to receive(:with_runnable_policy).and_call_original
+ end
- subject
- end
- end
+ context 'with a valid container expiration policy' do
+ it 'schedules the next run' do
+ expect { subject }.to change { container_expiration_policy.reload.next_run_at }
+ end
- context 'with a disabled container expiration policy' do
- before do
- container_expiration_policy.disable!
- end
+ it 'marks the container repository as scheduled for cleanup' do
+ expect { subject }.to change { container_repository.reload.cleanup_scheduled? }.from(false).to(true)
+ expect(ContainerRepository.cleanup_scheduled.count).to eq(1)
+ end
- it 'does not run the policy' do
- expect(ContainerRepository).not_to receive(:for_project_id)
+ it 'calls the limited capacity worker' do
+ expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
- expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
- end
- end
+ subject
+ end
+ end
- context 'with an invalid container expiration policy' do
- let(:user) { container_expiration_policy.project.owner }
+ context 'with a disabled container expiration policy' do
+ before do
+ container_expiration_policy.disable!
+ end
- before do
- container_expiration_policy.update_column(:name_regex, '*production')
- end
+ it 'does not run the policy' do
+ expect(ContainerRepository).not_to receive(:for_project_id)
- it 'disables the policy and tracks an error' do
- expect(ContainerRepository).not_to receive(:for_project_id)
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(described_class::InvalidPolicyError), container_expiration_policy_id: container_expiration_policy.id)
+ expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ end
+ end
- expect { subject }.to change { container_expiration_policy.reload.enabled }.from(true).to(false)
- expect(ContainerRepository.cleanup_scheduled).to be_empty
+ context 'with an invalid container expiration policy' do
+ let(:user) { container_expiration_policy.project.owner }
+
+ before do
+ container_expiration_policy.update_column(:name_regex, '*production')
+ end
+
+ it 'disables the policy and tracks an error' do
+ expect(ContainerRepository).not_to receive(:for_project_id)
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(described_class::InvalidPolicyError), container_expiration_policy_id: container_expiration_policy.id)
+
+ expect { subject }.to change { container_expiration_policy.reload.enabled }.from(true).to(false)
+ expect(ContainerRepository.cleanup_scheduled).to be_empty
+ end
+ end
end
- end
- end
- context 'with exclusive lease taken' do
- before do
- stub_exclusive_lease_taken(worker.lease_key, timeout: 5.hours)
+ it_behaves_like 'handling a taken exclusive lease'
end
- it 'does not execute any policy' do
- expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).not_to receive(:perform_with_capacity)
- expect(worker).not_to receive(:runnable_policies)
+ context 'with loopless enabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: true)
+ expect(worker).not_to receive(:with_runnable_policy)
+ end
- expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ it 'calls the limited capacity worker' do
+ expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
+
+ subject
+ end
+
+ it_behaves_like 'handling a taken exclusive lease'
end
end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
new file mode 100644
index 00000000000..b13d1f5c7aa
--- /dev/null
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigrationWorker, '#perform', :clean_gitlab_redis_shared_state do
+ include ExclusiveLeaseHelpers
+
+ let(:worker) { described_class.new }
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: false)
+ end
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:active_migration)
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
+
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
+ end
+
+ context 'when no active migrations exist' do
+ it 'does nothing' do
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when active migrations exist' do
+ let(:job_interval) { 5.minutes }
+ let(:lease_timeout) { 15.minutes }
+ let(:lease_key) { 'batched_background_migration_worker' }
+ let(:migration) { build(:batched_background_migration, :active, interval: job_interval) }
+ let(:interval_variance) { described_class::INTERVAL_VARIANCE }
+
+ before do
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .and_return(migration)
+
+ allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
+ allow(migration).to receive(:reload)
+ end
+
+ context 'when the reloaded migration is no longer active' do
+ it 'does not run the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect(migration).to receive(:reload)
+ expect(migration).to receive(:active?).and_return(false)
+
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the interval has not elapsed' do
+ it 'does not run the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
+
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the reloaded migration is still active and the interval has elapsed' do
+ it 'runs the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect(worker).to receive(:run_active_migration).and_call_original
+
+ worker.perform
+ end
+ end
+
+ context 'when the calculated timeout is less than the minimum allowed' do
+ let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT }
+ let(:job_interval) { 2.minutes }
+
+ it 'sets the lease timeout to the minimum value' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect(worker).to receive(:run_active_migration).and_call_original
+
+ worker.perform
+ end
+ end
+
+ it 'always cleans up the exclusive lease' do
+ lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
+
+ expect(lease).to receive(:try_obtain).and_return(true)
+
+ expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
+ expect(lease).to receive(:cancel)
+
+ expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
+ end
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 3bb9db07ff3..5a22529b6d6 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Every Sidekiq worker' do
let(:workers_without_defaults) do
- Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS
+ Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS.values
end
it 'does not use the default queue' do
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 6d73d715d21..3f8da3fb71c 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe ExpireBuildArtifactsWorker do
describe '#perform' do
it 'executes a service' do
- expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
+ expect_next_instance_of(Ci::JobArtifacts::DestroyAllExpiredService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index 95c54a762a4..8efead31a42 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe ExpireJobCacheWorker do
describe '#perform' do
context 'with a job in the pipeline' do
- let(:job) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:job) { create(:ci_build, pipeline: pipeline) }
+
let(:job_args) { job.id }
include_examples 'an idempotent worker' do
@@ -31,6 +32,24 @@ RSpec.describe ExpireJobCacheWorker do
subject
end
end
+
+ it 'does not perform extra queries', :aggregate_failures do
+ worker = described_class.new
+ recorder = ActiveRecord::QueryRecorder.new { worker.perform(job.id) }
+
+ occurrences = recorder.data.values.flat_map { |v| v[:occurrences] }
+ project_queries = occurrences.select { |s| s.include?('FROM "projects"') }
+ namespace_queries = occurrences.select { |s| s.include?('FROM "namespaces"') }
+ route_queries = occurrences.select { |s| s.include?('FROM "routes"') }
+
+ # This worker is run 1 million times an hour, so we need to save as many
+ # queries as possible.
+ expect(recorder.count).to be <= 1
+
+ expect(project_queries.size).to eq(0)
+ expect(namespace_queries.size).to eq(0)
+ expect(route_queries.size).to eq(0)
+ end
end
context 'when there is no job in the pipeline' do
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index a8c21aa9f83..de42eeeab75 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -18,6 +18,23 @@ RSpec.describe ExpirePipelineCacheWorker do
subject.perform(pipeline.id)
end
+ it 'does not perform extra queries', :aggregate_failures do
+ recorder = ActiveRecord::QueryRecorder.new { subject.perform(pipeline.id) }
+
+ project_queries = recorder.data.values.flat_map { |v| v[:occurrences] }.select { |s| s.include?('FROM "projects"') }
+ namespace_queries = recorder.data.values.flat_map { |v| v[:occurrences] }.select { |s| s.include?('FROM "namespaces"') }
+ route_queries = recorder.data.values.flat_map { |v| v[:occurrences] }.select { |s| s.include?('FROM "routes"') }
+
+ # This worker is run 1 million times an hour, so we need to save as many
+ # queries as possible.
+ expect(recorder.count).to be <= 6
+
+ # These arise from #update_etag_cache
+ expect(project_queries.size).to eq(1)
+ expect(namespace_queries.size).to eq(1)
+ expect(route_queries.size).to eq(1)
+ end
+
it "doesn't do anything if the pipeline not exist" do
expect_any_instance_of(Ci::ExpirePipelineCacheService).not_to receive(:execute)
expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch)
diff --git a/spec/workers/merge_requests/assignees_change_worker_spec.rb b/spec/workers/merge_requests/assignees_change_worker_spec.rb
new file mode 100644
index 00000000000..33478daf8d3
--- /dev/null
+++ b/spec/workers/merge_requests/assignees_change_worker_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::AssigneesChangeWorker do
+ include AfterNextHelpers
+
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:old_assignees) { create_list(:user, 3) }
+
+ let(:user_ids) { old_assignees.map(&:id).to_a }
+ let(:worker) { described_class.new }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request.id, user.id, user_ids] }
+ end
+
+ describe '#perform' do
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id, user_ids)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id, user_ids)
+ end
+ end
+
+ context 'when there are no changes' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, user.id, merge_request.assignee_ids)
+ end
+ end
+
+ context 'when the old users cannot be found' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, user.id, [non_existing_record_id])
+ end
+ end
+
+ it 'calls MergeRequests::HandleAssigneesChangeService to handle the changes' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, match_array(old_assignees), execute_hooks: true)
+
+ worker.perform(merge_request.id, user.id, user_ids)
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/create_pipeline_worker_spec.rb b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..8efce5220be
--- /dev/null
+++ b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreatePipelineWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:merge_request) { create(:merge_request) }
+
+ context 'when the objects exist' do
+ it 'calls the merge request create pipeline service and calls update head pipeline' do
+ aggregate_failures do
+ expect_next_instance_of(MergeRequests::CreatePipelineService, project, user) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ expect(MergeRequest).to receive(:find_by_id).with(merge_request.id).and_return(merge_request)
+ expect(merge_request).to receive(:update_head_pipeline)
+
+ subject.perform(project.id, user.id, merge_request.id)
+ end
+ end
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'does not call the create pipeline service' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+
+ expect { subject.perform(project.id, user.id, merge_request.id) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the project does not exist' do
+ before do
+ project.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/handle_assignees_change_worker_spec.rb b/spec/workers/merge_requests/handle_assignees_change_worker_spec.rb
new file mode 100644
index 00000000000..4b45f3562d6
--- /dev/null
+++ b/spec/workers/merge_requests/handle_assignees_change_worker_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::HandleAssigneesChangeWorker do
+ include AfterNextHelpers
+
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:old_assignees) { create_list(:user, 3) }
+
+ let(:user_ids) { old_assignees.map(&:id).to_a }
+ let(:options) { {} }
+ let(:worker) { described_class.new }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request.id, user.id, user_ids, options] }
+ end
+
+ describe '#perform' do
+ it 'calls MergeRequests::HandleAssigneesChangeService#execute to handle the changes' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, match_array(old_assignees), options)
+
+ worker.perform(merge_request.id, user.id, user_ids, options)
+ end
+
+ context 'when there are no changes' do
+ it 'still calls MergeRequests::HandleAssigneesChangeService#execute' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, [], options)
+
+ worker.perform(merge_request.id, user.id, merge_request.assignee_ids, options)
+ end
+ end
+
+ context 'when the old assignees cannot be found' do
+ it 'still calls MergeRequests::HandleAssigneesChangeService#execute' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, [], options)
+
+ worker.perform(merge_request.id, user.id, [non_existing_record_id], options)
+ end
+ end
+
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id, user_ids, options)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id, user_ids, options)
+ end
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/resolve_todos_worker_spec.rb b/spec/workers/merge_requests/resolve_todos_worker_spec.rb
new file mode 100644
index 00000000000..223b8b6803c
--- /dev/null
+++ b/spec/workers/merge_requests/resolve_todos_worker_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ResolveTodosWorker do
+ include AfterNextHelpers
+
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+
+ let(:worker) { described_class.new }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request.id, user.id] }
+ end
+
+ describe '#perform' do
+ it 'calls MergeRequests::ResolveTodosService#execute' do
+ expect_next(::MergeRequests::ResolveTodosService, merge_request, user)
+ .to receive(:execute)
+
+ worker.perform(merge_request.id, user.id)
+ end
+
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::ResolveTodosService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::ResolveTodosService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
index 24143e8cf8a..3b94eb0d1be 100644
--- a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
+++ b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
@@ -3,45 +3,37 @@
require 'spec_helper'
RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform' do
- context 'when the application setting is enabled' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: true)
+ using RSpec::Parameterized::TableSyntax
+
+ # Running this in EE would call the overridden method, which can't be tested in CE.
+ # The EE code is covered in a separate EE spec.
+ context 'not on gitlab.com', unless: Gitlab.ee? do
+ let(:is_gitlab_com) { false }
+
+ where(:in_product_marketing_emails_enabled, :experiment_active, :executes_service) do
+ true | true | 1
+ true | false | 1
+ false | false | 0
+ false | true | 0
end
- context 'when the experiment is inactive' do
- before do
- stub_experiment(in_product_marketing_emails: false)
- end
-
- it 'does not execute the in product marketing emails service' do
- expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
-
- subject.perform
- end
- end
-
- context 'when the experiment is active' do
- before do
- stub_experiment(in_product_marketing_emails: true)
- end
-
- it 'calls the send_for_all_tracks_and_intervals method on the in product marketing emails service' do
- expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
-
- subject.perform
- end
+ with_them do
+ it_behaves_like 'in-product marketing email'
end
end
- context 'when the application setting is disabled' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: false)
- end
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
- it 'does not execute the in product marketing emails service' do
- expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
+ where(:in_product_marketing_emails_enabled, :experiment_active, :executes_service) do
+ true | true | 1
+ true | false | 0
+ false | false | 0
+ false | true | 0
+ end
- subject.perform
+ with_them do
+ it_behaves_like 'in-product marketing email'
end
end
end
diff --git a/spec/workers/new_issue_worker_spec.rb b/spec/workers/new_issue_worker_spec.rb
index ec129ad3380..35b83c3bee8 100644
--- a/spec/workers/new_issue_worker_spec.rb
+++ b/spec/workers/new_issue_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe NewIssueWorker do
+ include AfterNextHelpers
+
describe '#perform' do
let(:worker) { described_class.new }
@@ -49,7 +51,7 @@ RSpec.describe NewIssueWorker do
expect(Notify).not_to receive(:new_issue_email)
.with(mentioned.id, issue.id, NotificationReason::MENTIONED)
- expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: issue.class, object_id: issue.id)
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: issue.class.to_s, object_id: issue.id)
worker.perform(issue.id, user.id)
end
@@ -80,6 +82,13 @@ RSpec.describe NewIssueWorker do
worker.perform(issue.id, user.id)
end
+
+ it 'calls Issues::AfterCreateService' do
+ expect_next(::Issues::AfterCreateService)
+ .to receive(:execute)
+
+ worker.perform(issue.id, user.id)
+ end
end
end
end
diff --git a/spec/workers/new_merge_request_worker_spec.rb b/spec/workers/new_merge_request_worker_spec.rb
index 0d64973b0fa..358939a963a 100644
--- a/spec/workers/new_merge_request_worker_spec.rb
+++ b/spec/workers/new_merge_request_worker_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe NewMergeRequestWorker do
expect(Notify).not_to receive(:new_merge_request_email)
.with(mentioned.id, merge_request.id, NotificationReason::MENTIONED)
- expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: merge_request.class, object_id: merge_request.id)
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: merge_request.class.to_s, object_id: merge_request.id)
worker.perform(merge_request.id, user.id)
end
diff --git a/spec/workers/packages/go/sync_packages_worker_spec.rb b/spec/workers/packages/go/sync_packages_worker_spec.rb
new file mode 100644
index 00000000000..ad1a85b26e4
--- /dev/null
+++ b/spec/workers/packages/go/sync_packages_worker_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::SyncPackagesWorker, type: :worker do
+ include_context 'basic Go module'
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ def perform(ref_name, path)
+ described_class.new.perform(project.id, ref_name, path)
+ end
+
+ def validate_package(package, mod, ver)
+ expect(package).not_to be_nil
+ expect(package.name).to eq(mod.name)
+ expect(package.version).to eq(ver.name)
+ expect(package.package_type).to eq('golang')
+ expect(package.created_at).to eq(ver.commit.committed_date)
+ expect(package.package_files.count).to eq(2)
+ end
+
+ shared_examples 'it creates a package' do |path, version, exists: false|
+ subject { perform(version, path) }
+
+ it "returns a package for example.com/project#{path.empty? ? '' : '/' + path}@#{version}" do
+ expect { subject }
+ .to change { project.packages.count }.by(exists ? 0 : 1)
+ .and change { Packages::PackageFile.count }.by(exists ? 0 : 2)
+
+ mod = create :go_module, project: project, path: path
+ ver = create :go_module_version, :tagged, mod: mod, name: version
+ validate_package(subject, mod, ver)
+ end
+ end
+
+ describe '#perform' do
+ context 'with no existing packages' do
+ it_behaves_like 'it creates a package', '', 'v1.0.1'
+ it_behaves_like 'it creates a package', '', 'v1.0.2'
+ it_behaves_like 'it creates a package', '', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'mod', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'v2', 'v2.0.0'
+ end
+
+ context 'with existing packages' do
+ before do
+ mod = create :go_module, project: project
+ ver = create :go_module_version, :tagged, mod: mod, name: 'v1.0.1'
+ Packages::Go::CreatePackageService.new(project, nil, version: ver).execute
+ end
+
+ it_behaves_like 'it creates a package', '', 'v1.0.1', exists: true
+ it_behaves_like 'it creates a package', '', 'v1.0.2'
+ it_behaves_like 'it creates a package', '', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'mod', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'v2', 'v2.0.0'
+ end
+
+ context 'with a package that exceeds project limits' do
+ before do
+ Plan.default.actual_limits.update!({ 'golang_max_file_size': 1 })
+ end
+
+ it 'logs an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(::Packages::Go::CreatePackageService::GoZipSizeError))
+
+ perform('v2.0.0', 'v2')
+ end
+ end
+
+ where(:path, :version) do
+ [
+ ['', 'v1.0.1'],
+ ['', 'v1.0.2'],
+ ['', 'v1.0.3'],
+ ['mod', 'v1.0.3'],
+ ['v2', 'v2.0.0']
+ ]
+ end
+
+ with_them do
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id, version, path] }
+
+ it 'creates a package' do
+ expect { subject }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(2)
+
+ mod = create :go_module, project: project, path: path
+ ver = create :go_module_version, :tagged, mod: mod, name: version
+ package = ::Packages::Go::PackageFinder.new(project, mod.name, ver.name).execute
+ validate_package(package, mod, ver)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/packages/maven/metadata/sync_worker_spec.rb b/spec/workers/packages/maven/metadata/sync_worker_spec.rb
index 7e0f3616491..10482b3e327 100644
--- a/spec/workers/packages/maven/metadata/sync_worker_spec.rb
+++ b/spec/workers/packages/maven/metadata/sync_worker_spec.rb
@@ -61,9 +61,10 @@ RSpec.describe Packages::Maven::Metadata::SyncWorker, type: :worker do
let(:project) { create(:project) }
it 'does not create the updated metadata files' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:message, 'Non existing versionless package(s). Nothing to do.')
+
expect { subject }
.to change { ::Packages::PackageFile.count }.by(0)
- .and raise_error(described_class::SyncError, 'Non existing versionless package')
end
end
@@ -146,9 +147,10 @@ RSpec.describe Packages::Maven::Metadata::SyncWorker, type: :worker do
let(:project) { create(:project) }
it 'does not create the updated metadata files' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:message, 'Non existing versionless package(s). Nothing to do.')
+
expect { subject }
.to change { ::Packages::PackageFile.count }.by(0)
- .and raise_error(described_class::SyncError, 'Non existing versionless package')
end
end
diff --git a/spec/workers/packages/rubygems/extraction_worker_spec.rb b/spec/workers/packages/rubygems/extraction_worker_spec.rb
new file mode 100644
index 00000000000..15c0a3be90c
--- /dev/null
+++ b/spec/workers/packages/rubygems/extraction_worker_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
+ describe '#perform' do
+ let_it_be(:package) { create(:rubygems_package) }
+
+ let(:package_file) { package.package_files.first }
+ let(:package_file_id) { package_file.id }
+ let(:package_name) { 'TempProject.TempPackage' }
+ let(:package_version) { '1.0.0' }
+ let(:job_args) { package_file_id }
+
+ subject { described_class.new.perform(*job_args) }
+
+ include_examples 'an idempotent worker' do
+ it 'processes the gem', :aggregate_failures do
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(2)
+
+ expect(Packages::Package.last.id).to be(package.id)
+ expect(package.name).not_to be(package_name)
+ end
+ end
+
+ it 'handles a processing failure', :aggregate_failures do
+ expect(::Packages::Rubygems::ProcessGemService).to receive(:new)
+ .and_raise(::Packages::Rubygems::ProcessGemService::ExtractionError)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(::Packages::Rubygems::ProcessGemService::ExtractionError),
+ project_id: package.project_id
+ )
+
+ expect { subject }
+ .to change { Packages::Package.count }.by(-1)
+ .and change { Packages::PackageFile.count }.by(-2)
+ end
+
+ context 'when there is no package file' do
+ let(:package_file_id) { 999999 }
+
+ it 'returns without action' do
+ expect(::Packages::Rubygems::ProcessGemService).not_to receive(:new)
+
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ end
+ end
+ end
+end
diff --git a/spec/workers/pages_update_configuration_worker_spec.rb b/spec/workers/pages_update_configuration_worker_spec.rb
index ff3727646c7..7cceeaa52d6 100644
--- a/spec/workers/pages_update_configuration_worker_spec.rb
+++ b/spec/workers/pages_update_configuration_worker_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe PagesUpdateConfigurationWorker do
end
it "doesn't schedule a worker if updates on legacy storage are disabled", :sidekiq_inline do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(Projects::UpdatePagesConfigurationService).not_to receive(:new)
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index be501318920..f7fd1b1a0a7 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -102,7 +102,10 @@ RSpec.describe PostReceive do
perform
- expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0', data: anything }])
+ expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{
+ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
+ data: anything
+ }])
end
it 'does not track an event for the empty_repo_upload experiment when project is not empty', :snowplow do
diff --git a/spec/workers/projects/post_creation_worker_spec.rb b/spec/workers/projects/post_creation_worker_spec.rb
new file mode 100644
index 00000000000..b15b7b76b56
--- /dev/null
+++ b/spec/workers/projects/post_creation_worker_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::PostCreationWorker do
+ let_it_be(:user) { create :user }
+
+ let(:worker) { described_class.new }
+ let(:project) { create(:project) }
+
+ subject { described_class.new.perform(project.id) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id] }
+
+ describe 'Prometheus service' do
+ context 'project is nil' do
+ let(:job_args) { [nil] }
+
+ it 'does not create prometheus service' do
+ expect { subject }.not_to change { Service.count }
+ end
+ end
+
+ context 'when project has access to shared service' do
+ context 'Prometheus application is shared via group cluster' do
+ let(:project) { create(:project, group: group) }
+ let(:cluster) { create(:cluster, :group, groups: [group]) }
+ let(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'creates PrometheusService record', :aggregate_failures do
+ subject
+
+ service = project.prometheus_service
+ expect(service.active).to be true
+ expect(service.manual_configuration?).to be false
+ expect(service.persisted?).to be true
+ end
+ end
+
+ context 'Prometheus application is shared via instance cluster' do
+ let(:cluster) { create(:cluster, :instance) }
+
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'creates PrometheusService record', :aggregate_failures do
+ subject
+
+ service = project.prometheus_service
+ expect(service.active).to be true
+ expect(service.manual_configuration?).to be false
+ expect(service.persisted?).to be true
+ end
+
+ it 'cleans invalid record and logs warning', :aggregate_failures do
+ invalid_service_record = build(:prometheus_service, properties: { api_url: nil, manual_configuration: true }.to_json)
+ allow(PrometheusService).to receive(:new).and_return(invalid_service_record)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) })).twice
+ subject
+
+ expect(project.prometheus_service).to be_nil
+ end
+ end
+
+ context 'shared Prometheus application is not available' do
+ it 'does not persist PrometheusService record', :aggregate_failures do
+ subject
+
+ expect(project.prometheus_service).to be_nil
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb
index 5642de05731..6d0d4aeef89 100644
--- a/spec/workers/remove_expired_members_worker_spec.rb
+++ b/spec/workers/remove_expired_members_worker_spec.rb
@@ -29,6 +29,15 @@ RSpec.describe RemoveExpiredMembersWorker do
worker.perform
expect(non_expiring_project_member.reload).to be_present
end
+
+ it 'adds context to resulting jobs' do
+ worker.perform
+
+ new_job = Sidekiq::Worker.jobs.last
+
+ expect(new_job).to include('meta.project' => expired_project_member.project.full_path,
+ 'meta.user' => expired_project_member.user.username)
+ end
end
context 'project bots' do
@@ -98,6 +107,15 @@ RSpec.describe RemoveExpiredMembersWorker do
worker.perform
expect(non_expiring_group_member.reload).to be_present
end
+
+ it 'adds context to resulting jobs' do
+ worker.perform
+
+ new_job = Sidekiq::Worker.jobs.last
+
+ expect(new_job).to include('meta.root_namespace' => expired_group_member.group.full_path,
+ 'meta.user' => expired_group_member.user.username)
+ end
end
context 'when the last group owner expires' do
diff --git a/spec/workers/repository_check/dispatch_worker_spec.rb b/spec/workers/repository_check/dispatch_worker_spec.rb
index 5e1bc76ec8e..829abc7d895 100644
--- a/spec/workers/repository_check/dispatch_worker_spec.rb
+++ b/spec/workers/repository_check/dispatch_worker_spec.rb
@@ -42,5 +42,12 @@ RSpec.describe RepositoryCheck::DispatchWorker do
subject.perform
end
+
+ it 'logs unhealthy shards' do
+ log_data = { message: "Excluding unhealthy shards", failed_checks: [{ labels: { shard: unhealthy_shard_name }, message: '14:Connect Failed', status: 'failed' }], class: described_class.name }
+ expect(Gitlab::AppLogger).to receive(:error).with(a_hash_including(log_data))
+
+ subject.perform
+ end
end
end
diff --git a/spec/workers/ssh_keys/expired_notification_worker_spec.rb b/spec/workers/ssh_keys/expired_notification_worker_spec.rb
new file mode 100644
index 00000000000..249ee404870
--- /dev/null
+++ b/spec/workers/ssh_keys/expired_notification_worker_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SshKeys::ExpiredNotificationWorker, type: :worker do
+ subject(:worker) { described_class.new }
+
+ it 'uses a cronjob queue' do
+ expect(worker.sidekiq_options_hash).to include(
+ 'queue' => 'cronjob:ssh_keys_expired_notification',
+ 'queue_namespace' => :cronjob
+ )
+ end
+
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+
+ context 'with expiring key today' do
+ let_it_be_with_reload(:expired_today) { create(:key, expires_at: Time.current, user: user) }
+
+ it 'invokes the notification service' do
+ expect_next_instance_of(Keys::ExpiryNotificationService) do |expiry_service|
+ expect(expiry_service).to receive(:execute)
+ end
+
+ worker.perform
+ end
+
+ it 'updates notified column' do
+ expect { worker.perform }.to change { expired_today.reload.expiry_notification_delivered_at }
+ end
+
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple(worker: worker)
+ end
+ end
+
+ context 'when feature is not enabled' do
+ before do
+ stub_feature_flags(ssh_key_expiration_email_notification: false)
+ end
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_today.reload.expiry_notification_delivered_at }
+ end
+ end
+ end
+
+ context 'when the key has already expired' do
+ let_it_be(:expired_past) { create(:key, expires_at: 1.day.ago, user: user) }
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_past.reload.expiry_notification_delivered_at }
+ end
+ end
+ end
+end
diff --git a/spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb b/spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb
new file mode 100644
index 00000000000..f9276c86cdf
--- /dev/null
+++ b/spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SshKeys::ExpiringSoonNotificationWorker, type: :worker do
+ subject(:worker) { described_class.new }
+
+ it 'uses a cronjob queue' do
+ expect(worker.sidekiq_options_hash).to include(
+ 'queue' => 'cronjob:ssh_keys_expiring_soon_notification',
+ 'queue_namespace' => :cronjob
+ )
+ end
+
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+
+ context 'with a key expiring soon' do
+ let_it_be_with_reload(:expiring_soon) { create(:key, expires_at: 6.days.from_now, user: user) }
+
+ it 'invokes the notification service' do
+ expect_next_instance_of(Keys::ExpiryNotificationService) do |expiry_service|
+ expect(expiry_service).to receive(:execute)
+ end
+
+ worker.perform
+ end
+
+ it 'updates notified column' do
+ expect { worker.perform }.to change { expiring_soon.reload.before_expiry_notification_delivered_at }
+ end
+
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple(worker: worker)
+ end
+ end
+
+ context 'when feature is not enabled' do
+ before do
+ stub_feature_flags(ssh_key_expiration_email_notification: false)
+ end
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expiring_soon.reload.before_expiry_notification_delivered_at }
+ end
+ end
+ end
+
+ context 'when the key has already expired' do
+ let_it_be(:expired_past) { create(:key, expires_at: 1.day.ago, user: user) }
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_past.reload.before_expiry_notification_delivered_at }
+ end
+ end
+
+ context 'when key is not expiring soon' do
+ let_it_be(:expires_future) { create(:key, expires_at: 8.days.from_now, user: user) }
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expires_future.reload.before_expiry_notification_delivered_at }
+ end
+ end
+ end
+end
diff --git a/spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb b/spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb
new file mode 100644
index 00000000000..6ccad25ad76
--- /dev/null
+++ b/spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TodosDestroyer::DestroyedIssuableWorker do
+ let(:job_args) { [1, 'MergeRequest'] }
+
+ it 'calls the Todos::Destroy::DestroyedIssuableService' do
+ expect_next_instance_of(::Todos::Destroy::DestroyedIssuableService, *job_args) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ described_class.new.perform(*job_args)
+ end
+end