Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/archive_trace_worker_spec.rb | 31
-rw-r--r--  spec/workers/build_finished_worker_spec.rb | 88
-rw-r--r--  spec/workers/build_hooks_worker_spec.rb | 4
-rw-r--r--  spec/workers/ci/build_finished_worker_spec.rb | 21
-rw-r--r--  spec/workers/ci/cancel_pipeline_worker_spec.rb | 50
-rw-r--r--  spec/workers/ci/runners/process_runner_version_update_worker_spec.rb | 48
-rw-r--r--  spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb | 45
-rw-r--r--  spec/workers/ci/track_failed_build_worker_spec.rb | 28
-rw-r--r--  spec/workers/concerns/limited_capacity/job_tracker_spec.rb | 2
-rw-r--r--  spec/workers/concerns/waitable_worker_spec.rb | 40
-rw-r--r--  spec/workers/database/batched_background_migration/ci_database_worker_spec.rb | 2
-rw-r--r--  spec/workers/database/batched_background_migration_worker_spec.rb | 2
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 1
-rw-r--r--  spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb | 53
-rw-r--r--  spec/workers/merge_requests/create_approval_event_worker_spec.rb | 51
-rw-r--r--  spec/workers/merge_requests/create_approval_note_worker_spec.rb | 52
-rw-r--r--  spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb | 57
-rw-r--r--  spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb | 56
-rw-r--r--  spec/workers/new_issue_worker_spec.rb | 10
-rw-r--r--  spec/workers/packages/cleanup/execute_policy_worker_spec.rb | 2
-rw-r--r--  spec/workers/pages/invalidate_domain_cache_worker_spec.rb | 112
-rw-r--r--  spec/workers/post_receive_spec.rb | 6
-rw-r--r--  spec/workers/project_cache_worker_spec.rb | 2
-rw-r--r--  spec/workers/projects/import_export/relation_export_worker_spec.rb | 36
-rw-r--r--  spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb | 8
-rw-r--r--  spec/workers/update_project_statistics_worker_spec.rb | 28
-rw-r--r--  spec/workers/users/deactivate_dormant_users_worker_spec.rb | 21
27 files changed, 676 insertions, 180 deletions
diff --git a/spec/workers/archive_trace_worker_spec.rb b/spec/workers/archive_trace_worker_spec.rb
deleted file mode 100644
index a9f256b1b3b..00000000000
--- a/spec/workers/archive_trace_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ArchiveTraceWorker do
- describe '#perform' do
- subject { described_class.new.perform(job&.id) }
-
- context 'when job is found' do
- let(:job) { create(:ci_build, :trace_live) }
-
- it 'executes service' do
- expect_any_instance_of(Ci::ArchiveTraceService)
- .to receive(:execute).with(job, anything)
-
- subject
- end
- end
-
- context 'when job is not found' do
- let(:job) { nil }
-
- it 'does not execute service' do
- expect_any_instance_of(Ci::ArchiveTraceService)
- .not_to receive(:execute)
-
- subject
- end
- end
- end
-end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
deleted file mode 100644
index 2ff173c1558..00000000000
--- a/spec/workers/build_finished_worker_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BuildFinishedWorker do
- let(:worker) { described_class.new }
-
- subject { worker.perform(build.id) }
-
- describe '#perform' do
- context 'when build exists' do
- let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
-
- before do
- expect(Ci::Build).to receive(:find_by).with({ id: build.id }).and_return(build)
- end
-
- it 'calculates coverage and calls hooks', :aggregate_failures do
- expect(build).to receive(:update_coverage).ordered
-
- expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
- expect(build_report_result_service).to receive(:execute).with(build)
- end
-
- expect(BuildHooksWorker).to receive(:perform_async)
- expect(ChatNotificationWorker).not_to receive(:perform_async)
- expect(Ci::ArchiveTraceWorker).to receive(:perform_in)
-
- subject
- end
-
- context 'when build is failed' do
- before do
- build.update!(status: :failed)
- end
-
- it 'adds a todo' do
- expect(::Ci::MergeRequests::AddTodoWhenBuildFailsWorker).to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when build has a chat' do
- before do
- build.pipeline.update!(source: :chat)
- end
-
- it 'schedules a ChatNotification job' do
- expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
-
- subject
- end
- end
-
- context 'when project is deleted' do
- before do
- allow(build).to receive(:project).and_return(nil)
- end
-
- it 'does no processing' do
- expect(worker).not_to receive(:process_build)
-
- subject
- end
- end
-
- context 'when project is pending_delete' do
- before do
- build.project.update_attribute(:pending_delete, true)
- end
-
- it 'does no processing' do
- expect(worker).not_to receive(:process_build)
-
- subject
- end
- end
- end
-
- context 'when build does not exist' do
- it 'does not raise exception' do
- expect { described_class.new.perform(non_existing_record_id) }
- .not_to raise_error
- end
- end
- end
-end
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index 426eb03638c..80dc36d268f 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -23,8 +23,8 @@ RSpec.describe BuildHooksWorker do
end
end
- describe '.perform_async' do
- it 'sends a message to the application logger, before performing', :sidekiq_inline do
+ describe '.perform_async', :sidekiq_inline do
+ it 'sends a message to the application logger, before performing' do
build = create(:ci_build)
expect(Gitlab::AppLogger).to receive(:info).with(
diff --git a/spec/workers/ci/build_finished_worker_spec.rb b/spec/workers/ci/build_finished_worker_spec.rb
index 201182636e7..5ddaabc3938 100644
--- a/spec/workers/ci/build_finished_worker_spec.rb
+++ b/spec/workers/ci/build_finished_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::BuildFinishedWorker do
+ include AfterNextHelpers
+
subject { described_class.new.perform(build.id) }
describe '#perform' do
@@ -16,17 +18,28 @@ RSpec.describe Ci::BuildFinishedWorker do
it 'calculates coverage and calls hooks', :aggregate_failures do
expect(build).to receive(:update_coverage).ordered
- expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
- expect(build_report_result_service).to receive(:execute).with(build)
- end
+ expect_next(Ci::BuildReportResultService).to receive(:execute).with(build)
- expect(BuildHooksWorker).to receive(:perform_async)
+ expect(build).to receive(:execute_hooks)
expect(ChatNotificationWorker).not_to receive(:perform_async)
expect(Ci::ArchiveTraceWorker).to receive(:perform_in)
subject
end
+ context 'when the execute_build_hooks_inline feature flag is disabled' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: false)
+ end
+
+ it 'uses the BuildHooksWorker' do
+ expect(build).not_to receive(:execute_hooks)
+ expect(BuildHooksWorker).to receive(:perform_async).with(build)
+
+ subject
+ end
+ end
+
context 'when build is failed' do
before do
build.update!(status: :failed)
diff --git a/spec/workers/ci/cancel_pipeline_worker_spec.rb b/spec/workers/ci/cancel_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..6165aaff1c7
--- /dev/null
+++ b/spec/workers/ci/cancel_pipeline_worker_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CancelPipelineWorker, :aggregate_failures do
+ let!(:pipeline) { create(:ci_pipeline, :running) }
+
+ describe '#perform' do
+ subject(:perform) { described_class.new.perform(pipeline.id, pipeline.id) }
+
+ it 'calls cancel_running' do
+ allow(::Ci::Pipeline).to receive(:find_by_id).and_return(pipeline)
+ expect(pipeline).to receive(:cancel_running).with(
+ auto_canceled_by_pipeline_id: pipeline.id,
+ cascade_to_children: false
+ )
+
+ perform
+ end
+
+ context 'if pipeline is deleted' do
+ subject(:perform) { described_class.new.perform(non_existing_record_id, non_existing_record_id) }
+
+ it 'does not error' do
+ expect(pipeline).not_to receive(:cancel_running)
+
+ perform
+ end
+ end
+
+ describe 'with builds and state transition side effects', :sidekiq_inline do
+ let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
+
+ it_behaves_like 'an idempotent worker', :sidekiq_inline do
+ let(:job_args) { [pipeline.id, pipeline.id] }
+
+ it 'cancels the pipeline' do
+ perform
+
+ pipeline.reload
+
+ expect(pipeline).to be_canceled
+ expect(pipeline.builds.first).to be_canceled
+ expect(pipeline.builds.first.auto_canceled_by_id).to eq pipeline.id
+ expect(pipeline.auto_canceled_by_id).to eq pipeline.id
+ end
+ end
+ end
+ end
+end
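
Note: for readability, a minimal sketch of a worker satisfying the expectations in this new spec (a hypothetical reconstruction, not the actual GitLab implementation; the ApplicationWorker mixin is assumed):

module Ci
  class CancelPipelineWorker
    include ApplicationWorker # assumed GitLab worker mixin

    # Arguments mirror the spec: the pipeline to cancel and the pipeline that triggered the cancellation.
    def perform(pipeline_id, auto_canceled_by_pipeline_id)
      pipeline = ::Ci::Pipeline.find_by_id(pipeline_id)
      return unless pipeline # a deleted pipeline is skipped without raising

      pipeline.cancel_running(
        auto_canceled_by_pipeline_id: auto_canceled_by_pipeline_id,
        cascade_to_children: false
      )
    end
  end
end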
diff --git a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
new file mode 100644
index 00000000000..ff67266c3e8
--- /dev/null
+++ b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:version) { '1.0.0' }
+ let(:job_args) { version }
+
+ include_examples 'an idempotent worker' do
+ subject(:perform_twice) { perform_multiple(job_args, worker: worker, exec_times: 2) }
+
+ let(:service) { ::Ci::Runners::ProcessRunnerVersionUpdateService.new(version) }
+ let(:available_runner_releases) do
+ %w[1.0.0 1.0.1]
+ end
+
+ before do
+ allow(Ci::Runners::ProcessRunnerVersionUpdateService).to receive(:new).and_return(service)
+ allow(service).to receive(:execute).and_call_original
+
+ url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+
+ WebMock.stub_request(:get, url).to_return(
+ body: available_runner_releases.map { |v| { name: v } }.to_json,
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ it 'logs the service result', :aggregate_failures do
+ perform_twice
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateService).to have_received(:new).twice
+ expect(service).to have_received(:execute).twice
+ expect(worker.logging_extras).to eq(
+ {
+ 'extra.ci_runners_process_runner_version_update_worker.status' => :success,
+ 'extra.ci_runners_process_runner_version_update_worker.message' => nil,
+ 'extra.ci_runners_process_runner_version_update_worker.upgrade_status' => 'recommended'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
new file mode 100644
index 00000000000..1292df62ce5
--- /dev/null
+++ b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'when scheduled by cronjob' do
+ it 'reschedules itself' do
+ expect(described_class).to(receive(:perform_in).with(a_value_between(0, 12.hours.in_seconds), false))
+ expect(::Ci::Runners::ReconcileExistingRunnerVersionsService).not_to receive(:new)
+
+ worker.perform
+ end
+ end
+
+ context 'when self-scheduled' do
+ include_examples 'an idempotent worker' do
+ subject(:perform) { perform_multiple(false, worker: worker) }
+
+ it 'executes the service' do
+ expect_next_instance_of(Ci::Runners::ReconcileExistingRunnerVersionsService) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end.exactly(worker_exec_times)
+
+ perform
+ end
+ end
+
+ it 'logs the service result' do
+ expect_next_instance_of(Ci::Runners::ReconcileExistingRunnerVersionsService) do |service|
+ expect(service).to receive(:execute)
+ .and_return(ServiceResponse.success(payload: { some_job_result_key: 'some_value' }))
+ end
+
+ worker.perform(false)
+
+ expect(worker.logging_extras).to eq({
+ 'extra.ci_runners_reconcile_existing_runner_versions_cron_worker.some_job_result_key' => 'some_value'
+ })
+ end
+ end
+ end
+end
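
Note: a hypothetical sketch of the two-phase cron pattern this spec exercises (reschedule with jitter when invoked by the cronjob, run the service when self-scheduled); ApplicationWorker and log_extra_metadata_on_done are assumed GitLab helpers, and the real implementation may differ:

module Ci
  module Runners
    class ReconcileExistingRunnerVersionsCronWorker
      include ApplicationWorker # assumed worker mixin

      def perform(cronjob_scheduled = true)
        if cronjob_scheduled
          # Spread load: re-enqueue at a random point within the next 12 hours instead of running now.
          self.class.perform_in(rand(12.hours.in_seconds), false)
          return
        end

        result = ::Ci::Runners::ReconcileExistingRunnerVersionsService.new.execute
        # Surface the service payload in the structured job log (seen as logging_extras in the spec).
        result.payload.each { |key, value| log_extra_metadata_on_done(key, value) }
      end
    end
  end
end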
diff --git a/spec/workers/ci/track_failed_build_worker_spec.rb b/spec/workers/ci/track_failed_build_worker_spec.rb
new file mode 100644
index 00000000000..12d0e64afc5
--- /dev/null
+++ b/spec/workers/ci/track_failed_build_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::TrackFailedBuildWorker do
+ let_it_be(:build) { create(:ci_build, :failed, :sast_report) }
+ let_it_be(:exit_code) { 42 }
+ let_it_be(:failure_reason) { "script_failure" }
+
+ subject { described_class.new.perform(build.id, exit_code, failure_reason) }
+
+ describe '#perform' do
+ context 'when a build has failed' do
+ it 'executes track service' do
+ expect(Ci::TrackFailedBuildService)
+ .to receive(:new)
+ .with(build: build, exit_code: exit_code, failure_reason: failure_reason)
+ .and_call_original
+
+ subject
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [build.id, exit_code, failure_reason] }
+ end
+ end
+end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
index eeccdbd0e2d..0e3fa350fcd 100644
--- a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_shared_state do
describe '#register' do
it 'adds jid to the set' do
- expect(job_tracker.register('a-job-id', max_jids)). to be true
+ expect(job_tracker.register('a-job-id', max_jids)).to be true
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb
index f6d4cc4679d..bf156c3b8cb 100644
--- a/spec/workers/concerns/waitable_worker_spec.rb
+++ b/spec/workers/concerns/waitable_worker_spec.rb
@@ -30,19 +30,33 @@ RSpec.describe WaitableWorker do
describe '.bulk_perform_and_wait' do
context '1 job' do
- it 'inlines the job' do
- args_list = [[1]]
- expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
- expect(Gitlab::AppJsonLogger).to(
- receive(:info).with(a_hash_including('message' => 'running inline',
- 'class' => 'Gitlab::Foo::Bar::DummyWorker',
- 'job_status' => 'running',
- 'queue' => 'foo_bar_dummy'))
- .once)
-
- worker.bulk_perform_and_wait(args_list)
-
- expect(worker.counter).to eq(1)
+ it 'runs the jobs asynchronously' do
+ arguments = [[1]]
+
+ expect(worker).to receive(:bulk_perform_async).with(arguments)
+
+ worker.bulk_perform_and_wait(arguments)
+ end
+
+ context 'when the feature flag `always_async_project_authorizations_refresh` is turned off' do
+ before do
+ stub_feature_flags(always_async_project_authorizations_refresh: false)
+ end
+
+ it 'inlines the job' do
+ args_list = [[1]]
+ expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:info).with(a_hash_including('message' => 'running inline',
+ 'class' => 'Gitlab::Foo::Bar::DummyWorker',
+ 'job_status' => 'running',
+ 'queue' => 'foo_bar_dummy'))
+ .once)
+
+ worker.bulk_perform_and_wait(args_list)
+
+ expect(worker.counter).to eq(1)
+ end
end
end
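
Note: one possible shape of the branch described by the updated examples, sketched only for the single-job case the spec covers (hypothetical; the actual WaitableWorker concern may differ):

# Lives in the concern's class-level methods; only the one-job path is sketched here.
def bulk_perform_and_wait(args_list)
  if Feature.enabled?(:always_async_project_authorizations_refresh)
    bulk_perform_async(args_list)
  else
    # Legacy behaviour behind the disabled flag: run the lone job inline.
    bulk_perform_inline(args_list)
  end
end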
diff --git a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
index 2b4a42060d9..dfe7a266be2 100644
--- a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state do
- it_behaves_like 'it runs batched background migration jobs', :ci
+ it_behaves_like 'it runs batched background migration jobs', :ci, :ci_builds
end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
index a6c7db60abe..e57bd7581c2 100644
--- a/spec/workers/database/batched_background_migration_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigrationWorker do
- it_behaves_like 'it runs batched background migration jobs', :main
+ it_behaves_like 'it runs batched background migration jobs', :main, :events
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index e8ec7c28537..4a1bf7dbbf9 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -382,6 +382,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ProjectScheduleBulkRepositoryShardMovesWorker' => 3,
'ProjectTemplateExportWorker' => false,
'ProjectUpdateRepositoryStorageWorker' => 3,
+ 'Projects::DisableLegacyOpenSourceLicenseForInactiveProjectsWorker' => 3,
'Projects::GitGarbageCollectWorker' => false,
'Projects::InactiveProjectsDeletionNotificationWorker' => 3,
'Projects::PostCreationWorker' => 3,
diff --git a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
index b3c6a48767c..932152c0764 100644
--- a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
@@ -8,37 +8,66 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker do
let(:project) { create(:project) }
let!(:group) { create(:group, projects: [project]) }
let(:feature_flag_state) { [group] }
+ let(:single_endpoint_feature_flag_state) { [group] }
describe '#import' do
let(:importer) { instance_double('Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
before do
+ stub_feature_flags(github_importer_single_endpoint_issue_events_import: single_endpoint_feature_flag_state)
stub_feature_flags(github_importer_issue_events_import: feature_flag_state)
end
- it 'imports all the issue events' do
- waiter = Gitlab::JobWaiter.new(2, '123')
+ context 'when single endpoint feature flag enabled' do
+ it 'imports all the issue events' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
- expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter)
- .to receive(:new)
- .with(project, client)
- .and_return(importer)
+ expect(Gitlab::GithubImport::Importer::IssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
- expect(importer).to receive(:execute).and_return(waiter)
+ expect(importer).to receive(:execute).and_return(waiter)
- expect(Gitlab::GithubImport::AdvanceStageWorker)
- .to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :notes)
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :notes)
- worker.import(client, project)
+ worker.import(client, project)
+ end
+ end
+
+ context 'when import issue events feature flag enabled' do
+ let(:single_endpoint_feature_flag_state) { false }
+
+ it 'imports the issue events partly' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::Importer::IssueEventsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute).and_return(waiter)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :notes)
+
+ worker.import(client, project)
+ end
end
- context 'when feature flag is disabled' do
+ context 'when feature flags are disabled' do
let(:feature_flag_state) { false }
+ let(:single_endpoint_feature_flag_state) { false }
it 'skips issue events import and calls next stage' do
expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::Importer::IssueEventsImporter).not_to receive(:new)
expect(Gitlab::GithubImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, {}, :notes)
worker.import(client, project)
diff --git a/spec/workers/merge_requests/create_approval_event_worker_spec.rb b/spec/workers/merge_requests/create_approval_event_worker_spec.rb
new file mode 100644
index 00000000000..8389949ecc9
--- /dev/null
+++ b/spec/workers/merge_requests/create_approval_event_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreateApprovalEventWorker do
+ let!(:user) { create(:user) }
+ let!(:project) { create(:project) }
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls MergeRequests::CreateApprovalEventService' do
+ expect_next_instance_of(
+ MergeRequests::CreateApprovalEventService,
+ project: project, current_user: user
+ ) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'does not call MergeRequests::CreateApprovalEventService' do
+ expect(MergeRequests::CreateApprovalEventService).not_to receive(:new)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+end
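
Note: this and the following approval workers in the diff share an event-subscriber pattern; a hypothetical sketch consistent with the spec above (Gitlab::EventStore::Subscriber and the find_by_id guards are assumptions, not the confirmed implementation):

module MergeRequests
  class CreateApprovalEventWorker
    include Gitlab::EventStore::Subscriber # assumed subscriber mixin

    def handle_event(event)
      user = User.find_by_id(event.data[:current_user_id])
      merge_request = MergeRequest.find_by_id(event.data[:merge_request_id])
      return unless user && merge_request # missing records are skipped rather than raised

      MergeRequests::CreateApprovalEventService
        .new(project: merge_request.project, current_user: user)
        .execute(merge_request)
    end
  end
end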
diff --git a/spec/workers/merge_requests/create_approval_note_worker_spec.rb b/spec/workers/merge_requests/create_approval_note_worker_spec.rb
new file mode 100644
index 00000000000..f58d38599fc
--- /dev/null
+++ b/spec/workers/merge_requests/create_approval_note_worker_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreateApprovalNoteWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls SystemNoteService.approve_mr' do
+ expect(SystemNoteService).to receive(:approve_mr).with(merge_request, user)
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'logs and does not call SystemNoteService.approve_mr' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+ expect(SystemNoteService).not_to receive(:approve_mr)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Current user not found.', 'current_user_id' => user.id } }
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Merge request not found.', 'merge_request_id' => merge_request.id } }
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb b/spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb
new file mode 100644
index 00000000000..0130ef63f50
--- /dev/null
+++ b/spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ExecuteApprovalHooksWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls MergeRequests::ExecuteApprovalHooksService' do
+ expect_next_instance_of(
+ MergeRequests::ExecuteApprovalHooksService,
+ project: project, current_user: user
+ ) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'logs and does not call MergeRequests::ExecuteApprovalHooksService' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+ expect(MergeRequests::ExecuteApprovalHooksService).not_to receive(:new)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Current user not found.', 'current_user_id' => user.id } }
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Merge request not found.', 'merge_request_id' => merge_request.id } }
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb b/spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb
new file mode 100644
index 00000000000..f8316a8ff05
--- /dev/null
+++ b/spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ResolveTodosAfterApprovalWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls TodoService#resolve_todos_for_target' do
+ expect_next_instance_of(TodoService) do |todo_service|
+ expect(todo_service)
+ .to receive(:resolve_todos_for_target)
+ .with(merge_request, user)
+ end
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'logs and does not call TodoService#resolve_todos_for_target' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+ expect(TodoService).not_to receive(:new)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Current user not found.', 'current_user_id' => user.id } }
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Merge request not found.', 'merge_request_id' => merge_request.id } }
+ end
+ end
+end
diff --git a/spec/workers/new_issue_worker_spec.rb b/spec/workers/new_issue_worker_spec.rb
index 35b83c3bee8..b9053b10419 100644
--- a/spec/workers/new_issue_worker_spec.rb
+++ b/spec/workers/new_issue_worker_spec.rb
@@ -74,6 +74,8 @@ RSpec.describe NewIssueWorker do
it 'creates a new event record' do
expect { worker.perform(issue.id, user.id) }.to change { Event.count }.from(0).to(1)
+
+ expect(Event.last).to have_attributes(target_id: issue.id, target_type: 'Issue')
end
it 'creates a notification for the mentioned user' do
@@ -89,6 +91,14 @@ RSpec.describe NewIssueWorker do
worker.perform(issue.id, user.id)
end
+
+ context 'when a class is set' do
+ it 'creates event with the correct type' do
+ expect { worker.perform(issue.id, user.id, 'WorkItem') }.to change { Event.count }.from(0).to(1)
+
+ expect(Event.last).to have_attributes(target_id: issue.id, target_type: 'WorkItem')
+ end
+ end
end
end
end
diff --git a/spec/workers/packages/cleanup/execute_policy_worker_spec.rb b/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
index 81fcec1a360..6325a82ed3d 100644
--- a/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
+++ b/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe Packages::Cleanup::ExecutePolicyWorker do
end
describe '#remaining_work_count' do
- subject { worker.remaining_work_count}
+ subject { worker.remaining_work_count }
context 'with no policies' do
it { is_expected.to eq(0) }
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
index 1c1586ef199..9272e26a34f 100644
--- a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
+++ b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it_behaves_like 'subscribes to event'
it 'clears the cache with Gitlab::Pages::CacheControl' do
- caches.each do |cache_type, cache_id|
- expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache_type, id: cache_id) do |cache_control|
+ caches.each do |cache|
+ expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id]) do |cache_control|
expect(cache_control).to receive(:clear_cache)
end
end
@@ -26,20 +26,120 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it_behaves_like 'clears caches with',
event_class: Pages::PageDeployedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
it_behaves_like 'clears caches with',
event_class: Pages::PageDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectCreatedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectArchivedEvent,
+ event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectPathChangedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ old_path: 'old_path',
+ new_path: 'new_path'
+ },
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectTransferedEvent,
+ event_data: {
+ project_id: 1,
+ old_namespace_id: 2,
+ old_root_namespace_id: 3,
+ new_namespace_id: 4,
+ new_root_namespace_id: 5
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 },
+ { type: :namespace, id: 5 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Groups::GroupTransferedEvent,
+ event_data: {
+ group_id: 1,
+ old_root_namespace_id: 3,
+ new_root_namespace_id: 5
+ },
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :namespace, id: 5 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Groups::GroupPathChangedEvent,
+ event_data: {
+ group_id: 1,
+ root_namespace_id: 2,
+ old_path: 'old_path',
+ new_path: 'new_path'
+ },
+ caches: [
+ { type: :namespace, id: 2 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Groups::GroupDeletedEvent,
+ event_data: {
+ group_id: 1,
+ root_namespace_id: 3
+ },
+ caches: [
+ { type: :namespace, id: 3 }
+ ]
+
+ context 'when namespace based cache keys are duplicated' do
+ # de-dups namespace cache keys
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectTransferedEvent,
+ event_data: {
+ project_id: 1,
+ old_namespace_id: 2,
+ old_root_namespace_id: 5,
+ new_namespace_id: 4,
+ new_root_namespace_id: 5
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 5 }
+ ]
+ end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 4ddb793516f..d632ca39e44 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -452,6 +452,12 @@ RSpec.describe PostReceive do
perform
end
+ it 'updates the snippet model updated_at' do
+ expect(snippet).to receive(:touch)
+
+ perform
+ end
+
it 'updates snippet statistics' do
expect(Snippets::UpdateStatisticsService).to receive(:new).with(snippet).and_call_original
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index 7f42c700ce4..30c85464452 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe ProjectCacheWorker do
.twice
expect(UpdateProjectStatisticsWorker).to receive(:perform_in)
- .with(lease_timeout, project.id, statistics)
+ .with(lease_timeout, lease_key, project.id, statistics)
.and_call_original
expect(Namespaces::ScheduleAggregationWorker)
diff --git a/spec/workers/projects/import_export/relation_export_worker_spec.rb b/spec/workers/projects/import_export/relation_export_worker_spec.rb
new file mode 100644
index 00000000000..236650fe55b
--- /dev/null
+++ b/spec/workers/projects/import_export/relation_export_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::RelationExportWorker, type: :worker do
+ let(:project_relation_export) { create(:project_relation_export) }
+ let(:job_args) { [project_relation_export.id] }
+
+ it_behaves_like 'an idempotent worker'
+
+ describe '#perform' do
+ subject(:worker) { described_class.new }
+
+ context 'when relation export has initial state queued' do
+ let(:project_relation_export) { create(:project_relation_export) }
+
+ it 'calls RelationExportService' do
+ expect_next_instance_of(Projects::ImportExport::RelationExportService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ worker.perform(project_relation_export.id)
+ end
+ end
+
+ context 'when relation export does not have queued state' do
+ let(:project_relation_export) { create(:project_relation_export, status_event: :start) }
+
+ it 'does not call RelationExportService' do
+ expect(Projects::ImportExport::RelationExportService).not_to receive(:new)
+
+ worker.perform(project_relation_export.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
index 6007d3b34f8..2562a7bc6fe 100644
--- a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
+++ b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
@@ -6,12 +6,12 @@ RSpec.describe RemoveUnreferencedLfsObjectsWorker do
let(:worker) { described_class.new }
describe '#perform' do
- let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1') }
- let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2') }
+ let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1' * 64) }
+ let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2' * 64) }
let!(:project1) { create(:project, lfs_enabled: true) }
let!(:project2) { create(:project, lfs_enabled: true) }
- let!(:referenced_lfs_object1) { create(:lfs_object, oid: '3') }
- let!(:referenced_lfs_object2) { create(:lfs_object, oid: '4') }
+ let!(:referenced_lfs_object1) { create(:lfs_object, oid: '3' * 64) }
+ let!(:referenced_lfs_object2) { create(:lfs_object, oid: '4' * 64) }
let!(:lfs_objects_project1_1) do
create(:lfs_objects_project,
project: project1,
diff --git a/spec/workers/update_project_statistics_worker_spec.rb b/spec/workers/update_project_statistics_worker_spec.rb
index 1f840e363ea..2f356376d7c 100644
--- a/spec/workers/update_project_statistics_worker_spec.rb
+++ b/spec/workers/update_project_statistics_worker_spec.rb
@@ -3,17 +3,35 @@
require 'spec_helper'
RSpec.describe UpdateProjectStatisticsWorker do
+ include ExclusiveLeaseHelpers
+
let(:worker) { described_class.new }
let(:project) { create(:project, :repository) }
let(:statistics) { %w(repository_size) }
+ let(:lease_key) { "namespace:namespaces_root_statistics:#{project.namespace_id}" }
describe '#perform' do
- it 'updates the project statistics' do
- expect(Projects::UpdateStatisticsService).to receive(:new)
- .with(project, nil, statistics: statistics)
- .and_call_original
+ context 'when a lease could be obtained' do
+ it 'updates the project statistics' do
+ expect(Projects::UpdateStatisticsService).to receive(:new)
+ .with(project, nil, statistics: statistics)
+ .and_call_original
+
+ worker.perform(lease_key, project.id, statistics)
+ end
+ end
+
+ context 'when a lease could not be obtained' do
+ before do
+ stub_exclusive_lease_taken(lease_key, timeout: ProjectCacheWorker::LEASE_TIMEOUT)
+ end
+
+ it 'does not update the project statistics' do
+ lease_key = "namespace:namespaces_root_statistics:#{project.namespace_id}"
+ expect(Projects::UpdateStatisticsService).not_to receive(:new)
- worker.perform(project.id, statistics)
+ worker.perform(lease_key, project.id, statistics)
+ end
end
end
end
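
Note: a minimal sketch of the lease-guarded perform the reworked spec implies (hypothetical; the Gitlab::ExclusiveLease usage and early returns are assumptions):

class UpdateProjectStatisticsWorker
  include ApplicationWorker # assumed GitLab worker mixin

  def perform(lease_key, project_id, statistics = [])
    project = Project.find_by_id(project_id)
    return unless project

    # Skip the refresh while another job still holds the namespace aggregation lease.
    lease = Gitlab::ExclusiveLease.new(lease_key, timeout: ProjectCacheWorker::LEASE_TIMEOUT)
    return unless lease.try_obtain

    Projects::UpdateStatisticsService.new(project, nil, statistics: statistics).execute
  end
end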
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
index 297301c45e2..263ca31e0a0 100644
--- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -25,20 +25,13 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
context 'when automatic deactivation of dormant users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
- stub_const("#{described_class.name}::PAUSE_SECONDS", 0)
end
it 'deactivates dormant users' do
- freeze_time do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
-
- expect(worker).to receive(:sleep).twice
-
- worker.perform
+ worker.perform
- expect(User.dormant.count).to eq(0)
- expect(User.with_no_activity.count).to eq(0)
- end
+ expect(User.dormant.count).to eq(0)
+ expect(User.with_no_activity.count).to eq(0)
end
where(:user_type, :expected_state) do
@@ -78,6 +71,14 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
expect(inactive_recently_created.reload.state).to eq('active')
end
+
+ it 'triggers update of highest user role for deactivated users', :clean_gitlab_redis_shared_state do
+ [dormant, inactive].each do |user|
+ expect(UpdateHighestRoleWorker).to receive(:perform_in).with(anything, user.id)
+ end
+
+ worker.perform
+ end
end
context 'when automatic deactivation of dormant users is disabled' do