Diffstat (limited to 'spec/workers')
 spec/workers/bulk_import_worker_spec.rb                                                | 71
 spec/workers/bulk_imports/entity_worker_spec.rb                                        | 39
 spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb (renamed from spec/workers/ci/pipelines/create_artifact_worker_spec.rb) | 6
 spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb                     | 36
 spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb | 82
 spec/workers/experiments/record_conversion_event_worker_spec.rb                        | 35
 spec/workers/expire_build_artifacts_worker_spec.rb                                     |  4
 spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb    | 16
 spec/workers/gitlab_performance_bar_stats_worker_spec.rb                               |  2
 spec/workers/group_destroy_worker_spec.rb                                              |  6
 spec/workers/jira_connect/sync_branch_worker_spec.rb                                   | 89
 spec/workers/jira_connect/sync_deployments_worker_spec.rb                              | 60
 spec/workers/jira_connect/sync_feature_flags_worker_spec.rb                            | 60
 spec/workers/jira_connect/sync_merge_request_worker_spec.rb                            | 47
 spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb                     | 32
 spec/workers/namespaces/onboarding_user_added_worker_spec.rb                           | 10
 spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb               | 29
 spec/workers/project_update_repository_storage_worker_spec.rb                          | 42
 spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb               | 12
 spec/workers/snippet_update_repository_storage_worker_spec.rb                          | 15
 20 files changed, 504 insertions(+), 189 deletions(-)
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 12783f40528..d3a4144d606 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -4,13 +4,74 @@ require 'spec_helper'
RSpec.describe BulkImportWorker do
describe '#perform' do
- it 'executes Group Importer' do
- bulk_import_id = 1
+ before do
+ stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
+ end
+
+ context 'when no bulk import is found' do
+ it 'does nothing' do
+ expect(described_class).not_to receive(:perform_in)
+
+ subject.perform(non_existing_record_id)
+ end
+ end
+
+ context 'when bulk import is finished' do
+ it 'does nothing' do
+ bulk_import = create(:bulk_import, :finished)
+
+ expect(described_class).not_to receive(:perform_in)
+
+ subject.perform(bulk_import.id)
+ end
+ end
+
+ context 'when all entities are processed' do
+ it 'marks bulk import as finished' do
+ bulk_import = create(:bulk_import, :started)
+ create(:bulk_import_entity, :finished, bulk_import: bulk_import)
+ create(:bulk_import_entity, :failed, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(bulk_import.reload.finished?).to eq(true)
+ end
+ end
+
+ context 'when maximum allowed number of import entities in progress' do
+ it 'reenqueues itself' do
+ bulk_import = create(:bulk_import, :started)
+ (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :started, bulk_import: bulk_import) }
+
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+
+ subject.perform(bulk_import.id)
+ end
+ end
+
+ context 'when bulk import is created' do
+ it 'marks bulk import as started' do
+ bulk_import = create(:bulk_import, :created)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ subject.perform(bulk_import.id)
+
+ expect(bulk_import.reload.started?).to eq(true)
+ end
+
+ context 'when there are created entities to process' do
+ it 'marks a batch of entities as started, enqueues BulkImports::EntityWorker and reenqueues' do
+ bulk_import = create(:bulk_import, :created)
+ (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :created, bulk_import: bulk_import) }
+
+ expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
+ expect(BulkImports::EntityWorker).to receive(:perform_async)
- expect(BulkImports::Importers::GroupsImporter)
- .to receive(:new).with(bulk_import_id).and_return(double(execute: true))
+ subject.perform(bulk_import.id)
- described_class.new.perform(bulk_import_id)
+ expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
+ end
+ end
end
end
end
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
new file mode 100644
index 00000000000..31515b31947
--- /dev/null
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::EntityWorker do
+ describe '#execute' do
+ let(:bulk_import) { create(:bulk_import) }
+
+ context 'when started entity exists' do
+ let(:entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
+
+ it 'executes BulkImports::Importers::GroupImporter' do
+ expect(BulkImports::Importers::GroupImporter).to receive(:new).with(entity).and_call_original
+
+ subject.perform(entity.id)
+ end
+
+ it 'sets jid' do
+ jid = 'jid'
+
+ allow(subject).to receive(:jid).and_return(jid)
+
+ subject.perform(entity.id)
+
+ expect(entity.reload.jid).to eq(jid)
+ end
+ end
+
+ context 'when started entity does not exist' do
+ it 'does not execute BulkImports::Importers::GroupImporter' do
+ entity = create(:bulk_import_entity, bulk_import: bulk_import)
+
+ expect(BulkImports::Importers::GroupImporter).not_to receive(:new)
+
+ subject.perform(entity.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/pipelines/create_artifact_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
index 31d2c4e9559..000eda055af 100644
--- a/spec/workers/ci/pipelines/create_artifact_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/coverage_report_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Ci::Pipelines::CreateArtifactWorker do
+RSpec.describe ::Ci::PipelineArtifacts::CoverageReportWorker do
describe '#perform' do
subject { described_class.new.perform(pipeline_id) }
@@ -11,7 +11,7 @@ RSpec.describe ::Ci::Pipelines::CreateArtifactWorker do
let(:pipeline_id) { pipeline.id }
it 'calls pipeline report result service' do
- expect_next_instance_of(::Ci::Pipelines::CreateArtifactService) do |create_artifact_service|
+ expect_next_instance_of(::Ci::PipelineArtifacts::CoverageReportService) do |create_artifact_service|
expect(create_artifact_service).to receive(:execute)
end
@@ -23,7 +23,7 @@ RSpec.describe ::Ci::Pipelines::CreateArtifactWorker do
let(:pipeline_id) { non_existing_record_id }
it 'does not call pipeline create artifact service' do
- expect(Ci::Pipelines::CreateArtifactService).not_to receive(:execute)
+ expect(Ci::PipelineArtifacts::CoverageReportService).not_to receive(:execute)
subject
end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
new file mode 100644
index 00000000000..9e9aa962b63
--- /dev/null
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let_it_be(:pipeline_artifact) do
+ create(:ci_pipeline_artifact, expire_at: 1.week.ago)
+ end
+
+ it 'executes a service' do
+ expect_next_instance_of(::Ci::PipelineArtifacts::DestroyExpiredArtifactsService) do |instance|
+ expect(instance).to receive(:execute)
+ end
+
+ worker.perform
+ end
+
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple(worker: worker)
+ end
+
+ it 'removes the artifact only once' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:destroyed_pipeline_artifacts_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:destroyed_pipeline_artifacts_count, 0)
+
+ subject
+
+ expect { pipeline_artifact.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+end
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index d98ea1b6ab2..e6592f7f204 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:repository, reload: true) { create(:container_repository, :cleanup_scheduled) }
let_it_be(:project) { repository.project }
let_it_be(:policy) { project.container_expiration_policy }
@@ -19,23 +21,55 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
RSpec.shared_examples 'handling all repository conditions' do
it 'sends the repository for cleaning' do
+ service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :finished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
subject
end
context 'with unfinished cleanup' do
it 'logs an unfinished cleanup' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(status: :unfinished, repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :unfinished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished)
subject
end
+
+ context 'with a truncated list of tags to delete' do
+ it 'logs an unfinished cleanup' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository, cleanup_tags_service_after_truncate_size: 10, cleanup_tags_service_before_delete_size: 5)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: true)
+
+ subject
+ end
+ end
+
+ context 'the truncated log field' do
+ where(:before_truncate_size, :after_truncate_size, :truncated) do
+ 100 | 100 | false
+ 100 | 80 | true
+ nil | 100 | false
+ 100 | nil | false
+ nil | nil | false
+ end
+
+ with_them do
+ it 'is logged properly' do
+ service_response = cleanup_service_response(status: :unfinished, repository: repository, cleanup_tags_service_after_truncate_size: after_truncate_size, cleanup_tags_service_before_truncate_size: before_truncate_size)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :unfinished, truncated: truncated)
+
+ subject
+ end
+ end
+ end
end
context 'with policy running shortly' do
@@ -87,10 +121,10 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
it 'process the cleanup scheduled repository first' do
+ service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :finished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
subject
end
@@ -105,10 +139,10 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
it 'process the repository with the oldest expiration_policy_started_at' do
+ service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: cleanup_service_response(repository: repository)))
- expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :finished)
- expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
subject
end
@@ -164,8 +198,28 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
- def cleanup_service_response(status: :finished, repository:)
- ServiceResponse.success(message: "cleanup #{status}", payload: { cleanup_status: status, container_repository_id: repository.id })
+ def cleanup_service_response(status: :finished, repository:, cleanup_tags_service_original_size: 100, cleanup_tags_service_before_truncate_size: 80, cleanup_tags_service_after_truncate_size: 80, cleanup_tags_service_before_delete_size: 50)
+ ServiceResponse.success(
+ message: "cleanup #{status}",
+ payload: {
+ cleanup_status: status,
+ container_repository_id: repository.id,
+ cleanup_tags_service_original_size: cleanup_tags_service_original_size,
+ cleanup_tags_service_before_truncate_size: cleanup_tags_service_before_truncate_size,
+ cleanup_tags_service_after_truncate_size: cleanup_tags_service_after_truncate_size,
+ cleanup_tags_service_before_delete_size: cleanup_tags_service_before_delete_size
+ }.compact
+ )
+ end
+
+ def expect_log_extra_metadata(service_response:, cleanup_status: :finished, truncated: false)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, cleanup_status)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
+ %i[cleanup_tags_service_original_size cleanup_tags_service_before_truncate_size cleanup_tags_service_after_truncate_size cleanup_tags_service_before_delete_size].each do |field|
+ value = service_response.payload[field]
+ expect(worker).to receive(:log_extra_metadata_on_done).with(field, value) unless value.nil?
+ end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_truncated, truncated)
end
end
diff --git a/spec/workers/experiments/record_conversion_event_worker_spec.rb b/spec/workers/experiments/record_conversion_event_worker_spec.rb
new file mode 100644
index 00000000000..05e4ebc13ba
--- /dev/null
+++ b/spec/workers/experiments/record_conversion_event_worker_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Experiments::RecordConversionEventWorker, '#perform' do
+ subject(:perform) { described_class.new.perform(:experiment_key, 1234) }
+
+ before do
+ stub_experiment(experiment_key: experiment_active)
+ end
+
+ context 'when the experiment is active' do
+ let(:experiment_active) { true }
+
+ include_examples 'an idempotent worker' do
+ subject { perform }
+
+ it 'records the event' do
+ expect(Experiment).to receive(:record_conversion_event).with(:experiment_key, 1234)
+
+ perform
+ end
+ end
+ end
+
+ context 'when the experiment is not active' do
+ let(:experiment_active) { false }
+
+ it 'does not record the event' do
+ expect(Experiment).not_to receive(:record_conversion_event)
+
+ perform
+ end
+ end
+end
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 995f37daf17..6d73d715d21 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -8,9 +8,11 @@ RSpec.describe ExpireBuildArtifactsWorker do
describe '#perform' do
it 'executes a service' do
expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
- expect(instance).to receive(:execute)
+ expect(instance).to receive(:execute).and_call_original
end
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:destroyed_job_artifacts_count, 0)
+
worker.perform
end
end
diff --git a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
index 7acf1a338d3..75d4d2dff2e 100644
--- a/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_pull_requests_reviews_worker_spec.rb
@@ -9,23 +9,7 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker do
let(:client) { double(:client) }
describe '#import' do
- it 'does not import with the feature disabled' do
- stub_feature_flags(github_import_pull_request_reviews: false)
-
- expect(Gitlab::JobWaiter)
- .to receive(:new)
- .and_return(double(key: '123', jobs_remaining: 0))
-
- expect(Gitlab::GithubImport::AdvanceStageWorker)
- .to receive(:perform_async)
- .with(project.id, { '123' => 0 }, :issues_and_diff_notes)
-
- worker.import(client, project)
- end
-
it 'imports all the pull request reviews' do
- stub_feature_flags(github_import_pull_request_reviews: true)
-
importer = double(:importer)
waiter = Gitlab::JobWaiter.new(2, '123')
diff --git a/spec/workers/gitlab_performance_bar_stats_worker_spec.rb b/spec/workers/gitlab_performance_bar_stats_worker_spec.rb
index 367003dd1ad..3638add1524 100644
--- a/spec/workers/gitlab_performance_bar_stats_worker_spec.rb
+++ b/spec/workers/gitlab_performance_bar_stats_worker_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe GitlabPerformanceBarStatsWorker do
let(:uuid) { 1 }
before do
- expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
+ expect(Gitlab::Redis::Cache).to receive(:with).and_yield(redis)
expect_to_cancel_exclusive_lease(GitlabPerformanceBarStatsWorker::LEASE_KEY, uuid)
end
diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb
index ab3dd19dec1..82ae9010a24 100644
--- a/spec/workers/group_destroy_worker_spec.rb
+++ b/spec/workers/group_destroy_worker_spec.rb
@@ -4,8 +4,12 @@ require 'spec_helper'
RSpec.describe GroupDestroyWorker do
let(:group) { create(:group) }
- let(:user) { create(:admin) }
let!(:project) { create(:project, namespace: group) }
+ let(:user) { create(:user) }
+
+ before do
+ group.add_owner(user)
+ end
subject { described_class.new }
diff --git a/spec/workers/jira_connect/sync_branch_worker_spec.rb b/spec/workers/jira_connect/sync_branch_worker_spec.rb
index c8453064b0d..7c715f36fb4 100644
--- a/spec/workers/jira_connect/sync_branch_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_branch_worker_spec.rb
@@ -13,82 +13,67 @@ RSpec.describe JiraConnect::SyncBranchWorker do
let(:project_id) { project.id }
let(:branch_name) { 'master' }
let(:commit_shas) { %w(b83d6e3 5a62481) }
-
- subject { described_class.new.perform(project_id, branch_name, commit_shas) }
+ let(:update_sequence_id) { 1 }
def expect_jira_sync_service_execute(args)
- expect_next_instance_of(JiraConnect::SyncService) do |instance|
- expect(instance).to receive(:execute).with(args.merge(update_sequence_id: nil))
+ expect_next_instances_of(JiraConnect::SyncService, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |instance|
+ expect(instance).to receive(:execute).with(args)
end
end
- it 'calls JiraConnect::SyncService#execute' do
- expect_jira_sync_service_execute(
- branches: [instance_of(Gitlab::Git::Branch)],
- commits: project.commits_by(oids: commit_shas)
- )
-
- subject
- end
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project_id, branch_name, commit_shas, update_sequence_id] }
- context 'without branch name' do
- let(:branch_name) { nil }
-
- it 'calls JiraConnect::SyncService#execute' do
- expect_jira_sync_service_execute(
- branches: nil,
- commits: project.commits_by(oids: commit_shas)
- )
-
- subject
+ before do
+ stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
end
- end
-
- context 'without commits' do
- let(:commit_shas) { nil }
it 'calls JiraConnect::SyncService#execute' do
expect_jira_sync_service_execute(
branches: [instance_of(Gitlab::Git::Branch)],
- commits: nil
+ commits: project.commits_by(oids: commit_shas),
+ update_sequence_id: update_sequence_id
)
subject
end
- end
- context 'when project no longer exists' do
- let(:project_id) { non_existing_record_id }
+ context 'without branch name' do
+ let(:branch_name) { nil }
- it 'does not call JiraConnect::SyncService' do
- expect(JiraConnect::SyncService).not_to receive(:new)
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_jira_sync_service_execute(
+ branches: nil,
+ commits: project.commits_by(oids: commit_shas),
+ update_sequence_id: update_sequence_id
+ )
- subject
+ subject
+ end
end
- end
- context 'with update_sequence_id' do
- let(:update_sequence_id) { 1 }
- let(:request_path) { '/rest/devinfo/0.10/bulk' }
- let(:request_body) do
- {
- repositories: [
- Atlassian::JiraConnect::Serializers::RepositoryEntity.represent(
- project,
- commits: project.commits_by(oids: commit_shas),
- branches: [project.repository.find_branch(branch_name)],
- update_sequence_id: update_sequence_id
- )
- ]
- }
+ context 'without commits' do
+ let(:commit_shas) { nil }
+
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_jira_sync_service_execute(
+ branches: [instance_of(Gitlab::Git::Branch)],
+ commits: nil,
+ update_sequence_id: update_sequence_id
+ )
+
+ subject
+ end
end
- subject { described_class.new.perform(project_id, branch_name, commit_shas, update_sequence_id) }
+ context 'when project no longer exists' do
+ let(:project_id) { non_existing_record_id }
- it 'sends the reqeust with custom update_sequence_id' do
- expect_next(Atlassian::JiraConnect::Client).to receive(:post).with(request_path, request_body)
+ it 'does not call JiraConnect::SyncService' do
+ expect(JiraConnect::SyncService).not_to receive(:new)
- subject
+ subject
+ end
end
end
end
diff --git a/spec/workers/jira_connect/sync_deployments_worker_spec.rb b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
new file mode 100644
index 00000000000..9485f4cd3a7
--- /dev/null
+++ b/spec/workers/jira_connect/sync_deployments_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::JiraConnect::SyncDeploymentsWorker do
+ include AfterNextHelpers
+ include ServicesHelper
+
+ describe '#perform' do
+ let_it_be(:deployment) { create(:deployment) }
+
+ let(:sequence_id) { Random.random_number(1..10_000) }
+ let(:object_id) { deployment.id }
+
+ subject { described_class.new.perform(object_id, sequence_id) }
+
+ context 'when the object exists' do
+ it 'calls the Jira sync service' do
+ expect_next(::JiraConnect::SyncService, deployment.project)
+ .to receive(:execute).with(deployments: contain_exactly(deployment), update_sequence_id: sequence_id)
+
+ subject
+ end
+ end
+
+ context 'when the object does not exist' do
+ let(:object_id) { non_existing_record_id }
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_sync_deployments: false)
+ end
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is enabled for this project' do
+ before do
+ stub_feature_flags(jira_sync_deployments: deployment.project)
+ end
+
+ it 'calls the sync service' do
+ expect_next(::JiraConnect::SyncService).to receive(:execute)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
new file mode 100644
index 00000000000..035f4ebdd3c
--- /dev/null
+++ b/spec/workers/jira_connect/sync_feature_flags_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::JiraConnect::SyncFeatureFlagsWorker do
+ include AfterNextHelpers
+ include ServicesHelper
+
+ describe '#perform' do
+ let_it_be(:feature_flag) { create(:operations_feature_flag) }
+
+ let(:sequence_id) { Random.random_number(1..10_000) }
+ let(:feature_flag_id) { feature_flag.id }
+
+ subject { described_class.new.perform(feature_flag_id, sequence_id) }
+
+ context 'when object exists' do
+ it 'calls the Jira sync service' do
+ expect_next(::JiraConnect::SyncService, feature_flag.project)
+ .to receive(:execute).with(feature_flags: contain_exactly(feature_flag), update_sequence_id: sequence_id)
+
+ subject
+ end
+ end
+
+ context 'when object does not exist' do
+ let(:feature_flag_id) { non_existing_record_id }
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_sync_feature_flags: false)
+ end
+
+ it 'does not call the sync service' do
+ expect_next(::JiraConnect::SyncService).not_to receive(:execute)
+
+ subject
+ end
+ end
+
+ context 'when the feature flag is enabled for this project' do
+ before do
+ stub_feature_flags(jira_sync_feature_flags: feature_flag.project)
+ end
+
+ it 'calls the sync service' do
+ expect_next(::JiraConnect::SyncService).to receive(:execute)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
index 1a40aa2b3ad..6a0a0744f6f 100644
--- a/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_merge_request_worker_spec.rb
@@ -12,48 +12,31 @@ RSpec.describe JiraConnect::SyncMergeRequestWorker do
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let(:merge_request_id) { merge_request.id }
+ let(:update_sequence_id) { 1 }
- subject { described_class.new.perform(merge_request_id) }
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request_id, update_sequence_id] }
- it 'calls JiraConnect::SyncService#execute' do
- expect_next_instance_of(JiraConnect::SyncService) do |service|
- expect(service).to receive(:execute).with(merge_requests: [merge_request], update_sequence_id: nil)
+ before do
+ stub_request(:post, 'https://sample.atlassian.net/rest/devinfo/0.10/bulk').to_return(status: 200, body: '', headers: {})
end
- subject
- end
-
- context 'when MR no longer exists' do
- let(:merge_request_id) { non_existing_record_id }
-
- it 'does not call JiraConnect::SyncService' do
- expect(JiraConnect::SyncService).not_to receive(:new)
+ it 'calls JiraConnect::SyncService#execute' do
+ expect_next_instances_of(JiraConnect::SyncService, IdempotentWorkerHelper::WORKER_EXEC_TIMES) do |service|
+ expect(service).to receive(:execute).with(merge_requests: [merge_request], update_sequence_id: update_sequence_id)
+ end
subject
end
- end
- context 'with update_sequence_id' do
- let(:update_sequence_id) { 1 }
- let(:request_path) { '/rest/devinfo/0.10/bulk' }
- let(:request_body) do
- {
- repositories: [
- Atlassian::JiraConnect::Serializers::RepositoryEntity.represent(
- project,
- merge_requests: [merge_request],
- update_sequence_id: update_sequence_id
- )
- ]
- }
- end
-
- subject { described_class.new.perform(merge_request_id, update_sequence_id) }
+ context 'when MR no longer exists' do
+ let(:merge_request_id) { non_existing_record_id }
- it 'sends the request with custom update_sequence_id' do
- expect_next(Atlassian::JiraConnect::Client).to receive(:post).with(request_path, request_body)
+ it 'does not call JiraConnect::SyncService' do
+ expect(JiraConnect::SyncService).not_to receive(:new)
- subject
+ subject
+ end
end
end
end
diff --git a/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb b/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
new file mode 100644
index 00000000000..f1789fa8fbd
--- /dev/null
+++ b/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespaces::OnboardingPipelineCreatedWorker, '#perform' do
+ include AfterNextHelpers
+
+ let_it_be(:ci_pipeline) { create(:ci_pipeline) }
+
+ before do
+ OnboardingProgress.onboard(ci_pipeline.project.namespace)
+ end
+
+ it 'registers an onboarding progress action' do
+ expect_next(OnboardingProgressService, ci_pipeline.project.namespace)
+ .to receive(:execute).with(action: :pipeline_created).and_call_original
+
+ subject.perform(ci_pipeline.project.namespace_id)
+
+ expect(OnboardingProgress.completed?(ci_pipeline.project.namespace, :pipeline_created)).to eq(true)
+ end
+
+ context "when a namespace doesn't exist" do
+ it 'does not register an onboarding progress action' do
+ expect_next(OnboardingProgressService, ci_pipeline.project.namespace).not_to receive(:execute)
+
+ subject.perform(nil)
+
+ expect(OnboardingProgress.completed?(ci_pipeline.project.namespace, :pipeline_created)).to eq(false)
+ end
+ end
+end
diff --git a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb b/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
index 03c668259f8..a773e160fab 100644
--- a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
@@ -7,10 +7,16 @@ RSpec.describe Namespaces::OnboardingUserAddedWorker, '#perform' do
let_it_be(:group) { create(:group) }
- it 'records the event' do
+ before do
+ OnboardingProgress.onboard(group)
+ end
+
+ it 'registers an onboarding progress action' do
expect_next(OnboardingProgressService, group)
.to receive(:execute).with(action: :user_added).and_call_original
- expect { subject.perform(group.id) }.to change(NamespaceOnboardingAction, :count).by(1)
+ subject.perform(group.id)
+
+ expect(OnboardingProgress.completed?(group, :user_added)).to be(true)
end
end
diff --git a/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb b/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb
index aadfae51906..fb762593d75 100644
--- a/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb
+++ b/spec/workers/project_schedule_bulk_repository_shard_moves_worker_spec.rb
@@ -3,31 +3,10 @@
require 'spec_helper'
RSpec.describe ProjectScheduleBulkRepositoryShardMovesWorker do
- describe "#perform" do
- before do
- stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
+ it_behaves_like 'schedules bulk repository shard moves' do
+ let_it_be_with_reload(:container) { create(:project, :repository).tap { |project| project.track_project_repository } }
- allow(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async)
- end
-
- let!(:project) { create(:project, :repository).tap { |project| project.track_project_repository } }
- let(:source_storage_name) { 'default' }
- let(:destination_storage_name) { 'test_second_storage' }
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [source_storage_name, destination_storage_name] }
-
- it 'schedules project repository storage moves' do
- expect { subject }.to change(ProjectRepositoryStorageMove, :count).by(1)
-
- storage_move = project.repository_storage_moves.last!
-
- expect(storage_move).to have_attributes(
- source_storage_name: source_storage_name,
- destination_storage_name: destination_storage_name,
- state_name: :scheduled
- )
- end
- end
+ let(:move_service_klass) { ProjectRepositoryStorageMove }
+ let(:worker_klass) { ProjectUpdateRepositoryStorageWorker }
end
end
diff --git a/spec/workers/project_update_repository_storage_worker_spec.rb b/spec/workers/project_update_repository_storage_worker_spec.rb
index f75bb3d1642..490f1f5a2ad 100644
--- a/spec/workers/project_update_repository_storage_worker_spec.rb
+++ b/spec/workers/project_update_repository_storage_worker_spec.rb
@@ -3,45 +3,13 @@
require 'spec_helper'
RSpec.describe ProjectUpdateRepositoryStorageWorker do
- let(:project) { create(:project, :repository) }
-
subject { described_class.new }
- describe "#perform" do
- let(:service) { double(:update_repository_storage_service) }
-
- before do
- allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(%w[default test_second_storage])
- end
-
- context 'without repository storage move' do
- it "calls the update repository storage service" do
- expect(Projects::UpdateRepositoryStorageService).to receive(:new).and_return(service)
- expect(service).to receive(:execute)
-
- expect do
- subject.perform(project.id, 'test_second_storage')
- end.to change(ProjectRepositoryStorageMove, :count).by(1)
-
- storage_move = project.repository_storage_moves.last
- expect(storage_move).to have_attributes(
- source_storage_name: "default",
- destination_storage_name: "test_second_storage"
- )
- end
- end
-
- context 'with repository storage move' do
- let!(:repository_storage_move) { create(:project_repository_storage_move) }
-
- it "calls the update repository storage service" do
- expect(Projects::UpdateRepositoryStorageService).to receive(:new).and_return(service)
- expect(service).to receive(:execute)
+ it_behaves_like 'an update storage move worker' do
+ let_it_be_with_refind(:container) { create(:project, :repository) }
+ let_it_be(:repository_storage_move) { create(:project_repository_storage_move) }
- expect do
- subject.perform(nil, nil, repository_storage_move.id)
- end.not_to change(ProjectRepositoryStorageMove, :count)
- end
- end
+ let(:service_klass) { Projects::UpdateRepositoryStorageService }
+ let(:repository_storage_move_klass) { ProjectRepositoryStorageMove }
end
end
diff --git a/spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb b/spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb
new file mode 100644
index 00000000000..3a09b6ce449
--- /dev/null
+++ b/spec/workers/snippet_schedule_bulk_repository_shard_moves_worker_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SnippetScheduleBulkRepositoryShardMovesWorker do
+ it_behaves_like 'schedules bulk repository shard moves' do
+ let_it_be_with_reload(:container) { create(:snippet, :repository).tap { |snippet| snippet.create_repository } }
+
+ let(:move_service_klass) { SnippetRepositoryStorageMove }
+ let(:worker_klass) { SnippetUpdateRepositoryStorageWorker }
+ end
+end
diff --git a/spec/workers/snippet_update_repository_storage_worker_spec.rb b/spec/workers/snippet_update_repository_storage_worker_spec.rb
new file mode 100644
index 00000000000..a48abe4abf7
--- /dev/null
+++ b/spec/workers/snippet_update_repository_storage_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SnippetUpdateRepositoryStorageWorker do
+ subject { described_class.new }
+
+ it_behaves_like 'an update storage move worker' do
+ let_it_be_with_refind(:container) { create(:snippet, :repository) }
+ let_it_be(:repository_storage_move) { create(:snippet_repository_storage_move) }
+
+ let(:service_klass) { Snippets::UpdateRepositoryStorageService }
+ let(:repository_storage_move_klass) { SnippetRepositoryStorageMove }
+ end
+end