Diffstat (limited to 'spec/workers')
-rw-r--r--  spec/workers/authorized_projects_worker_spec.rb | 2
-rw-r--r--  spec/workers/background_migration_worker_spec.rb | 44
-rw-r--r--  spec/workers/create_gpg_signature_worker_spec.rb | 39
-rw-r--r--  spec/workers/delete_user_worker_spec.rb | 12
-rw-r--r--  spec/workers/email_receiver_worker_spec.rb | 26
-rw-r--r--  spec/workers/emails_on_push_worker_spec.rb | 12
-rw-r--r--  spec/workers/every_sidekiq_worker_spec.rb | 10
-rw-r--r--  spec/workers/expire_build_artifacts_worker_spec.rb | 8
-rw-r--r--  spec/workers/expire_build_instance_artifacts_worker_spec.rb | 12
-rw-r--r--  spec/workers/expire_job_cache_worker_spec.rb | 31
-rw-r--r--  spec/workers/expire_pipeline_cache_worker_spec.rb | 8
-rw-r--r--  spec/workers/git_garbage_collect_worker_spec.rb | 60
-rw-r--r--  spec/workers/gitlab_usage_ping_worker_spec.rb | 20
-rw-r--r--  spec/workers/group_destroy_worker_spec.rb | 4
-rw-r--r--  spec/workers/invalid_gpg_signature_update_worker_spec.rb | 29
-rw-r--r--  spec/workers/merge_worker_spec.rb | 13
-rw-r--r--  spec/workers/namespaceless_project_destroy_worker_spec.rb | 79
-rw-r--r--  spec/workers/new_issue_worker_spec.rb | 54
-rw-r--r--  spec/workers/new_merge_request_worker_spec.rb | 56
-rw-r--r--  spec/workers/new_note_worker_spec.rb | 4
-rw-r--r--  spec/workers/pipeline_metrics_worker_spec.rb | 2
-rw-r--r--  spec/workers/pipeline_notification_worker_spec.rb | 4
-rw-r--r--  spec/workers/pipeline_process_worker_spec.rb (renamed from spec/workers/pipeline_proccess_worker_spec.rb) | 0
-rw-r--r--  spec/workers/pipeline_schedule_worker_spec.rb | 65
-rw-r--r--  spec/workers/post_receive_spec.rb | 83
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 36
-rw-r--r--  spec/workers/project_cache_worker_spec.rb | 34
-rw-r--r--  spec/workers/project_destroy_worker_spec.rb | 22
-rw-r--r--  spec/workers/propagate_service_template_worker_spec.rb | 29
-rw-r--r--  spec/workers/prune_old_events_worker_spec.rb | 8
-rw-r--r--  spec/workers/remove_expired_members_worker_spec.rb | 2
-rw-r--r--  spec/workers/remove_old_web_hook_logs_worker_spec.rb | 18
-rw-r--r--  spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb | 6
-rw-r--r--  spec/workers/repository_check/batch_worker_spec.rb | 10
-rw-r--r--  spec/workers/repository_check/clear_worker_spec.rb | 4
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb | 36
-rw-r--r--  spec/workers/repository_import_worker_spec.rb | 27
-rw-r--r--  spec/workers/schedule_update_user_activity_worker_spec.rb | 2
-rw-r--r--  spec/workers/stuck_ci_jobs_worker_spec.rb | 12
-rw-r--r--  spec/workers/stuck_import_jobs_worker_spec.rb | 34
-rw-r--r--  spec/workers/stuck_merge_jobs_worker_spec.rb | 50
-rw-r--r--  spec/workers/trigger_schedule_worker_spec.rb | 73
-rw-r--r--  spec/workers/update_user_activity_worker_spec.rb | 4
43 files changed, 824 insertions, 260 deletions
diff --git a/spec/workers/authorized_projects_worker_spec.rb b/spec/workers/authorized_projects_worker_spec.rb
index bd5cc651c2b..03b9b99e263 100644
--- a/spec/workers/authorized_projects_worker_spec.rb
+++ b/spec/workers/authorized_projects_worker_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe AuthorizedProjectsWorker do
- let(:project) { create(:empty_project) }
+ let(:project) { create(:project) }
describe '.bulk_perform_and_wait' do
it 'schedules the ids and waits for the jobs to complete' do
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
new file mode 100644
index 00000000000..4f6e3474634
--- /dev/null
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -0,0 +1,44 @@
+require 'spec_helper'
+
+describe BackgroundMigrationWorker, :sidekiq do
+ describe '.perform' do
+ it 'performs a background migration' do
+ expect(Gitlab::BackgroundMigration)
+ .to receive(:perform)
+ .with('Foo', [10, 20])
+
+ described_class.new.perform('Foo', [10, 20])
+ end
+ end
+
+ describe '.perform_bulk' do
+ it 'enqueues background migrations in bulk' do
+ Sidekiq::Testing.fake! do
+ described_class.perform_bulk([['Foo', [1]], ['Foo', [2]]])
+
+ expect(described_class.jobs.count).to eq 2
+ expect(described_class.jobs).to all(include('enqueued_at'))
+ end
+ end
+ end
+
+ describe '.perform_bulk_in' do
+ context 'when delay is valid' do
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ described_class.perform_bulk_in(1.minute, [['Foo', [1]], ['Foo', [2]]])
+
+ expect(described_class.jobs.count).to eq 2
+ expect(described_class.jobs).to all(include('at'))
+ end
+ end
+ end
+
+ context 'when delay is invalid' do
+ it 'raises an ArgumentError exception' do
+ expect { described_class.perform_bulk_in(-60, [['Foo']]) }
+ .to raise_error(ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/workers/create_gpg_signature_worker_spec.rb b/spec/workers/create_gpg_signature_worker_spec.rb
new file mode 100644
index 00000000000..54978baca88
--- /dev/null
+++ b/spec/workers/create_gpg_signature_worker_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe CreateGpgSignatureWorker do
+ let(:project) { create(:project, :repository) }
+
+ context 'when GpgKey is found' do
+ let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
+
+ it 'calls Gitlab::Gpg::Commit#signature' do
+ expect(Gitlab::Gpg::Commit).to receive(:new).with(project, commit_sha).and_call_original
+
+ expect_any_instance_of(Gitlab::Gpg::Commit).to receive(:signature)
+
+ described_class.new.perform(commit_sha, project.id)
+ end
+ end
+
+ context 'when Commit is not found' do
+ let(:nonexisting_commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a34' }
+
+ it 'does not raise errors' do
+ expect { described_class.new.perform(nonexisting_commit_sha, project.id) }.not_to raise_error
+ end
+ end
+
+ context 'when Project is not found' do
+ let(:nonexisting_project_id) { -1 }
+
+ it 'does not raise errors' do
+ expect { described_class.new.perform(anything, nonexisting_project_id) }.not_to raise_error
+ end
+
+ it 'does not call Gitlab::Gpg::Commit#signature' do
+ expect_any_instance_of(Gitlab::Gpg::Commit).not_to receive(:signature)
+
+ described_class.new.perform(anything, nonexisting_project_id)
+ end
+ end
+end
diff --git a/spec/workers/delete_user_worker_spec.rb b/spec/workers/delete_user_worker_spec.rb
index 0765573408c..36594515005 100644
--- a/spec/workers/delete_user_worker_spec.rb
+++ b/spec/workers/delete_user_worker_spec.rb
@@ -5,16 +5,16 @@ describe DeleteUserWorker do
let!(:current_user) { create(:user) }
it "calls the DeleteUserWorker with the params it was given" do
- expect_any_instance_of(Users::DestroyService).to receive(:execute).
- with(user, {})
+ expect_any_instance_of(Users::DestroyService).to receive(:execute)
+ .with(user, {})
- DeleteUserWorker.new.perform(current_user.id, user.id)
+ described_class.new.perform(current_user.id, user.id)
end
it "uses symbolized keys" do
- expect_any_instance_of(Users::DestroyService).to receive(:execute).
- with(user, test: "test")
+ expect_any_instance_of(Users::DestroyService).to receive(:execute)
+ .with(user, test: "test")
- DeleteUserWorker.new.perform(current_user.id, user.id, "test" => "test")
+ described_class.new.perform(current_user.id, user.id, "test" => "test")
end
end
diff --git a/spec/workers/email_receiver_worker_spec.rb b/spec/workers/email_receiver_worker_spec.rb
index fe70501eeac..e4e77c667b3 100644
--- a/spec/workers/email_receiver_worker_spec.rb
+++ b/spec/workers/email_receiver_worker_spec.rb
@@ -1,6 +1,6 @@
require "spec_helper"
-describe EmailReceiverWorker do
+describe EmailReceiverWorker, :mailer do
let(:raw_message) { fixture_file('emails/valid_reply.eml') }
context "when reply by email is enabled" do
@@ -17,12 +17,16 @@ describe EmailReceiverWorker do
context "when an error occurs" do
before do
- allow_any_instance_of(Gitlab::Email::Receiver).to receive(:execute).and_raise(Gitlab::Email::EmptyEmailError)
+ allow_any_instance_of(Gitlab::Email::Receiver).to receive(:execute).and_raise(error)
end
- it "sends out a rejection email" do
- perform_enqueued_jobs do
- described_class.new.perform(raw_message)
+ context 'when the error is Gitlab::Email::EmptyEmailError' do
+ let(:error) { Gitlab::Email::EmptyEmailError }
+
+ it 'sends out a rejection email' do
+ perform_enqueued_jobs do
+ described_class.new.perform(raw_message)
+ end
email = ActionMailer::Base.deliveries.last
expect(email).not_to be_nil
@@ -30,6 +34,18 @@ describe EmailReceiverWorker do
expect(email.subject).to include("Rejected")
end
end
+
+ context 'when the error is Gitlab::Email::AutoGeneratedEmailError' do
+ let(:error) { Gitlab::Email::AutoGeneratedEmailError }
+
+ it 'does not send out any rejection email' do
+ perform_enqueued_jobs do
+ described_class.new.perform(raw_message)
+ end
+
+ should_not_email_anyone
+ end
+ end
end
end
diff --git a/spec/workers/emails_on_push_worker_spec.rb b/spec/workers/emails_on_push_worker_spec.rb
index 8cf2b888f9a..318aad4bc1e 100644
--- a/spec/workers/emails_on_push_worker_spec.rb
+++ b/spec/workers/emails_on_push_worker_spec.rb
@@ -1,8 +1,7 @@
require 'spec_helper'
-describe EmailsOnPushWorker do
+describe EmailsOnPushWorker, :mailer do
include RepoHelpers
- include EmailHelpers
include EmailSpec::Matchers
let(:project) { create(:project, :repository) }
@@ -12,7 +11,7 @@ describe EmailsOnPushWorker do
let(:perform) { subject.perform(project.id, recipients, data.stringify_keys) }
let(:email) { ActionMailer::Base.deliveries.last }
- subject { EmailsOnPushWorker.new }
+ subject { described_class.new }
describe "#perform" do
context "when push is a new branch" do
@@ -71,7 +70,9 @@ describe EmailsOnPushWorker do
end
context "when there are no errors in sending" do
- before { perform }
+ before do
+ perform
+ end
it "sends a mail with the correct subject" do
expect(email.subject).to include('adds bar folder and branch-test text file')
@@ -88,7 +89,6 @@ describe EmailsOnPushWorker do
context "when there is an SMTP error" do
before do
- reset_delivered_emails!
allow(Notify).to receive(:repository_push_email).and_raise(Net::SMTPFatalError)
allow(subject).to receive_message_chain(:logger, :info)
perform
@@ -112,8 +112,6 @@ describe EmailsOnPushWorker do
allow_any_instance_of(Mail::TestMailer).to receive(:deliver!).and_wrap_original do |original, mail|
original.call(Mail.new(mail.encoded))
end
-
- reset_delivered_emails!
end
it "sends the mail to each of the recipients" do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index fc9adf47c1e..30908534eb3 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -5,8 +5,8 @@ describe 'Every Sidekiq worker' do
root = Rails.root.join('app', 'workers')
concerns = root.join('concerns').to_s
- workers = Dir[root.join('**', '*.rb')].
- reject { |path| path.start_with?(concerns) }
+ workers = Dir[root.join('**', '*.rb')]
+ .reject { |path| path.start_with?(concerns) }
workers.map do |path|
ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
@@ -22,9 +22,9 @@ describe 'Every Sidekiq worker' do
end
it 'uses the cronjob queue when the worker runs as a cronjob' do
- cron_workers = Settings.cron_jobs.
- map { |job_name, options| options['job_class'].constantize }.
- to_set
+ cron_workers = Settings.cron_jobs
+ .map { |job_name, options| options['job_class'].constantize }
+ .to_set
workers.each do |worker|
next unless cron_workers.include?(worker)
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 73cbadc13d9..b47b4a02a68 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -5,10 +5,14 @@ describe ExpireBuildArtifactsWorker do
let(:worker) { described_class.new }
- before { Sidekiq::Worker.clear_all }
+ before do
+ Sidekiq::Worker.clear_all
+ end
describe '#perform' do
- before { build }
+ before do
+ build
+ end
subject! do
Sidekiq::Testing.fake! { worker.perform }
diff --git a/spec/workers/expire_build_instance_artifacts_worker_spec.rb b/spec/workers/expire_build_instance_artifacts_worker_spec.rb
index d202b3de77e..bed5c5e2ecb 100644
--- a/spec/workers/expire_build_instance_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_instance_artifacts_worker_spec.rb
@@ -30,18 +30,6 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file_identifier).to be_nil
end
end
-
- context 'when associated project was removed' do
- let(:build) do
- create(:ci_build, :artifacts, artifacts_expiry) do |build|
- build.project.delete
- end
- end
-
- it 'does not remove artifacts' do
- expect(build.reload.artifacts_file.exists?).to be_truthy
- end
- end
end
context 'with not yet expired artifacts' do
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
new file mode 100644
index 00000000000..1b614342a18
--- /dev/null
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -0,0 +1,31 @@
+require 'spec_helper'
+
+describe ExpireJobCacheWorker do
+ set(:pipeline) { create(:ci_empty_pipeline) }
+ let(:project) { pipeline.project }
+ subject { described_class.new }
+
+ describe '#perform' do
+ context 'with a job in the pipeline' do
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+
+ it 'invalidates Etag caching for the job path' do
+ pipeline_path = "/#{project.full_path}/pipelines/#{pipeline.id}.json"
+ job_path = "/#{project.full_path}/builds/#{job.id}.json"
+
+ expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipeline_path)
+ expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(job_path)
+
+ subject.perform(job.id)
+ end
+ end
+
+ context 'when there is no job in the pipeline' do
+ it 'does not change the etag store' do
+ expect(Gitlab::EtagCaching::Store).not_to receive(:new)
+
+ subject.perform(9999)
+ end
+ end
+ end
+end
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index ceba604dea2..54c9a69d329 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe ExpirePipelineCacheWorker do
let(:user) { create(:user) }
- let(:project) { create(:empty_project) }
+ let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
subject { described_class.new }
@@ -10,9 +10,11 @@ describe ExpirePipelineCacheWorker do
it 'invalidates Etag caching for project pipelines path' do
pipelines_path = "/#{project.full_path}/pipelines.json"
new_mr_pipelines_path = "/#{project.full_path}/merge_requests/new.json"
+ pipeline_path = "/#{project.full_path}/pipelines/#{pipeline.id}.json"
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipelines_path)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(new_mr_pipelines_path)
+ expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(pipeline_path)
subject.perform(pipeline.id)
end
@@ -35,8 +37,8 @@ describe ExpirePipelineCacheWorker do
end
it 'updates the cached status for a project' do
- expect(Gitlab::Cache::Ci::ProjectPipelineStatus).to receive(:update_for_pipeline).
- with(pipeline)
+ expect(Gitlab::Cache::Ci::ProjectPipelineStatus).to receive(:update_for_pipeline)
+ .with(pipeline)
subject.perform(pipeline.id)
end
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 029f35512e0..05f971dfd13 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -6,24 +6,60 @@ describe GitGarbageCollectWorker do
let(:project) { create(:project, :repository) }
let(:shell) { Gitlab::Shell.new }
- subject { GitGarbageCollectWorker.new }
+ subject { described_class.new }
describe "#perform" do
- it "flushes ref caches when the task is 'gc'" do
- expect(subject).to receive(:command).with(:gc).and_return([:the, :command])
- expect(Gitlab::Popen).to receive(:popen).
- with([:the, :command], project.repository.path_to_repo).and_return(["", 0])
+ shared_examples 'flushing ref caches' do |gitaly|
+ it "flushes ref caches when the task if 'gc'" do
+ expect(subject).to receive(:command).with(:gc).and_return([:the, :command])
+
+ if gitaly
+ expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:garbage_collect)
+ .and_return(nil)
+ else
+ expect(Gitlab::Popen).to receive(:popen)
+ .with([:the, :command], project.repository.path_to_repo).and_return(["", 0])
+ end
+
+ expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
+ expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
+ expect_any_instance_of(Repository).to receive(:branch_count).and_call_original
+ expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
+
+ subject.perform(project.id)
+ end
+ end
+
+ context "with Gitaly turned on" do
+ it_should_behave_like 'flushing ref caches', true
+ end
+
+ context "with Gitaly turned off", skip_gitaly_mock: true do
+ it_should_behave_like 'flushing ref caches', false
+ end
- expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
- expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
- expect_any_instance_of(Repository).to receive(:branch_count).and_call_original
- expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
+ context "repack_full" do
+ it "calls Gitaly" do
+ expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:repack_full)
+ .and_return(nil)
- subject.perform(project.id)
+ subject.perform(project.id, :full_repack)
+ end
+ end
+
+ context "repack_incremental" do
+ it "calls Gitaly" do
+ expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:repack_incremental)
+ .and_return(nil)
+
+ subject.perform(project.id, :incremental_repack)
+ end
end
shared_examples 'gc tasks' do
- before { allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled) }
+ before do
+ allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled)
+ end
it 'incremental repack adds a new packfile' do
create_objects(project)
@@ -105,7 +141,7 @@ describe GitGarbageCollectWorker do
author: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'),
committer: Gitlab::Git.committer_hash(email: 'foo@bar', name: 'baz'),
tree: old_commit.tree,
- parents: [old_commit],
+ parents: [old_commit]
)
GitOperationService.new(nil, project.repository).send(
:update_ref,
diff --git a/spec/workers/gitlab_usage_ping_worker_spec.rb b/spec/workers/gitlab_usage_ping_worker_spec.rb
index b6c080f36f4..49b4e04dc7c 100644
--- a/spec/workers/gitlab_usage_ping_worker_spec.rb
+++ b/spec/workers/gitlab_usage_ping_worker_spec.rb
@@ -1,23 +1,13 @@
require 'spec_helper'
describe GitlabUsagePingWorker do
- subject { GitlabUsagePingWorker.new }
+ subject { described_class.new }
- it "sends POST request" do
- stub_application_setting(usage_ping_enabled: true)
+ it 'delegates to SubmitUsagePingService' do
+ allow(subject).to receive(:try_obtain_lease).and_return(true)
- stub_request(:post, "https://version.gitlab.com/usage_data").
- to_return(status: 200, body: '', headers: {})
- expect(Gitlab::UsageData).to receive(:to_json).with({ force_refresh: true }).and_call_original
- expect(subject).to receive(:try_obtain_lease).and_return(true)
+ expect_any_instance_of(SubmitUsagePingService).to receive(:execute)
- expect(subject.perform.response.code.to_i).to eq(200)
- end
-
- it "does not run if usage ping is disabled" do
- stub_application_setting(usage_ping_enabled: false)
-
- expect(subject).not_to receive(:try_obtain_lease)
- expect(subject).not_to receive(:perform)
+ subject.perform
end
end
diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb
index 1ff5a3b9034..a170c84ab12 100644
--- a/spec/workers/group_destroy_worker_spec.rb
+++ b/spec/workers/group_destroy_worker_spec.rb
@@ -3,9 +3,9 @@ require 'spec_helper'
describe GroupDestroyWorker do
let(:group) { create(:group) }
let(:user) { create(:admin) }
- let!(:project) { create(:empty_project, namespace: group) }
+ let!(:project) { create(:project, namespace: group) }
- subject { GroupDestroyWorker.new }
+ subject { described_class.new }
describe "#perform" do
it "deletes the project" do
diff --git a/spec/workers/invalid_gpg_signature_update_worker_spec.rb b/spec/workers/invalid_gpg_signature_update_worker_spec.rb
new file mode 100644
index 00000000000..5972696515b
--- /dev/null
+++ b/spec/workers/invalid_gpg_signature_update_worker_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe InvalidGpgSignatureUpdateWorker do
+ context 'when GpgKey is found' do
+ it 'calls NotificationService.new.run' do
+ gpg_key = create(:gpg_key)
+ invalid_signature_updater = double(:invalid_signature_updater)
+
+ expect(Gitlab::Gpg::InvalidGpgSignatureUpdater).to receive(:new).with(gpg_key).and_return(invalid_signature_updater)
+ expect(invalid_signature_updater).to receive(:run)
+
+ described_class.new.perform(gpg_key.id)
+ end
+ end
+
+ context 'when GpgKey is not found' do
+ let(:nonexisting_gpg_key_id) { -1 }
+
+ it 'does not raise errors' do
+ expect { described_class.new.perform(nonexisting_gpg_key_id) }.not_to raise_error
+ end
+
+ it 'does not call NotificationService.new.run' do
+ expect(Gitlab::Gpg::InvalidGpgSignatureUpdater).not_to receive(:new)
+
+ described_class.new.perform(nonexisting_gpg_key_id)
+ end
+ end
+end
diff --git a/spec/workers/merge_worker_spec.rb b/spec/workers/merge_worker_spec.rb
index b5e1fdb8ded..ee51000161a 100644
--- a/spec/workers/merge_worker_spec.rb
+++ b/spec/workers/merge_worker_spec.rb
@@ -15,7 +15,7 @@ describe MergeWorker do
it 'clears cache of source repo after removing source branch' do
expect(source_project.repository.branch_names).to include('markdown')
- MergeWorker.new.perform(
+ described_class.new.perform(
merge_request.id, merge_request.author_id,
commit_message: 'wow such merge',
should_remove_source_branch: true)
@@ -27,4 +27,15 @@ describe MergeWorker do
expect(source_project.repository.branch_names).not_to include('markdown')
end
end
+
+ it 'persists merge_jid' do
+ merge_request = create(:merge_request, merge_jid: nil)
+ user = create(:user)
+ worker = described_class.new
+
+ allow(worker).to receive(:jid) { '999' }
+
+ expect { worker.perform(merge_request.id, user.id, {}) }
+ .to change { merge_request.reload.merge_jid }.from(nil).to('999')
+ end
end
diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb
new file mode 100644
index 00000000000..f2706254284
--- /dev/null
+++ b/spec/workers/namespaceless_project_destroy_worker_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe NamespacelessProjectDestroyWorker do
+ subject { described_class.new }
+
+ before do
+ # Stub after_save callbacks that will fail when Project has no namespace
+ allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil)
+ allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
+ end
+
+ describe '#perform' do
+ context 'project has namespace' do
+ it 'does not do anything' do
+ project = create(:project)
+
+ subject.perform(project.id)
+
+ expect(Project.unscoped.all).to include(project)
+ end
+ end
+
+ context 'project has no namespace' do
+ let!(:project) do
+ project = build(:project, namespace_id: nil)
+ project.save(validate: false)
+ project
+ end
+
+ context 'project not a fork of another project' do
+ it "truncates the project's team" do
+ expect_any_instance_of(ProjectTeam).to receive(:truncate)
+
+ subject.perform(project.id)
+ end
+
+ it 'deletes the project' do
+ subject.perform(project.id)
+
+ expect(Project.unscoped.all).not_to include(project)
+ end
+
+ it 'does not call unlink_fork' do
+ is_expected.not_to receive(:unlink_fork)
+
+ subject.perform(project.id)
+ end
+
+ it 'does not do anything in Project#remove_pages method' do
+ expect(Gitlab::PagesTransfer).not_to receive(:new)
+
+ subject.perform(project.id)
+ end
+ end
+
+ context 'project forked from another' do
+ let!(:parent_project) { create(:project) }
+
+ before do
+ create(:forked_project_link, forked_to_project: project, forked_from_project: parent_project)
+ end
+
+ it 'closes open merge requests' do
+ merge_request = create(:merge_request, source_project: project, target_project: parent_project)
+
+ subject.perform(project.id)
+
+ expect(merge_request.reload).to be_closed
+ end
+
+ it 'destroys the link' do
+ subject.perform(project.id)
+
+ expect(parent_project.forked_project_links).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/new_issue_worker_spec.rb b/spec/workers/new_issue_worker_spec.rb
new file mode 100644
index 00000000000..4e15ccc534b
--- /dev/null
+++ b/spec/workers/new_issue_worker_spec.rb
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe NewIssueWorker do
+ describe '#perform' do
+ let(:worker) { described_class.new }
+
+ context 'when an issue not found' do
+ it 'does not call Services' do
+ expect(EventCreateService).not_to receive(:new)
+ expect(NotificationService).not_to receive(:new)
+
+ worker.perform(99, create(:user).id)
+ end
+
+ it 'logs an error' do
+ expect(Rails.logger).to receive(:error).with('NewIssueWorker: couldn\'t find Issue with ID=99, skipping job')
+
+ worker.perform(99, create(:user).id)
+ end
+ end
+
+ context 'when a user not found' do
+ it 'does not call Services' do
+ expect(EventCreateService).not_to receive(:new)
+ expect(NotificationService).not_to receive(:new)
+
+ worker.perform(create(:issue).id, 99)
+ end
+
+ it 'logs an error' do
+ expect(Rails.logger).to receive(:error).with('NewIssueWorker: couldn\'t find User with ID=99, skipping job')
+
+ worker.perform(create(:issue).id, 99)
+ end
+ end
+
+ context 'when everything is ok' do
+ let(:project) { create(:project, :public) }
+ let(:mentioned) { create(:user) }
+ let(:user) { create(:user) }
+ let(:issue) { create(:issue, project: project, description: "issue for #{mentioned.to_reference}") }
+
+ it 'creates a new event record' do
+ expect { worker.perform(issue.id, user.id) }.to change { Event.count }.from(0).to(1)
+ end
+
+ it 'creates a notification for the assignee' do
+ expect(Notify).to receive(:new_issue_email).with(mentioned.id, issue.id).and_return(double(deliver_later: true))
+
+ worker.perform(issue.id, user.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/new_merge_request_worker_spec.rb b/spec/workers/new_merge_request_worker_spec.rb
new file mode 100644
index 00000000000..9e0cbde45b1
--- /dev/null
+++ b/spec/workers/new_merge_request_worker_spec.rb
@@ -0,0 +1,56 @@
+require 'spec_helper'
+
+describe NewMergeRequestWorker do
+ describe '#perform' do
+ let(:worker) { described_class.new }
+
+ context 'when a merge request not found' do
+ it 'does not call Services' do
+ expect(EventCreateService).not_to receive(:new)
+ expect(NotificationService).not_to receive(:new)
+
+ worker.perform(99, create(:user).id)
+ end
+
+ it 'logs an error' do
+ expect(Rails.logger).to receive(:error).with('NewMergeRequestWorker: couldn\'t find MergeRequest with ID=99, skipping job')
+
+ worker.perform(99, create(:user).id)
+ end
+ end
+
+ context 'when a user not found' do
+ it 'does not call Services' do
+ expect(EventCreateService).not_to receive(:new)
+ expect(NotificationService).not_to receive(:new)
+
+ worker.perform(create(:merge_request).id, 99)
+ end
+
+ it 'logs an error' do
+ expect(Rails.logger).to receive(:error).with('NewMergeRequestWorker: couldn\'t find User with ID=99, skipping job')
+
+ worker.perform(create(:merge_request).id, 99)
+ end
+ end
+
+ context 'when everything is ok' do
+ let(:project) { create(:project, :public) }
+ let(:mentioned) { create(:user) }
+ let(:user) { create(:user) }
+ let(:merge_request) do
+ create(:merge_request, source_project: project, description: "mr for #{mentioned.to_reference}")
+ end
+
+ it 'creates a new event record' do
+ expect { worker.perform(merge_request.id, user.id) }.to change { Event.count }.from(0).to(1)
+ end
+
+ it 'creates a notification for the assignee' do
+ expect(Notify).to receive(:new_merge_request_email).with(mentioned.id, merge_request.id).and_return(double(deliver_later: true))
+
+ worker.perform(merge_request.id, user.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/new_note_worker_spec.rb b/spec/workers/new_note_worker_spec.rb
index 8fdbb35afd0..575361c93d4 100644
--- a/spec/workers/new_note_worker_spec.rb
+++ b/spec/workers/new_note_worker_spec.rb
@@ -24,8 +24,8 @@ describe NewNoteWorker do
let(:unexistent_note_id) { 999 }
it 'logs NewNoteWorker process skipping' do
- expect(Rails.logger).to receive(:error).
- with("NewNoteWorker: couldn't find note with ID=999, skipping job")
+ expect(Rails.logger).to receive(:error)
+ .with("NewNoteWorker: couldn't find note with ID=999, skipping job")
described_class.new.perform(unexistent_note_id)
end
diff --git a/spec/workers/pipeline_metrics_worker_spec.rb b/spec/workers/pipeline_metrics_worker_spec.rb
index 5dbc0da95c2..ef71125c0b6 100644
--- a/spec/workers/pipeline_metrics_worker_spec.rb
+++ b/spec/workers/pipeline_metrics_worker_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe PipelineMetricsWorker do
let(:project) { create(:project, :repository) }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref, head_pipeline: pipeline) }
let(:pipeline) do
create(:ci_empty_pipeline,
diff --git a/spec/workers/pipeline_notification_worker_spec.rb b/spec/workers/pipeline_notification_worker_spec.rb
index 139032d77bd..eb539ffd893 100644
--- a/spec/workers/pipeline_notification_worker_spec.rb
+++ b/spec/workers/pipeline_notification_worker_spec.rb
@@ -1,8 +1,6 @@
require 'spec_helper'
-describe PipelineNotificationWorker do
- include EmailHelpers
-
+describe PipelineNotificationWorker, :mailer do
let(:pipeline) { create(:ci_pipeline) }
describe '#execute' do
diff --git a/spec/workers/pipeline_proccess_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index 86e9d7f6684..86e9d7f6684 100644
--- a/spec/workers/pipeline_proccess_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
new file mode 100644
index 00000000000..75197039f5a
--- /dev/null
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -0,0 +1,65 @@
+require 'spec_helper'
+
+describe PipelineScheduleWorker do
+ subject { described_class.new.perform }
+
+ set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
+
+ let!(:pipeline_schedule) do
+ create(:ci_pipeline_schedule, :nightly, project: project, owner: user)
+ end
+
+ before do
+ stub_ci_pipeline_to_return_yaml_file
+
+ pipeline_schedule.update_column(:next_run_at, 1.day.ago)
+ end
+
+ context 'when the schedule is runnable by the user' do
+ before do
+ project.add_master(user)
+ end
+
+ context 'when there is a scheduled pipeline within next_run_at' do
+ it 'creates a new pipeline' do
+ expect { subject }.to change { project.pipelines.count }.by(1)
+ expect(Ci::Pipeline.last).to be_schedule
+ end
+
+ it 'updates the next_run_at field' do
+ subject
+
+ expect(pipeline_schedule.reload.next_run_at).to be > Time.now
+ end
+
+ it 'sets the schedule on the pipeline' do
+ subject
+
+ expect(project.pipelines.last.pipeline_schedule).to eq(pipeline_schedule)
+ end
+ end
+
+ context 'inactive schedule' do
+ before do
+ pipeline_schedule.deactivate!
+ end
+
+ it 'does not create a new pipeline' do
+ expect { subject }.not_to change { project.pipelines.count }
+ end
+ end
+ end
+
+ context 'when the schedule is not runnable by the user' do
+ it 'deactivates the schedule' do
+ subject
+
+ expect(pipeline_schedule.reload.active).to be_falsy
+ end
+
+ it 'does not schedule a pipeline' do
+ expect { subject }.not_to change { project.pipelines.count }
+ end
+ end
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index a2a559a2369..af6a3c9f6c7 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -4,13 +4,29 @@ describe PostReceive do
let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
- let(:project) { create(:project, :repository) }
+ let(:gl_repository) { "project-#{project.id}" }
let(:key) { create(:key, user: project.owner) }
let(:key_id) { key.shell_id }
- context "as a resque worker" do
- it "reponds to #perform" do
- expect(PostReceive.new).to respond_to(:perform)
+ let(:project) do
+ create(:project, :repository, auto_cancel_pending_pipelines: 'disabled')
+ end
+
+ context "as a sidekiq worker" do
+ it "responds to #perform" do
+ expect(described_class.new).to respond_to(:perform)
+ end
+ end
+
+ context 'with a non-existing project' do
+ let(:gl_repository) { "project-123456789" }
+ let(:error_message) do
+ "Triggered hook for non-existing project with gl_repository \"#{gl_repository}\""
+ end
+
+ it "returns false and logs an error" do
+ expect(Gitlab::GitLogger).to receive(:error).with("POST-RECEIVE: #{error_message}")
+ expect(described_class.new.perform(gl_repository, key_id, base64_changes)).to be(false)
end
end
@@ -25,7 +41,7 @@ describe PostReceive do
it "calls GitTagPushService" do
expect_any_instance_of(GitPushService).to receive(:execute).and_return(true)
expect_any_instance_of(GitTagPushService).not_to receive(:execute)
- PostReceive.new.perform(pwd(project), key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
@@ -35,7 +51,7 @@ describe PostReceive do
it "calls GitTagPushService" do
expect_any_instance_of(GitPushService).not_to receive(:execute)
expect_any_instance_of(GitTagPushService).to receive(:execute).and_return(true)
- PostReceive.new.perform(pwd(project), key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
@@ -45,12 +61,12 @@ describe PostReceive do
it "does not call any of the services" do
expect_any_instance_of(GitPushService).not_to receive(:execute)
expect_any_instance_of(GitTagPushService).not_to receive(:execute)
- PostReceive.new.perform(pwd(project), key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
context "gitlab-ci.yml" do
- subject { PostReceive.new.perform(pwd(project), key_id, base64_changes) }
+ subject { described_class.new.perform(gl_repository, key_id, base64_changes) }
context "creates a Ci::Pipeline for every change" do
before do
@@ -58,53 +74,70 @@ describe PostReceive do
OpenStruct.new(id: '123456')
end
allow_any_instance_of(Ci::CreatePipelineService).to receive(:branch?).and_return(true)
+ allow_any_instance_of(Repository).to receive(:ref_exists?).and_return(true)
stub_ci_pipeline_to_return_yaml_file
end
- it { expect{ subject }.to change{ Ci::Pipeline.count }.by(2) }
+ it { expect { subject }.to change { Ci::Pipeline.count }.by(2) }
end
context "does not create a Ci::Pipeline" do
- before { stub_ci_pipeline_yaml_file(nil) }
+ before do
+ stub_ci_pipeline_yaml_file(nil)
+ end
- it { expect{ subject }.not_to change{ Ci::Pipeline.count } }
+ it { expect { subject }.not_to change { Ci::Pipeline.count } }
+ end
+ end
+
+ context 'after project changes hooks' do
+ let(:changes) { '123456 789012 refs/heads/tést' }
+ let(:fake_hook_data) { Hash.new(event_name: 'repository_update') }
+
+ before do
+ allow_any_instance_of(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data)
+ # silence hooks so we can isolate
+ allow_any_instance_of(Key).to receive(:post_create_hook).and_return(true)
+ allow_any_instance_of(GitPushService).to receive(:execute).and_return(true)
+ end
+
+ it 'calls SystemHooksService' do
+ expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with(fake_hook_data, :repository_update_hooks).and_return(true)
+
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
end
context "webhook" do
it "fetches the correct project" do
- expect(Project).to receive(:find_by_full_path).with(project.path_with_namespace).and_return(project)
- PostReceive.new.perform(pwd(project), key_id, base64_changes)
+ expect(Project).to receive(:find_by).with(id: project.id.to_s)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
it "does not run if the author is not in the project" do
- allow_any_instance_of(Gitlab::GitPostReceive).
- to receive(:identify_using_ssh_key).
- and_return(nil)
+ allow_any_instance_of(Gitlab::GitPostReceive)
+ .to receive(:identify_using_ssh_key)
+ .and_return(nil)
expect(project).not_to receive(:execute_hooks)
- expect(PostReceive.new.perform(pwd(project), key_id, base64_changes)).to be_falsey
+ expect(described_class.new.perform(gl_repository, key_id, base64_changes)).to be_falsey
end
it "asks the project to trigger all hooks" do
- allow(Project).to receive(:find_by_full_path).and_return(project)
+ allow(Project).to receive(:find_by).and_return(project)
expect(project).to receive(:execute_hooks).twice
expect(project).to receive(:execute_services).twice
- PostReceive.new.perform(pwd(project), key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
it "enqueues a UpdateMergeRequestsWorker job" do
- allow(Project).to receive(:find_by_full_path).and_return(project)
+ allow(Project).to receive(:find_by).and_return(project)
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(project.id, project.owner.id, any_args)
- PostReceive.new.perform(pwd(project), key_id, base64_changes)
+ described_class.new.perform(gl_repository, key_id, base64_changes)
end
end
-
- def pwd(project)
- File.join(Gitlab.config.repositories.storages.default['path'], project.path_with_namespace)
- end
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 9afe2e610b9..24f8ca67594 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -31,16 +31,28 @@ describe ProcessCommitWorker do
worker.perform(project.id, user.id, commit.to_hash)
end
+
+ context 'when commit already exists in upstream project' do
+ let(:forked) { create(:project, :public, :repository) }
+
+ it 'does not process commit message' do
+ create(:forked_project_link, forked_to_project: forked, forked_from_project: project)
+
+ expect(worker).not_to receive(:process_commit_message)
+
+ worker.perform(forked.id, user.id, forked.commit.to_hash)
+ end
+ end
end
describe '#process_commit_message' do
context 'when pushing to the default branch' do
it 'closes issues that should be closed per the commit message' do
- allow(commit).to receive(:safe_message).
- and_return("Closes #{issue.to_reference}")
+ allow(commit).to receive(:safe_message)
+ .and_return("Closes #{issue.to_reference}")
- expect(worker).to receive(:close_issues).
- with(project, user, user, commit, [issue])
+ expect(worker).to receive(:close_issues)
+ .with(project, user, user, commit, [issue])
worker.process_commit_message(project, commit, user, user, true)
end
@@ -48,8 +60,8 @@ describe ProcessCommitWorker do
context 'when pushing to a non-default branch' do
it 'does not close any issues' do
- allow(commit).to receive(:safe_message).
- and_return("Closes #{issue.to_reference}")
+ allow(commit).to receive(:safe_message)
+ .and_return("Closes #{issue.to_reference}")
expect(worker).not_to receive(:close_issues)
@@ -90,8 +102,8 @@ describe ProcessCommitWorker do
describe '#update_issue_metrics' do
it 'updates any existing issue metrics' do
- allow(commit).to receive(:safe_message).
- and_return("Closes #{issue.to_reference}")
+ allow(commit).to receive(:safe_message)
+ .and_return("Closes #{issue.to_reference}")
worker.update_issue_metrics(commit, user)
@@ -101,8 +113,8 @@ describe ProcessCommitWorker do
end
it "doesn't execute any queries with false conditions" do
- allow(commit).to receive(:safe_message).
- and_return("Lorem Ipsum")
+ allow(commit).to receive(:safe_message)
+ .and_return("Lorem Ipsum")
expect { worker.update_issue_metrics(commit, user) }.not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
end
@@ -116,8 +128,8 @@ describe ProcessCommitWorker do
end
it 'parses date strings into Time instances' do
- commit = worker.
- build_commit(project, id: '123', authored_date: Time.now.to_s)
+ commit = worker
+ .build_commit(project, id: '123', authored_date: Time.now.to_s)
expect(commit.authored_date).to be_an_instance_of(Time)
end
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index c23ffdf99c0..6b1f2ff3227 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -7,8 +7,8 @@ describe ProjectCacheWorker do
describe '#perform' do
before do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).
- and_return(true)
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain)
+ .and_return(true)
end
context 'with a non-existing project' do
@@ -39,21 +39,33 @@ describe ProjectCacheWorker do
end
it 'refreshes the method caches' do
- expect_any_instance_of(Repository).to receive(:refresh_method_caches).
- with(%i(readme)).
- and_call_original
+ expect_any_instance_of(Repository).to receive(:refresh_method_caches)
+ .with(%i(readme))
+ .and_call_original
worker.perform(project.id, %w(readme))
end
+
+ context 'with plain readme' do
+ it 'refreshes the method caches' do
+ allow(MarkupHelper).to receive(:gitlab_markdown?).and_return(false)
+ allow(MarkupHelper).to receive(:plain?).and_return(true)
+
+ expect_any_instance_of(Repository).to receive(:refresh_method_caches)
+ .with(%i(readme))
+ .and_call_original
+ worker.perform(project.id, %w(readme))
+ end
+ end
end
end
describe '#update_statistics' do
context 'when a lease could not be obtained' do
it 'does not update the repository size' do
- allow(worker).to receive(:try_obtain_lease_for).
- with(project.id, :update_statistics).
- and_return(false)
+ allow(worker).to receive(:try_obtain_lease_for)
+ .with(project.id, :update_statistics)
+ .and_return(false)
expect(statistics).not_to receive(:refresh!)
@@ -63,9 +75,9 @@ describe ProjectCacheWorker do
context 'when a lease could be obtained' do
it 'updates the project statistics' do
- allow(worker).to receive(:try_obtain_lease_for).
- with(project.id, :update_statistics).
- and_return(true)
+ allow(worker).to receive(:try_obtain_lease_for)
+ .with(project.id, :update_statistics)
+ .and_return(true)
expect(statistics).to receive(:refresh!)
.with(only: %i(repository_size))
diff --git a/spec/workers/project_destroy_worker_spec.rb b/spec/workers/project_destroy_worker_spec.rb
index 0ab42f99510..f19c9dff941 100644
--- a/spec/workers/project_destroy_worker_spec.rb
+++ b/spec/workers/project_destroy_worker_spec.rb
@@ -1,24 +1,36 @@
require 'spec_helper'
describe ProjectDestroyWorker do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, pending_delete: true) }
let(:path) { project.repository.path_to_repo }
- subject { ProjectDestroyWorker.new }
+ subject { described_class.new }
- describe "#perform" do
- it "deletes the project" do
+ describe '#perform' do
+ it 'deletes the project' do
subject.perform(project.id, project.owner.id, {})
expect(Project.all).not_to include(project)
expect(Dir.exist?(path)).to be_falsey
end
- it "deletes the project but skips repo deletion" do
+ it 'deletes the project but skips repo deletion' do
subject.perform(project.id, project.owner.id, { "skip_repo" => true })
expect(Project.all).not_to include(project)
expect(Dir.exist?(path)).to be_truthy
end
+
+ it 'does not raise error when project could not be found' do
+ expect do
+ subject.perform(-1, project.owner.id, {})
+ end.not_to raise_error
+ end
+
+ it 'does not raise error when user could not be found' do
+ expect do
+ subject.perform(project.id, -1, {})
+ end.not_to raise_error
+ end
end
end
diff --git a/spec/workers/propagate_service_template_worker_spec.rb b/spec/workers/propagate_service_template_worker_spec.rb
new file mode 100644
index 00000000000..b8b65ead9b3
--- /dev/null
+++ b/spec/workers/propagate_service_template_worker_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe PropagateServiceTemplateWorker do
+ let!(:service_template) do
+ PushoverService.create(
+ template: true,
+ active: true,
+ properties: {
+ device: 'MyDevice',
+ sound: 'mic',
+ priority: 4,
+ user_key: 'asdf',
+ api_key: '123456789'
+ })
+ end
+
+ before do
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain)
+ .and_return(true)
+ end
+
+ describe '#perform' do
+ it 'calls the propagate service with the template' do
+ expect(Projects::PropagateServiceTemplate).to receive(:propagate).with(service_template)
+
+ subject.perform(service_template.id)
+ end
+ end
+end
diff --git a/spec/workers/prune_old_events_worker_spec.rb b/spec/workers/prune_old_events_worker_spec.rb
index 35e1518a35e..ea974355050 100644
--- a/spec/workers/prune_old_events_worker_spec.rb
+++ b/spec/workers/prune_old_events_worker_spec.rb
@@ -2,9 +2,11 @@ require 'spec_helper'
describe PruneOldEventsWorker do
describe '#perform' do
- let!(:expired_event) { create(:event, author_id: 0, created_at: 13.months.ago) }
- let!(:not_expired_event) { create(:event, author_id: 0, created_at: 1.day.ago) }
- let!(:exactly_12_months_event) { create(:event, author_id: 0, created_at: 12.months.ago) }
+ let(:user) { create(:user) }
+
+ let!(:expired_event) { create(:event, :closed, author: user, created_at: 13.months.ago) }
+ let!(:not_expired_event) { create(:event, :closed, author: user, created_at: 1.day.ago) }
+ let!(:exactly_12_months_event) { create(:event, :closed, author: user, created_at: 12.months.ago) }
it 'prunes events older than 12 months' do
expect { subject.perform }.to change { Event.count }.by(-1)
diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb
index 402aa1e714e..058fdf4c009 100644
--- a/spec/workers/remove_expired_members_worker_spec.rb
+++ b/spec/workers/remove_expired_members_worker_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe RemoveExpiredMembersWorker do
- let(:worker) { RemoveExpiredMembersWorker.new }
+ let(:worker) { described_class.new }
describe '#perform' do
context 'project members' do
diff --git a/spec/workers/remove_old_web_hook_logs_worker_spec.rb b/spec/workers/remove_old_web_hook_logs_worker_spec.rb
new file mode 100644
index 00000000000..6d26ba5dfa0
--- /dev/null
+++ b/spec/workers/remove_old_web_hook_logs_worker_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe RemoveOldWebHookLogsWorker do
+ subject { described_class.new }
+
+ describe '#perform' do
+ let!(:week_old_record) { create(:web_hook_log, created_at: Time.now - 1.week) }
+ let!(:three_days_old_record) { create(:web_hook_log, created_at: Time.now - 3.days) }
+ let!(:one_day_old_record) { create(:web_hook_log, created_at: Time.now - 1.day) }
+
+ it 'removes web hook logs older than 2 days' do
+ subject.perform
+
+ expect(WebHookLog.all).to include(one_day_old_record)
+ expect(WebHookLog.all).not_to include(week_old_record, three_days_old_record)
+ end
+ end
+end
diff --git a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
index 6d42946de38..57f83c1dbe9 100644
--- a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
+++ b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
@@ -1,13 +1,13 @@
require 'spec_helper'
describe RemoveUnreferencedLfsObjectsWorker do
- let(:worker) { RemoveUnreferencedLfsObjectsWorker.new }
+ let(:worker) { described_class.new }
describe '#perform' do
let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1') }
let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2') }
- let!(:project1) { create(:empty_project, lfs_enabled: true) }
- let!(:project2) { create(:empty_project, lfs_enabled: true) }
+ let!(:project1) { create(:project, lfs_enabled: true) }
+ let!(:project2) { create(:project, lfs_enabled: true) }
let!(:referenced_lfs_object1) { create(:lfs_object, oid: '3') }
let!(:referenced_lfs_object2) { create(:lfs_object, oid: '4') }
let!(:lfs_objects_project1_1) do
diff --git a/spec/workers/repository_check/batch_worker_spec.rb b/spec/workers/repository_check/batch_worker_spec.rb
index bcd97a4f6ef..850b8cd8f5c 100644
--- a/spec/workers/repository_check/batch_worker_spec.rb
+++ b/spec/workers/repository_check/batch_worker_spec.rb
@@ -4,7 +4,7 @@ describe RepositoryCheck::BatchWorker do
subject { described_class.new }
it 'prefers projects that have never been checked' do
- projects = create_list(:empty_project, 3, created_at: 1.week.ago)
+ projects = create_list(:project, 3, created_at: 1.week.ago)
projects[0].update_column(:last_repository_check_at, 4.months.ago)
projects[2].update_column(:last_repository_check_at, 3.months.ago)
@@ -12,7 +12,7 @@ describe RepositoryCheck::BatchWorker do
end
it 'sorts projects by last_repository_check_at' do
- projects = create_list(:empty_project, 3, created_at: 1.week.ago)
+ projects = create_list(:project, 3, created_at: 1.week.ago)
projects[0].update_column(:last_repository_check_at, 2.months.ago)
projects[1].update_column(:last_repository_check_at, 4.months.ago)
projects[2].update_column(:last_repository_check_at, 3.months.ago)
@@ -21,7 +21,7 @@ describe RepositoryCheck::BatchWorker do
end
it 'excludes projects that were checked recently' do
- projects = create_list(:empty_project, 3, created_at: 1.week.ago)
+ projects = create_list(:project, 3, created_at: 1.week.ago)
projects[0].update_column(:last_repository_check_at, 2.days.ago)
projects[1].update_column(:last_repository_check_at, 2.months.ago)
projects[2].update_column(:last_repository_check_at, 3.days.ago)
@@ -30,7 +30,7 @@ describe RepositoryCheck::BatchWorker do
end
it 'does nothing when repository checks are disabled' do
- create(:empty_project, created_at: 1.week.ago)
+ create(:project, created_at: 1.week.ago)
current_settings = double('settings', repository_checks_enabled: false)
expect(subject).to receive(:current_settings) { current_settings }
@@ -38,7 +38,7 @@ describe RepositoryCheck::BatchWorker do
end
it 'skips projects created less than 24 hours ago' do
- project = create(:empty_project)
+ project = create(:project)
project.update_column(:created_at, 23.hours.ago)
expect(subject.perform).to eq([])
diff --git a/spec/workers/repository_check/clear_worker_spec.rb b/spec/workers/repository_check/clear_worker_spec.rb
index a3b70c74787..1c49415d46c 100644
--- a/spec/workers/repository_check/clear_worker_spec.rb
+++ b/spec/workers/repository_check/clear_worker_spec.rb
@@ -2,10 +2,10 @@ require 'spec_helper'
describe RepositoryCheck::ClearWorker do
it 'clears repository check columns' do
- project = create(:empty_project)
+ project = create(:project)
project.update_columns(
last_repository_check_failed: true,
- last_repository_check_at: Time.now,
+ last_repository_check_at: Time.now
)
described_class.new.perform
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 7d6a2db2972..d9e9409840f 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -1,11 +1,11 @@
require 'spec_helper'
describe RepositoryForkWorker do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, :import_scheduled) }
let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
- subject { RepositoryForkWorker.new }
+ subject { described_class.new }
before do
allow(subject).to receive(:gitlab_shell).and_return(shell)
@@ -35,26 +35,38 @@ describe RepositoryForkWorker do
fork_project.namespace.full_path
).and_return(true)
- expect_any_instance_of(Repository).to receive(:expire_emptiness_caches).
- and_call_original
+ expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
+ .and_call_original
- expect_any_instance_of(Repository).to receive(:expire_exists_cache).
- and_call_original
+ expect_any_instance_of(Repository).to receive(:expire_exists_cache)
+ .and_call_original
subject.perform(project.id, '/test/path', project.full_path,
fork_project.namespace.full_path)
end
it "handles bad fork" do
+ source_path = project.full_path
+ target_path = fork_project.namespace.full_path
+ error_message = "Unable to fork project #{project.id} for repository #{source_path} -> #{target_path}"
+
expect(shell).to receive(:fork_repository).and_return(false)
- expect(subject.logger).to receive(:error)
+ expect do
+ subject.perform(project.id, '/test/path', source_path, target_path)
+ end.to raise_error(RepositoryForkWorker::ForkError, error_message)
+ end
- subject.perform(
- project.id,
- '/test/path',
- project.full_path,
- fork_project.namespace.full_path)
+ it 'handles unexpected error' do
+ source_path = project.full_path
+ target_path = fork_project.namespace.full_path
+
+ allow_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_raise(RuntimeError)
+
+ expect do
+ subject.perform(project.id, '/test/path', source_path, target_path)
+ end.to raise_error(RepositoryForkWorker::ForkError)
+ expect(project.reload.import_status).to eq('failed')
end
end
end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index 5a2c0671dac..100dfc32bbe 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -1,15 +1,15 @@
require 'spec_helper'
describe RepositoryImportWorker do
- let(:project) { create(:empty_project) }
+ let(:project) { create(:project, :import_scheduled) }
subject { described_class.new }
describe '#perform' do
context 'when the import was successful' do
it 'imports a project' do
- expect_any_instance_of(Projects::ImportService).to receive(:execute).
- and_return({ status: :ok })
+ expect_any_instance_of(Projects::ImportService).to receive(:execute)
+ .and_return({ status: :ok })
expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
expect_any_instance_of(Project).to receive(:import_finish)
@@ -21,15 +21,26 @@ describe RepositoryImportWorker do
context 'when the import has failed' do
it 'hide the credentials that were used in the import URL' do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
- expect_any_instance_of(Projects::ImportService).to receive(:execute).
- and_return({ status: :error, message: error })
- allow(subject).to receive(:jid).and_return('123')
- subject.perform(project.id)
+ project.update_attributes(import_jid: '123')
+ expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
- expect(project.reload.import_error).to include("https://*****:*****@test.com/root/repoC.git/")
+ expect do
+ subject.perform(project.id)
+ end.to raise_error(RepositoryImportWorker::ImportError, error)
expect(project.reload.import_jid).not_to be_nil
end
end
+
+ context 'with unexpected error' do
+ it 'marks import as failed' do
+ allow_any_instance_of(Projects::ImportService).to receive(:execute).and_raise(RuntimeError)
+
+ expect do
+ subject.perform(project.id)
+ end.to raise_error(RepositoryImportWorker::ImportError)
+ expect(project.reload.import_status).to eq('failed')
+ end
+ end
end
end
diff --git a/spec/workers/schedule_update_user_activity_worker_spec.rb b/spec/workers/schedule_update_user_activity_worker_spec.rb
index e583c3203aa..32c59381b01 100644
--- a/spec/workers/schedule_update_user_activity_worker_spec.rb
+++ b/spec/workers/schedule_update_user_activity_worker_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe ScheduleUpdateUserActivityWorker, :redis do
+describe ScheduleUpdateUserActivityWorker, :clean_gitlab_redis_shared_state do
let(:now) { Time.now }
before do
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index 8434b0c8e5b..549635f7f33 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -34,7 +34,9 @@ describe StuckCiJobsWorker do
let(:status) { 'pending' }
context 'when job is not stuck' do
- before { allow_any_instance_of(Ci::Build).to receive(:stuck?).and_return(false) }
+ before do
+ allow_any_instance_of(Ci::Build).to receive(:stuck?).and_return(false)
+ end
context 'when job was not updated for more than 1 day ago' do
let(:updated_at) { 2.days.ago }
@@ -53,7 +55,9 @@ describe StuckCiJobsWorker do
end
context 'when job is stuck' do
- before { allow_any_instance_of(Ci::Build).to receive(:stuck?).and_return(true) }
+ before do
+ allow_any_instance_of(Ci::Build).to receive(:stuck?).and_return(true)
+ end
context 'when job was not updated for more than 1 hour ago' do
let(:updated_at) { 2.hours.ago }
@@ -93,7 +97,9 @@ describe StuckCiJobsWorker do
let(:status) { 'running' }
let(:updated_at) { 2.days.ago }
- before { job.project.update(pending_delete: true) }
+ before do
+ job.project.update(pending_delete: true)
+ end
it 'does not drop job' do
expect_any_instance_of(Ci::Build).not_to receive(:drop)
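
The edits in this file are purely stylistic (single-line before blocks expanded to do...end), but the surrounding contexts outline the worker's timeout rules: pending jobs that are not stuck get a long grace period, stuck pending jobs a short one, and jobs on projects flagged pending_delete are never dropped. A rough sketch of that logic, with scope names and thresholds assumed from the visible examples rather than taken from the real worker:

    # Sketch of the timeout behaviour the contexts describe; thresholds are assumptions.
    class StuckCiJobsWorker
      SHORT_TIMEOUT = 1.hour # stuck pending jobs and running jobs
      LONG_TIMEOUT  = 1.day  # pending jobs that are not stuck

      def perform
        Ci::Build.where(status: %w[pending running]).find_each do |job|
          timeout = job.status == 'pending' && !job.stuck? ? LONG_TIMEOUT : SHORT_TIMEOUT

          next if job.updated_at > timeout.ago # touched recently enough
          next if job.project.pending_delete?  # project is being deleted, leave the job alone

          job.drop
        end
      end
    end
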
diff --git a/spec/workers/stuck_import_jobs_worker_spec.rb b/spec/workers/stuck_import_jobs_worker_spec.rb
index 466277a5e5e..a82eb54ffe4 100644
--- a/spec/workers/stuck_import_jobs_worker_spec.rb
+++ b/spec/workers/stuck_import_jobs_worker_spec.rb
@@ -8,29 +8,29 @@ describe StuckImportJobsWorker do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(exclusive_lease_uuid)
end
- describe 'long running import' do
- let(:project) { create(:empty_project, import_jid: '123', import_status: 'started') }
+ describe 'with started import_status' do
+ let(:project) { create(:project, :import_started, import_jid: '123') }
- before do
- allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(['123'])
- end
+ describe 'long running import' do
+ it 'marks the project as failed' do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(['123'])
- it 'marks the project as failed' do
- expect { worker.perform }.to change { project.reload.import_status }.to('failed')
+ expect { worker.perform }.to change { project.reload.import_status }.to('failed')
+ end
end
- end
- describe 'running import' do
- let(:project) { create(:empty_project, import_jid: '123', import_status: 'started') }
-
- before do
- allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
- end
+ describe 'running import' do
+ it 'does not mark the project as failed' do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
- it 'does not mark the project as failed' do
- worker.perform
+ expect { worker.perform }.not_to change { project.reload.import_status }
+ end
- expect(project.reload.import_status).to eq('started')
+ describe 'import without import_jid' do
+ it 'marks the project as failed' do
+ expect { worker.perform }.to change { project.reload.import_status }.to('failed')
+ end
+ end
end
end
end
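
Reorganised, the spec now pins three outcomes for projects whose import_status is started: failed when Sidekiq reports the import_jid as completed, failed when no import_jid was recorded at all, and untouched while the jid is still running. Behaviour along these lines would satisfy all three examples; the scope and the completed_jids argument are assumptions.

    # Sketch of the behaviour described above; not the actual worker.
    class StuckImportJobsWorker
      def perform
        started   = Project.where(import_status: 'started')
        completed = Gitlab::SidekiqStatus.completed_jids(started.pluck(:import_jid).compact)

        started.find_each do |project|
          # Still running in Sidekiq: leave it alone.
          next if project.import_jid.present? && !completed.include?(project.import_jid)

          # The job finished without updating the project, or no jid was ever
          # recorded, so the import is stuck: mark it failed.
          project.update_column(:import_status, 'failed')
        end
      end
    end
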
diff --git a/spec/workers/stuck_merge_jobs_worker_spec.rb b/spec/workers/stuck_merge_jobs_worker_spec.rb
new file mode 100644
index 00000000000..a5ad78393c9
--- /dev/null
+++ b/spec/workers/stuck_merge_jobs_worker_spec.rb
@@ -0,0 +1,50 @@
+require 'spec_helper'
+
+describe StuckMergeJobsWorker do
+ describe 'perform' do
+ let(:worker) { described_class.new }
+
+ context 'merge job identified as completed' do
+ it 'updates merge request to merged when locked but has merge_commit_sha' do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(%w(123 456))
+ mr_with_sha = create(:merge_request, :locked, merge_jid: '123', state: :locked, merge_commit_sha: 'foo-bar-baz')
+ mr_without_sha = create(:merge_request, :locked, merge_jid: '123', state: :locked, merge_commit_sha: nil)
+
+ worker.perform
+
+ expect(mr_with_sha.reload).to be_merged
+ expect(mr_without_sha.reload).to be_opened
+ end
+
+ it 'updates merge request to opened when locked but has not been merged' do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(%w(123))
+ merge_request = create(:merge_request, :locked, merge_jid: '123', state: :locked)
+
+ worker.perform
+
+ expect(merge_request.reload).to be_opened
+ end
+
+ it 'logs updated stuck merge job ids' do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return(%w(123 456))
+
+ create(:merge_request, :locked, merge_jid: '123')
+ create(:merge_request, :locked, merge_jid: '456')
+
+ expect(Rails).to receive_message_chain(:logger, :info).with('Updated state of locked merge jobs. JIDs: 123, 456')
+
+ worker.perform
+ end
+ end
+
+ context 'merge job not identified as completed' do
+ it 'does not change merge request state when job is not completed yet' do
+ allow(Gitlab::SidekiqStatus).to receive(:completed_jids).and_return([])
+
+ merge_request = create(:merge_request, :locked, merge_jid: '123')
+
+ expect { worker.perform }.not_to change { merge_request.reload.state }.from('locked')
+ end
+ end
+ end
+end
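
The new spec fixes the contract for StuckMergeJobsWorker: among locked merge requests whose merge_jid Sidekiq reports as completed, those that already carry a merge_commit_sha move to merged, the rest are reopened, and the affected JIDs are logged; requests whose jobs are still running stay locked. A sketch consistent with those examples (the query and the direct state writes are assumptions; only completed_jids, merge_commit_sha and the log message come from the spec):

    # Sketch matching the examples above; not the actual worker.
    class StuckMergeJobsWorker
      def perform
        locked = MergeRequest.where(state: 'locked').where.not(merge_jid: nil)
        completed_jids = Gitlab::SidekiqStatus.completed_jids(locked.pluck(:merge_jid))
        return if completed_jids.empty?

        # Materialise the stuck records before updating, so the jids stay available.
        stuck = locked.where(merge_jid: completed_jids).to_a
        stuck.each do |merge_request|
          new_state = merge_request.merge_commit_sha.present? ? 'merged' : 'opened'
          merge_request.update_column(:state, new_state)
        end

        Rails.logger.info("Updated state of locked merge jobs. JIDs: #{stuck.map(&:merge_jid).join(', ')}")
      end
    end
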
diff --git a/spec/workers/trigger_schedule_worker_spec.rb b/spec/workers/trigger_schedule_worker_spec.rb
deleted file mode 100644
index 861bed4442e..00000000000
--- a/spec/workers/trigger_schedule_worker_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-require 'spec_helper'
-
-describe TriggerScheduleWorker do
- let(:worker) { described_class.new }
-
- before do
- stub_ci_pipeline_to_return_yaml_file
- end
-
- context 'when there is a scheduled trigger within next_run_at' do
- let(:next_run_at) { 2.days.ago }
-
- let!(:trigger_schedule) do
- create(:ci_trigger_schedule, :nightly)
- end
-
- before do
- trigger_schedule.update_column(:next_run_at, next_run_at)
- end
-
- it 'creates a new trigger request' do
- expect { worker.perform }.to change { Ci::TriggerRequest.count }
- end
-
- it 'creates a new pipeline' do
- expect { worker.perform }.to change { Ci::Pipeline.count }
- expect(Ci::Pipeline.last).to be_pending
- end
-
- it 'updates next_run_at' do
- worker.perform
-
- expect(trigger_schedule.reload.next_run_at).not_to eq(next_run_at)
- end
-
- context 'inactive schedule' do
- before do
- trigger_schedule.update(active: false)
- end
-
- it 'does not create a new trigger' do
- expect { worker.perform }.not_to change { Ci::TriggerRequest.count }
- end
- end
- end
-
- context 'when there are no scheduled triggers within next_run_at' do
- before { create(:ci_trigger_schedule, :nightly) }
-
- it 'does not create a new pipeline' do
- expect { worker.perform }.not_to change { Ci::Pipeline.count }
- end
-
- it 'does not update next_run_at' do
- expect { worker.perform }.not_to change { Ci::TriggerSchedule.last.next_run_at }
- end
- end
-
- context 'when next_run_at is nil' do
- before do
- schedule = create(:ci_trigger_schedule, :nightly)
- schedule.update_column(:next_run_at, nil)
- end
-
- it 'does not create a new pipeline' do
- expect { worker.perform }.not_to change { Ci::Pipeline.count }
- end
-
- it 'does not update next_run_at' do
- expect { worker.perform }.not_to change { Ci::TriggerSchedule.last.next_run_at }
- end
- end
-end
diff --git a/spec/workers/update_user_activity_worker_spec.rb b/spec/workers/update_user_activity_worker_spec.rb
index 43e9511f116..268ca1d81f2 100644
--- a/spec/workers/update_user_activity_worker_spec.rb
+++ b/spec/workers/update_user_activity_worker_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe UpdateUserActivityWorker, :redis do
+describe UpdateUserActivityWorker, :clean_gitlab_redis_shared_state do
let(:user_active_2_days_ago) { create(:user, current_sign_in_at: 10.months.ago) }
let(:user_active_yesterday_1) { create(:user) }
let(:user_active_yesterday_2) { create(:user) }
@@ -25,7 +25,7 @@ describe UpdateUserActivityWorker, :redis do
end
end
- it 'deletes the pairs from Redis' do
+ it 'deletes the pairs from SharedState' do
data.each { |id, time| Gitlab::UserActivities.record(id, time) }
subject.perform(data)