Diffstat (limited to 'spec/services/merge_requests/base_service_spec.rb')
-rw-r--r--  spec/services/merge_requests/base_service_spec.rb  68
1 file changed, 67 insertions(+), 1 deletion(-)
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index 6eeba3029ae..bd907ba6015 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -2,7 +2,15 @@
require 'spec_helper'
-RSpec.describe MergeRequests::BaseService do
+module MergeRequests
+ class ExampleService < MergeRequests::BaseService
+ def execute(merge_request, async: false, allow_duplicate: false)
+ create_pipeline_for(merge_request, current_user, async: async, allow_duplicate: allow_duplicate)
+ end
+ end
+end
+
+RSpec.describe MergeRequests::BaseService, feature_category: :code_review_workflow do
include ProjectForksHelper
let_it_be(:project) { create(:project, :repository) }
@@ -57,4 +65,62 @@ RSpec.describe MergeRequests::BaseService do
it_behaves_like 'does not enqueue Jira sync worker'
end
end
+
+  describe '#create_pipeline_for' do
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ subject { MergeRequests::ExampleService.new(project: project, current_user: project.first_owner, params: params) }
+
+ context 'async: false' do
+ it 'creates a pipeline directly' do
+ expect(MergeRequests::CreatePipelineService)
+ .to receive(:new)
+ .with(hash_including(project: project, current_user: project.first_owner, params: { allow_duplicate: false }))
+ .and_call_original
+ expect(MergeRequests::CreatePipelineWorker).not_to receive(:perform_async)
+
+ subject.execute(merge_request, async: false)
+ end
+
+ context 'allow_duplicate: true' do
+ it 'passes :allow_duplicate as true' do
+ expect(MergeRequests::CreatePipelineService)
+ .to receive(:new)
+ .with(hash_including(project: project, current_user: project.first_owner, params: { allow_duplicate: true }))
+ .and_call_original
+ expect(MergeRequests::CreatePipelineWorker).not_to receive(:perform_async)
+
+ subject.execute(merge_request, async: false, allow_duplicate: true)
+ end
+ end
+ end
+
+ context 'async: true' do
+      it 'enqueues a CreatePipelineWorker' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, project.first_owner.id, merge_request.id, { "allow_duplicate" => false })
+ .and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute(merge_request, async: true) }.to change(MergeRequests::CreatePipelineWorker.jobs, :size).by(1)
+ end
+ end
+
+ context 'allow_duplicate: true' do
+ it 'passes :allow_duplicate as true' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, project.first_owner.id, merge_request.id, { "allow_duplicate" => true })
+ .and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute(merge_request, async: true, allow_duplicate: true) }.to change(MergeRequests::CreatePipelineWorker.jobs, :size).by(1)
+ end
+ end
+ end
+ end
+ end
end
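
The examples added above pin down the dispatch behaviour of create_pipeline_for: with async: false the service instantiates MergeRequests::CreatePipelineService, passing allow_duplicate through params, and with async: true it enqueues MergeRequests::CreatePipelineWorker with string-keyed arguments (Sidekiq serializes job arguments to JSON, so symbol keys become strings). The sketch below is inferred from those assertions alone and is not the production BaseService implementation; the exact params handling is an assumption.

# Hypothetical sketch, assuming only what the spec asserts; the real
# method may merge additional service params before dispatching.
def create_pipeline_for(merge_request, user, async: false, allow_duplicate: false)
  if async
    # Worker arguments must be JSON-serializable, hence the string key
    # matched by the expectation ({ "allow_duplicate" => ... }).
    MergeRequests::CreatePipelineWorker.perform_async(
      project.id, user.id, merge_request.id,
      { 'allow_duplicate' => allow_duplicate }
    )
  else
    MergeRequests::CreatePipelineService
      .new(project: project, current_user: user, params: { allow_duplicate: allow_duplicate })
      .execute(merge_request)
  end
end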