Diffstat (limited to 'spec/services/ci')
-rw-r--r--  spec/services/ci/compare_accessibility_reports_service_spec.rb | 62
-rw-r--r--  spec/services/ci/compare_test_reports_service_spec.rb | 7
-rw-r--r--  spec/services/ci/create_job_artifacts_service_spec.rb | 67
-rw-r--r--  spec/services/ci/create_pipeline_service/custom_config_content_spec.rb | 4
-rw-r--r--  spec/services/ci/daily_build_group_report_result_service_spec.rb (renamed from spec/services/ci/daily_report_result_service_spec.rb) | 41
-rw-r--r--  spec/services/ci/destroy_expired_job_artifacts_service_spec.rb | 26
-rw-r--r--  spec/services/ci/generate_terraform_reports_service_spec.rb | 71
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb | 15
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb | 6
-rw-r--r--  spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb | 19
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service.rb | 25
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb | 57
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml | 47
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml | 39
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml | 39
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml | 43
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml | 62
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml | 63
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml | 40
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml | 35
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml | 35
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml | 35
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml | 63
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml | 64
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml | 43
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml | 66
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml | 58
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml | 27
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml | 48
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml | 42
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml | 66
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml | 40
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml | 53
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml | 38
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml | 39
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml | 65
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml | 54
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml | 44
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml | 52
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml | 52
-rw-r--r--  spec/services/ci/pipeline_schedule_service_spec.rb | 32
-rw-r--r--  spec/services/ci/process_pipeline_service_spec.rb | 19
-rw-r--r--  spec/services/ci/register_job_service_spec.rb | 2
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 50
-rw-r--r--  spec/services/ci/retry_pipeline_service_spec.rb | 19
-rw-r--r--  spec/services/ci/update_instance_variables_service_spec.rb | 230
46 files changed, 2001 insertions, 103 deletions
diff --git a/spec/services/ci/compare_accessibility_reports_service_spec.rb b/spec/services/ci/compare_accessibility_reports_service_spec.rb
new file mode 100644
index 00000000000..aee1fd14bc5
--- /dev/null
+++ b/spec/services/ci/compare_accessibility_reports_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::CompareAccessibilityReportsService do
+ let(:service) { described_class.new(project) }
+ let(:project) { create(:project, :repository) }
+
+ describe '#execute' do
+ subject { service.execute(base_pipeline, head_pipeline) }
+
+ context 'when head pipeline has accessibility reports' do
+ let(:base_pipeline) { nil }
+ let(:head_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject[:data]).to match_schema('entities/accessibility_reports_comparer')
+ end
+ end
+
+ context 'when base and head pipelines have accessibility reports' do
+ let(:base_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+ let(:head_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+
+ it 'returns status and data' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject[:data]).to match_schema('entities/accessibility_reports_comparer')
+ end
+ end
+ end
+
+ describe '#latest?' do
+ subject { service.latest?(base_pipeline, head_pipeline, data) }
+
+ let!(:base_pipeline) { nil }
+ let!(:head_pipeline) { create(:ci_pipeline, :with_accessibility_reports, project: project) }
+ let!(:key) { service.send(:key, base_pipeline, head_pipeline) }
+
+ context 'when cache key is latest' do
+ let(:data) { { key: key } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when cache key is outdated' do
+ before do
+ head_pipeline.update_column(:updated_at, 10.minutes.ago)
+ end
+
+ let(:data) { { key: key } }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when cache key is empty' do
+ let(:data) { { key: nil } }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+end
diff --git a/spec/services/ci/compare_test_reports_service_spec.rb b/spec/services/ci/compare_test_reports_service_spec.rb
index f5edd3a552d..46f4d2d42ff 100644
--- a/spec/services/ci/compare_test_reports_service_spec.rb
+++ b/spec/services/ci/compare_test_reports_service_spec.rb
@@ -38,9 +38,10 @@ describe Ci::CompareTestReportsService do
create(:ci_job_artifact, :junit_with_corrupted_data, job: build, project: project)
end
- it 'returns status and error message' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:status_reason]).to include('XML parsing failed')
+ it 'returns a parsed TestReports success status and failure on the individual suite' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject.dig(:data, 'status')).to eq('success')
+ expect(subject.dig(:data, 'suites', 0, 'status')).to eq('error')
end
end
end
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
index fe64a66f322..4d49923a184 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -30,6 +30,26 @@ describe Ci::CreateJobArtifactsService do
describe '#execute' do
subject { service.execute(job, artifacts_file, params, metadata_file: metadata_file) }
+ context 'locking' do
+ let(:old_job) { create(:ci_build, pipeline: create(:ci_pipeline, project: job.project, ref: job.ref)) }
+ let!(:latest_artifact) { create(:ci_job_artifact, job: old_job, locked: true) }
+ let!(:other_artifact) { create(:ci_job_artifact, locked: true) }
+
+ it 'locks the new artifact' do
+ subject
+
+ expect(Ci::JobArtifact.last).to have_attributes(locked: true)
+ end
+
+ it 'unlocks all other artifacts for the same ref' do
+ expect { subject }.to change { latest_artifact.reload.locked }.from(true).to(false)
+ end
+
+ it 'does not unlock artifacts for other refs' do
+ expect { subject }.not_to change { other_artifact.reload.locked }.from(true)
+ end
+ end
+
context 'when artifacts file is uploaded' do
it 'saves artifact for the given type' do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
@@ -157,6 +177,53 @@ describe Ci::CreateJobArtifactsService do
end
end
+ context 'when artifact type is cluster_applications' do
+ let(:artifacts_file) do
+ file_to_upload('spec/fixtures/helm/helm_list_v2_prometheus_missing.json.gz', sha256: artifacts_sha256)
+ end
+
+ let(:params) do
+ {
+ 'artifact_type' => 'cluster_applications',
+ 'artifact_format' => 'gzip'
+ }
+ end
+
+ it 'calls cluster applications parse service' do
+ expect_next_instance_of(Clusters::ParseClusterApplicationsArtifactService) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ subject
+ end
+
+ context 'when there is a deployment cluster' do
+ let(:user) { project.owner }
+
+ before do
+ job.update!(user: user)
+ end
+
+ it 'calls cluster applications parse service with job and job user', :aggregate_failures do
+ expect(Clusters::ParseClusterApplicationsArtifactService).to receive(:new).with(job, user).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_synchronous_artifact_parsing: false)
+ end
+
+ it 'does not call parse service' do
+ expect(Clusters::ParseClusterApplicationsArtifactService).not_to receive(:new)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+ end
+
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
allow_next_instance_of(JobArtifactUploader) do |uploader|
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
index 112b19fcbc5..5980260a08a 100644
--- a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -34,7 +34,7 @@ describe Ci::CreatePipelineService do
it 'creates a pipeline using the content passed in as param' do
expect(subject).to be_persisted
- expect(subject.builds.map(&:name)).to eq %w[rspec custom]
+ expect(subject.builds.pluck(:name)).to match_array %w[rspec custom]
expect(subject.config_source).to eq 'bridge_source'
end
@@ -59,7 +59,7 @@ describe Ci::CreatePipelineService do
it 'created a pipeline using the content passed in as param and download the artifact' do
expect(subject).to be_persisted
- expect(subject.builds.pluck(:name)).to eq %w[rspec time custom]
+ expect(subject.builds.pluck(:name)).to match_array %w[rspec time custom]
expect(subject.config_source).to eq 'bridge_source'
end
end
diff --git a/spec/services/ci/daily_report_result_service_spec.rb b/spec/services/ci/daily_build_group_report_result_service_spec.rb
index 240709bab0b..f0b72b8fd86 100644
--- a/spec/services/ci/daily_report_result_service_spec.rb
+++ b/spec/services/ci/daily_build_group_report_result_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Ci::DailyReportResultService, '#execute' do
+describe Ci::DailyBuildGroupReportResultService, '#execute' do
let!(:pipeline) { create(:ci_pipeline, created_at: '2020-02-06 00:01:10') }
let!(:rspec_job) { create(:ci_build, pipeline: pipeline, name: '3/3 rspec', coverage: 80) }
let!(:karma_job) { create(:ci_build, pipeline: pipeline, name: '2/2 karma', coverage: 90) }
@@ -11,31 +11,29 @@ describe Ci::DailyReportResultService, '#execute' do
it 'creates daily code coverage record for each job in the pipeline that has coverage value' do
described_class.new.execute(pipeline)
- Ci::DailyReportResult.find_by(title: 'rspec').tap do |coverage|
+ Ci::DailyBuildGroupReportResult.find_by(group_name: 'rspec').tap do |coverage|
expect(coverage).to have_attributes(
project_id: pipeline.project.id,
last_pipeline_id: pipeline.id,
ref_path: pipeline.source_ref_path,
- param_type: 'coverage',
- title: rspec_job.group_name,
- value: rspec_job.coverage,
+ group_name: rspec_job.group_name,
+ data: { 'coverage' => rspec_job.coverage },
date: pipeline.created_at.to_date
)
end
- Ci::DailyReportResult.find_by(title: 'karma').tap do |coverage|
+ Ci::DailyBuildGroupReportResult.find_by(group_name: 'karma').tap do |coverage|
expect(coverage).to have_attributes(
project_id: pipeline.project.id,
last_pipeline_id: pipeline.id,
ref_path: pipeline.source_ref_path,
- param_type: 'coverage',
- title: karma_job.group_name,
- value: karma_job.coverage,
+ group_name: karma_job.group_name,
+ data: { 'coverage' => karma_job.coverage },
date: pipeline.created_at.to_date
)
end
- expect(Ci::DailyReportResult.find_by(title: 'extra')).to be_nil
+ expect(Ci::DailyBuildGroupReportResult.find_by(group_name: 'extra')).to be_nil
end
context 'when there are multiple builds with the same group name that report coverage' do
@@ -45,14 +43,13 @@ describe Ci::DailyReportResultService, '#execute' do
it 'creates daily code coverage record with the average as the value' do
described_class.new.execute(pipeline)
- Ci::DailyReportResult.find_by(title: 'test').tap do |coverage|
+ Ci::DailyBuildGroupReportResult.find_by(group_name: 'test').tap do |coverage|
expect(coverage).to have_attributes(
project_id: pipeline.project.id,
last_pipeline_id: pipeline.id,
ref_path: pipeline.source_ref_path,
- param_type: 'coverage',
- title: test_job_2.group_name,
- value: 75,
+ group_name: test_job_2.group_name,
+ data: { 'coverage' => 75.0 },
date: pipeline.created_at.to_date
)
end
@@ -77,8 +74,8 @@ describe Ci::DailyReportResultService, '#execute' do
end
it "updates the existing record's coverage value and last_pipeline_id" do
- rspec_coverage = Ci::DailyReportResult.find_by(title: 'rspec')
- karma_coverage = Ci::DailyReportResult.find_by(title: 'karma')
+ rspec_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'rspec')
+ karma_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'karma')
# Bump up the coverage values
described_class.new.execute(new_pipeline)
@@ -88,12 +85,12 @@ describe Ci::DailyReportResultService, '#execute' do
expect(rspec_coverage).to have_attributes(
last_pipeline_id: new_pipeline.id,
- value: new_rspec_job.coverage
+ data: { 'coverage' => new_rspec_job.coverage }
)
expect(karma_coverage).to have_attributes(
last_pipeline_id: new_pipeline.id,
- value: new_karma_job.coverage
+ data: { 'coverage' => new_karma_job.coverage }
)
end
end
@@ -117,8 +114,8 @@ describe Ci::DailyReportResultService, '#execute' do
end
it 'updates the existing daily code coverage records' do
- rspec_coverage = Ci::DailyReportResult.find_by(title: 'rspec')
- karma_coverage = Ci::DailyReportResult.find_by(title: 'karma')
+ rspec_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'rspec')
+ karma_coverage = Ci::DailyBuildGroupReportResult.find_by(group_name: 'karma')
# Run another one but for the older pipeline.
# This simulates the scenario wherein the success worker
@@ -135,12 +132,12 @@ describe Ci::DailyReportResultService, '#execute' do
expect(rspec_coverage).to have_attributes(
last_pipeline_id: pipeline.id,
- value: rspec_job.coverage
+ data: { 'coverage' => rspec_job.coverage }
)
expect(karma_coverage).to have_attributes(
last_pipeline_id: pipeline.id,
- value: karma_job.coverage
+ data: { 'coverage' => karma_job.coverage }
)
end
end
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
index fc5450ab33d..4b9f12d8fdf 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
@@ -11,8 +11,26 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
let(:service) { described_class.new }
let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
- it 'destroys expired job artifacts' do
- expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ context 'when artifact is expired' do
+ context 'when artifact is not locked' do
+ before do
+ artifact.update!(locked: false)
+ end
+
+ it 'destroys job artifact' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ end
+ end
+
+ context 'when artifact is locked' do
+ before do
+ artifact.update!(locked: true)
+ end
+
+ it 'does not destroy job artifact' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
end
context 'when artifact is not expired' do
@@ -72,7 +90,7 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
end
- let!(:artifact) { create_list(:ci_job_artifact, 2, expire_at: 1.day.ago) }
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
it 'raises an error and does not continue destroying' do
is_expected.to be_falsy
@@ -96,7 +114,7 @@ describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
end
- let!(:artifact) { create_list(:ci_job_artifact, 2, expire_at: 1.day.ago) }
+ let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
it 'destroys all expired artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
diff --git a/spec/services/ci/generate_terraform_reports_service_spec.rb b/spec/services/ci/generate_terraform_reports_service_spec.rb
new file mode 100644
index 00000000000..4d2c60bed2c
--- /dev/null
+++ b/spec/services/ci/generate_terraform_reports_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::GenerateTerraformReportsService do
+ let_it_be(:project) { create(:project, :repository) }
+
+ describe '#execute' do
+ let_it_be(:merge_request) { create(:merge_request, :with_terraform_reports, source_project: project) }
+
+ subject { described_class.new(project, nil, id: merge_request.id) }
+
+ context 'when head pipeline has terraform reports' do
+ it 'returns status and data' do
+ result = subject.execute(nil, merge_request.head_pipeline)
+
+ expect(result).to match(
+ status: :parsed,
+ data: match(
+ a_hash_including('tfplan.json' => a_hash_including('create' => 0, 'update' => 1, 'delete' => 0))
+ ),
+ key: an_instance_of(Array)
+ )
+ end
+ end
+
+ context 'when head pipeline has corrupted terraform reports' do
+ it 'returns status and error message' do
+ build = create(:ci_build, pipeline: merge_request.head_pipeline, project: project)
+ create(:ci_job_artifact, :terraform_with_corrupted_data, job: build, project: project)
+
+ result = subject.execute(nil, merge_request.head_pipeline)
+
+ expect(result).to match(
+ status: :error,
+ status_reason: 'An error occurred while fetching terraform reports.',
+ key: an_instance_of(Array)
+ )
+ end
+ end
+ end
+
+ describe '#latest?' do
+ let_it_be(:head_pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+
+ subject { described_class.new(project) }
+
+ it 'returns true when cache key is latest' do
+ cache_key = subject.send(:key, nil, head_pipeline)
+
+ result = subject.latest?(nil, head_pipeline, key: cache_key)
+
+ expect(result).to eq(true)
+ end
+
+ it 'returns false when cache key is outdated' do
+ cache_key = subject.send(:key, nil, head_pipeline)
+ head_pipeline.update_column(:updated_at, 10.minutes.ago)
+
+ result = subject.latest?(nil, head_pipeline, key: cache_key)
+
+ expect(result).to eq(false)
+ end
+
+ it 'returns false when cache key is nil' do
+ result = subject.latest?(nil, head_pipeline, key: nil)
+
+ expect(result).to eq(false)
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
index b487730d07f..de3c7713ac8 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -18,7 +18,7 @@ describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
it 'does update existing status of processable' do
collection.set_processable_status(test_a.id, 'success', 100)
- expect(collection.status_for_names(['test-a'])).to eq('success')
+ expect(collection.status_for_names(['test-a'], dag: false)).to eq('success')
end
it 'ignores a missing processable' do
@@ -33,15 +33,18 @@ describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection do
end
describe '#status_for_names' do
- where(:names, :status) do
- %w[build-a] | 'success'
- %w[build-a build-b] | 'failed'
- %w[build-a test-a] | 'running'
+ where(:names, :status, :dag) do
+ %w[build-a] | 'success' | false
+ %w[build-a build-b] | 'failed' | false
+ %w[build-a test-a] | 'running' | false
+ %w[build-a] | 'success' | true
+ %w[build-a build-b] | 'failed' | true
+ %w[build-a test-a] | 'pending' | true
end
with_them do
it 'returns composite status of given names' do
- expect(collection.status_for_names(names)).to eq(status)
+ expect(collection.status_for_names(names, dag: dag)).to eq(status)
end
end
end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index cbeb45b92ff..3b66ecff196 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -2,13 +2,19 @@
require 'spec_helper'
require_relative 'shared_processing_service.rb'
+require_relative 'shared_processing_service_tests_with_yaml.rb'
describe Ci::PipelineProcessing::AtomicProcessingService do
before do
stub_feature_flags(ci_atomic_processing: true)
+
+ # This feature flag is implicit
+ # Atomic Processing does not process statuses differently
+ stub_feature_flags(ci_composite_status: true)
end
it_behaves_like 'Pipeline Processing Service'
+ it_behaves_like 'Pipeline Processing Service Tests With Yaml'
private
diff --git a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
index 09b462b7600..fd491bf461b 100644
--- a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
@@ -2,13 +2,30 @@
require 'spec_helper'
require_relative 'shared_processing_service.rb'
+require_relative 'shared_processing_service_tests_with_yaml.rb'
describe Ci::PipelineProcessing::LegacyProcessingService do
before do
stub_feature_flags(ci_atomic_processing: false)
end
- it_behaves_like 'Pipeline Processing Service'
+ context 'when ci_composite_status is enabled' do
+ before do
+ stub_feature_flags(ci_composite_status: true)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+ it_behaves_like 'Pipeline Processing Service Tests With Yaml'
+ end
+
+ context 'when ci_composite_status is disabled' do
+ before do
+ stub_feature_flags(ci_composite_status: false)
+ end
+
+ it_behaves_like 'Pipeline Processing Service'
+ it_behaves_like 'Pipeline Processing Service Tests With Yaml'
+ end
private
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index ffe5eacfc48..29fa43001ae 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -816,10 +816,10 @@ shared_examples 'Pipeline Processing Service' do
context 'when a needed job is skipped', :sidekiq_inline do
let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:deploy) do
- create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag, needs: [
- create(:ci_build_need, name: 'linux:rspec')
- ])
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
end
it 'skips the jobs depending on it' do
@@ -836,6 +836,23 @@ shared_examples 'Pipeline Processing Service' do
end
end
+ context 'when a needed job is manual', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
+ end
+
+ it 'makes deploy DAG to be waiting for optional manual to finish' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(skipped created))
+ expect(all_builds.manual).to contain_exactly(linux_build)
+ expect(all_builds.created).to contain_exactly(deploy)
+ end
+ end
+
private
def all_builds
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
new file mode 100644
index 00000000000..93f83f0ea3b
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+shared_context 'Pipeline Processing Service Tests With Yaml' do
+ where(:test_file_path) do
+ Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
+ end
+
+ with_them do
+ let(:test_file) { YAML.load_file(test_file_path) }
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
+ let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline) }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
+ stub_not_protect_default_branch
+ project.add_developer(user)
+ end
+
+ it 'follows transitions' do
+ expect(pipeline).to be_persisted
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(test_file.dig('init', 'expect'), "init")
+
+ test_file['transitions'].each_with_index do |transition, idx|
+ event_on_jobs(transition['event'], transition['jobs'])
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(transition['expect'], "transition:#{idx}")
+ end
+ end
+
+ private
+
+ def check_expectation(expectation, message)
+ expect(current_state.deep_stringify_keys).to eq(expectation), message
+ end
+
+ def current_state
+ # reload pipeline and all relations
+ pipeline.reload
+
+ {
+ pipeline: pipeline.status,
+ stages: pipeline.ordered_stages.pluck(:name, :status).to_h,
+ jobs: pipeline.statuses.latest.pluck(:name, :status).to_h
+ }
+ end
+
+ def event_on_jobs(event, job_names)
+ statuses = pipeline.statuses.latest.by_name(job_names).to_a
+ expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
+
+ statuses.each { |status| status.public_send("#{event}!") }
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml
new file mode 100644
index 00000000000..cfc456387ff
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml
@@ -0,0 +1,47 @@
+config:
+ build:
+ stage: build
+ allow_failure: true
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+ needs: [build]
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
+
+# TODO: What is the real expected behavior here?
+# Is `needs` keyword a requirement indicator or just a helper to build dependency tree?
+# How should it behave `when: on_failure` with `needs`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml
new file mode 100644
index 00000000000..e71ef194c5f
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails.yml
@@ -0,0 +1,39 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+ needs: [build]
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml
new file mode 100644
index 00000000000..40a80f6f53b
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test.yml
@@ -0,0 +1,39 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml
new file mode 100644
index 00000000000..b0904a027f8
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_deploy_needs_test_when_always.yml
@@ -0,0 +1,43 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: always
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: skipped
+ deploy: pending
+ jobs:
+ build: failed
+ test: skipped
+ deploy: pending
+
+# TODO: `test` is actually skipped, but we run `deploy`. Should we?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml
new file mode 100644
index 00000000000..a133023b12d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml
@@ -0,0 +1,62 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1]
+ expect:
+ pipeline: running
+ stages:
+ build: running
+ test: created
+ deploy: created
+ jobs:
+ build_1: success
+ build_2: pending
+ test: created
+ deploy: created
+
+ - event: drop
+ jobs: [build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: failed
+ test: skipped
+ deploy: pending
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml
new file mode 100644
index 00000000000..4c676761e5c
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml
@@ -0,0 +1,63 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: always
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1]
+ expect:
+ pipeline: running
+ stages:
+ build: running
+ test: created
+ deploy: created
+ jobs:
+ build_1: success
+ build_2: pending
+ test: created
+ deploy: created
+
+ - event: drop
+ jobs: [build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: failed
+ test: skipped
+ deploy: pending
+
+# TODO: what's the actual expected behavior here?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml
new file mode 100644
index 00000000000..ea7046262c3
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml
@@ -0,0 +1,40 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml
new file mode 100644
index 00000000000..8860f565cc7
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_always.yml
@@ -0,0 +1,35 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: always
+ script: exit 0
+ needs: [build]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ jobs:
+ build: pending
+ test: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ jobs:
+ build: failed
+ test: pending
+
+# TODO: Should we run `test`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml
new file mode 100644
index 00000000000..3fa5a8034a2
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure.yml
@@ -0,0 +1,35 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+ needs: [build]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ jobs:
+ build: pending
+ test: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ jobs:
+ build: failed
+ test: pending
+
+# TODO: Should we run `test`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml
new file mode 100644
index 00000000000..700d4440802
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure.yml
@@ -0,0 +1,35 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+ needs: [build]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ jobs:
+ build: pending
+ test: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ jobs:
+ build: success
+ test: skipped
+
+# TODO: Should we run `test`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml
new file mode 100644
index 00000000000..f324525bd56
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml
@@ -0,0 +1,63 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+ when: on_failure
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1, build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: success
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml
new file mode 100644
index 00000000000..9986dbaa215
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml
@@ -0,0 +1,64 @@
+config:
+ build_1:
+ stage: build
+ script: exit 0
+
+ build_2:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+ when: on_failure
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ when: always
+ needs: [build_1, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build_1: pending
+ build_2: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build_1, build_2]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build_1: success
+ build_2: success
+ test: skipped
+ deploy: success
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml
new file mode 100644
index 00000000000..8d4d9d403f1
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml
@@ -0,0 +1,43 @@
+config:
+ test:
+ stage: test
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: failed
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
new file mode 100644
index 00000000000..1d61cd24f8c
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
@@ -0,0 +1,66 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test: running
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
new file mode 100644
index 00000000000..d8ca563b141
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
@@ -0,0 +1,58 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: created
+ stages:
+ test: skipped
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test: running
+ deploy: created
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
+
+# TODO: should we run deploy?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
new file mode 100644
index 00000000000..ba0a20f49a7
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_always.yml
@@ -0,0 +1,27 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: always
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: created
+ stages:
+ test: skipped
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions: []
+
+# TODO: should we run `deploy`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
new file mode 100644
index 00000000000..d375c6a49e0
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -0,0 +1,48 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: on_failure
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: created
+ stages:
+ test: skipped
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: skipped
+ jobs:
+ test: failed
+ deploy: skipped
+
+# TODO: should we run `deploy`?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml
new file mode 100644
index 00000000000..34073b92ccc
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml
@@ -0,0 +1,42 @@
+config:
+ test1:
+ stage: test
+ script: exit 0
+
+ test2:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test1, test2]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test1: pending
+ test2: manual
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [test1]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: created
+ jobs:
+ test1: success
+ test2: manual
+ deploy: created
+
+# TODO: should deploy run?
+# Further discussions: https://gitlab.com/gitlab-org/gitlab/-/issues/213080
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml
new file mode 100644
index 00000000000..5ace621e89c
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml
@@ -0,0 +1,66 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ deploy: created
+ jobs:
+ build: failed
+ test: pending
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: success
+ deploy: pending
+ jobs:
+ build: failed
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: success
+ deploy: success
+ jobs:
+ build: failed
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml
new file mode 100644
index 00000000000..19524cfd3e4
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml
@@ -0,0 +1,40 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: success
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml
new file mode 100644
index 00000000000..3e081d4411b
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml
@@ -0,0 +1,53 @@
+config:
+ build:
+ stage: build
+ allow_failure: true
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: pending
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build: failed
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build: failed
+ test: skipped
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml
new file mode 100644
index 00000000000..0618abf3524
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails.yml
@@ -0,0 +1,38 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml
new file mode 100644
index 00000000000..362ac6e4239
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml
@@ -0,0 +1,39 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: skipped
+ deploy: skipped
+ jobs:
+ build: failed
+ test: skipped
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
new file mode 100644
index 00000000000..2ffa35b56d7
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
@@ -0,0 +1,65 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test: running
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
new file mode 100644
index 00000000000..088fab5ca09
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
@@ -0,0 +1,54 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: skipped
+ deploy: pending
+ jobs:
+ test: manual
+ deploy: pending
+
+transitions:
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: skipped
+ deploy: success
+ jobs:
+ test: manual
+ deploy: success
+
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: pending
+ deploy: success
+ jobs:
+ test: pending
+ deploy: success
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: failed
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
new file mode 100644
index 00000000000..2b30316aef6
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -0,0 +1,44 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: on_failure
+ script: exit 0
+
+init:
+ expect:
+ pipeline: skipped
+ stages:
+ test: skipped
+ deploy: skipped
+ jobs:
+ test: manual
+ deploy: skipped
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: skipped
+ jobs:
+ test: pending
+ deploy: skipped
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: skipped
+ jobs:
+ test: failed
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml
new file mode 100644
index 00000000000..1751cbb2023
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml
@@ -0,0 +1,52 @@
+config:
+ build:
+ stage: build
+ script: exit 1
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: drop
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: failed
+ test: pending
+ deploy: created
+ jobs:
+ build: failed
+ test: pending
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: failed
+ stages:
+ build: failed
+ test: success
+ deploy: skipped
+ jobs:
+ build: failed
+ test: success
+ deploy: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml
new file mode 100644
index 00000000000..15afe1ce8e1
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml
@@ -0,0 +1,52 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: on_failure
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: pending
+ jobs:
+ build: success
+ test: skipped
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: skipped
+ deploy: success
+ jobs:
+ build: success
+ test: skipped
+ deploy: success
diff --git a/spec/services/ci/pipeline_schedule_service_spec.rb b/spec/services/ci/pipeline_schedule_service_spec.rb
index f7590720f66..867ed0acc0d 100644
--- a/spec/services/ci/pipeline_schedule_service_spec.rb
+++ b/spec/services/ci/pipeline_schedule_service_spec.rb
@@ -25,38 +25,6 @@ describe Ci::PipelineScheduleService do
subject
end
- context 'when ci_pipeline_schedule_async feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipeline_schedule_async: false)
- end
-
- it 'runs RunPipelineScheduleWorker synchronously' do
- expect_next_instance_of(RunPipelineScheduleWorker) do |worker|
- expect(worker).to receive(:perform).with(schedule.id, schedule.owner.id)
- end
-
- subject
- end
-
- it 'calls Garbage Collection manually' do
- expect(GC).to receive(:start)
-
- subject
- end
-
- context 'when ci_pipeline_schedule_force_gc feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipeline_schedule_force_gc: false)
- end
-
- it 'does not call Garbage Collection manually' do
- expect(GC).not_to receive(:start)
-
- subject
- end
- end
- end
-
context 'when owner is nil' do
let(:schedule) { create(:ci_pipeline_schedule, project: project, owner: nil) }
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index 6f5a070d73d..40ae1c4029b 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -33,25 +33,6 @@ describe Ci::ProcessPipelineService do
end
end
- context 'with a pipeline which has processables with nil scheduling_type', :clean_gitlab_redis_shared_state do
- let!(:build1) { create_build('build1') }
- let!(:build2) { create_build('build2') }
- let!(:build3) { create_build('build3', scheduling_type: :dag) }
- let!(:build3_on_build2) { create(:ci_build_need, build: build3, name: 'build2') }
-
- before do
- pipeline.processables.update_all(scheduling_type: nil)
- end
-
- it 'populates scheduling_type before processing' do
- process_pipeline
-
- expect(build1.scheduling_type).to eq('stage')
- expect(build2.scheduling_type).to eq('stage')
- expect(build3.scheduling_type).to eq('dag')
- end
- end
-
def process_pipeline
described_class.new(pipeline).execute
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 2da1350e2af..c0f854df9b7 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -571,7 +571,7 @@ module Ci
end
describe '#register_success' do
- let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
+ let!(:current_time) { Time.zone.local(2018, 4, 5, 14, 0, 0) }
let!(:attempt_counter) { double('Gitlab::Metrics::NullMetric') }
let!(:job_queue_duration_seconds) { double('Gitlab::Metrics::NullMetric') }
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 86b68dc3ade..0aa603b24ae 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -22,9 +22,9 @@ describe Ci::RetryBuildService do
described_class.new(project, user)
end
- CLONE_ACCESSORS = described_class::CLONE_ACCESSORS
+ clone_accessors = described_class::CLONE_ACCESSORS
- REJECT_ACCESSORS =
+ reject_accessors =
%i[id status user token token_encrypted coverage trace runner
artifacts_expire_at
created_at updated_at started_at finished_at queued_at erased_by
@@ -34,13 +34,13 @@ describe Ci::RetryBuildService do
job_artifacts_container_scanning job_artifacts_dast
job_artifacts_license_management job_artifacts_license_scanning
job_artifacts_performance job_artifacts_lsif
- job_artifacts_terraform
+ job_artifacts_terraform job_artifacts_cluster_applications
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee job_artifacts_dotenv
- job_artifacts_cobertura needs].freeze
+ job_artifacts_cobertura needs job_artifacts_accessibility].freeze
- IGNORE_ACCESSORS =
+ ignore_accessors =
%i[type lock_version target_url base_tags trace_sections
commit_id deployment erased_by_id project_id
runner_id tag_taggings taggings tags trigger_request_id
@@ -63,6 +63,9 @@ describe Ci::RetryBuildService do
end
before do
+ # Test the correct behaviour of the deprecated artifact because it can still be in use
+ stub_feature_flags(drop_license_management_artifact: false)
+
# Make sure that build has both `stage_id` and `stage` because FactoryBot
# can reset one of the fields when assigning another. We plan to deprecate
# and remove legacy `stage` column in the future.
@@ -88,7 +91,7 @@ describe Ci::RetryBuildService do
end
end
- CLONE_ACCESSORS.each do |attribute|
+ clone_accessors.each do |attribute|
it "clones #{attribute} build attribute" do
expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(build.send(attribute)).not_to be_nil
@@ -118,7 +121,7 @@ describe Ci::RetryBuildService do
end
describe 'reject accessors' do
- REJECT_ACCESSORS.each do |attribute|
+ reject_accessors.each do |attribute|
it "does not clone #{attribute} build attribute" do
expect(new_build.send(attribute)).not_to eq build.send(attribute)
end
@@ -126,8 +129,8 @@ describe Ci::RetryBuildService do
end
it 'has correct number of known attributes' do
- processed_accessors = CLONE_ACCESSORS + REJECT_ACCESSORS
- known_accessors = processed_accessors + IGNORE_ACCESSORS
+ processed_accessors = clone_accessors + reject_accessors
+ known_accessors = processed_accessors + ignore_accessors
# :tag_list is a special case, this accessor does not exist
# in reflected associations, comes from `act_as_taggable` and
@@ -190,6 +193,35 @@ describe Ci::RetryBuildService do
expect(subsequent_build.reload).to be_created
end
end
+
+ context 'when pipeline has other builds' do
+ let!(:stage2) { create(:ci_stage_entity, project: project, pipeline: pipeline, name: 'deploy') }
+ let!(:build2) { create(:ci_build, pipeline: pipeline, stage_id: stage.id) }
+ let!(:deploy) { create(:ci_build, pipeline: pipeline, stage_id: stage2.id) }
+ let!(:deploy_needs_build2) { create(:ci_build_need, build: deploy, name: build2.name) }
+
+ context 'when build has nil scheduling_type' do
+ before do
+ build.pipeline.processables.update_all(scheduling_type: nil)
+ build.reload
+ end
+
+ it 'populates scheduling_type of processables' do
+ expect(new_build.scheduling_type).to eq('stage')
+ expect(build.reload.scheduling_type).to eq('stage')
+ expect(build2.reload.scheduling_type).to eq('stage')
+ expect(deploy.reload.scheduling_type).to eq('dag')
+ end
+ end
+
+ context 'when build has scheduling_type' do
+ it 'does not call ensure_scheduling_type!' do
+ expect_any_instance_of(Ci::Pipeline).not_to receive(:ensure_scheduling_type!)
+
+ expect(new_build.scheduling_type).to eq('stage')
+ end
+ end
+ end
end
context 'when user does not have ability to execute build' do
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 81a0b05f2c7..8e85e68d4fc 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -261,6 +261,25 @@ describe Ci::RetryPipelineService, '#execute' do
service.execute(pipeline)
end
+
+ context 'when pipeline has processables with nil scheduling_type' do
+ let!(:build1) { create_build('build1', :success, 0) }
+ let!(:build2) { create_build('build2', :failed, 0) }
+ let!(:build3) { create_build('build3', :failed, 1) }
+ let!(:build3_needs_build1) { create(:ci_build_need, build: build3, name: build1.name) }
+
+ before do
+ statuses.update_all(scheduling_type: nil)
+ end
+
+ it 'populates scheduling_type of processables' do
+ service.execute(pipeline)
+
+ expect(build1.reload.scheduling_type).to eq('stage')
+ expect(build2.reload.scheduling_type).to eq('stage')
+ expect(build3.reload.scheduling_type).to eq('dag')
+ end
+ end
end
context 'when user is not allowed to retry pipeline' do
diff --git a/spec/services/ci/update_instance_variables_service_spec.rb b/spec/services/ci/update_instance_variables_service_spec.rb
new file mode 100644
index 00000000000..93f6e5d3ea8
--- /dev/null
+++ b/spec/services/ci/update_instance_variables_service_spec.rb
@@ -0,0 +1,230 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::UpdateInstanceVariablesService do
+ let(:params) { { variables_attributes: variables_attributes } }
+
+ subject { described_class.new(params) }
+
+ describe '#execute' do
+ context 'without variables' do
+ let(:variables_attributes) { [] }
+
+ it { expect(subject.execute).to be_truthy }
+ end
+
+ context 'with insert only variables' do
+ let(:variables_attributes) do
+ [
+ { key: 'var_a', secret_value: 'dummy_value_for_a', protected: true },
+ { key: 'var_b', secret_value: 'dummy_value_for_b', protected: false }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'persists all the records' do
+ expect { subject.execute }
+ .to change { Ci::InstanceVariable.count }
+ .by variables_attributes.size
+ end
+
+ it 'persists attributes' do
+ subject.execute
+
+ expect(Ci::InstanceVariable.all).to contain_exactly(
+ have_attributes(key: 'var_a', secret_value: 'dummy_value_for_a', protected: true),
+ have_attributes(key: 'var_b', secret_value: 'dummy_value_for_b', protected: false)
+ )
+ end
+ end
+
+ context 'with update only variables' do
+ let!(:var_a) { create(:ci_instance_variable) }
+ let!(:var_b) { create(:ci_instance_variable, protected: false) }
+
+ let(:variables_attributes) do
+ [
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ id: var_b.id,
+ key: 'var_b_key',
+ secret_value: 'new_dummy_value_for_b',
+ protected: 'true'
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'does not change the count' do
+ expect { subject.execute }
+ .not_to change { Ci::InstanceVariable.count }
+ end
+
+ it 'updates the records in place', :aggregate_failures do
+ subject.execute
+
+ expect(var_a.reload).to have_attributes(secret_value: 'new_dummy_value_for_a')
+
+ expect(var_b.reload).to have_attributes(
+ key: 'var_b_key', secret_value: 'new_dummy_value_for_b', protected: true)
+ end
+ end
+
+ context 'with insert and update variables' do
+ let!(:var_a) { create(:ci_instance_variable) }
+
+ let(:variables_attributes) do
+ [
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ key: 'var_b',
+ secret_value: 'dummy_value_for_b',
+ protected: true
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'inserts only one record' do
+ expect { subject.execute }
+ .to change { Ci::InstanceVariable.count }.by 1
+ end
+
+ it 'persists all the records', :aggregate_failures do
+ subject.execute
+ var_b = Ci::InstanceVariable.find_by(key: 'var_b')
+
+ expect(var_a.reload.secret_value).to eq('new_dummy_value_for_a')
+ expect(var_b.secret_value).to eq('dummy_value_for_b')
+ end
+ end
+
+ context 'with insert, update, and destroy variables' do
+ let!(:var_a) { create(:ci_instance_variable) }
+ let!(:var_b) { create(:ci_instance_variable) }
+
+ let(:variables_attributes) do
+ [
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ id: var_b.id,
+ key: var_b.key,
+ secret_value: 'dummy_value_for_b',
+ protected: var_b.protected?.to_s,
+ '_destroy' => 'true'
+ },
+ {
+ key: 'var_c',
+ secret_value: 'dummy_value_for_c',
+ protected: true
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_truthy }
+
+ it 'persists all the records', :aggregate_failures do
+ subject.execute
+ var_c = Ci::InstanceVariable.find_by(key: 'var_c')
+
+ expect(var_a.reload.secret_value).to eq('new_dummy_value_for_a')
+ expect { var_b.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect(var_c.secret_value).to eq('dummy_value_for_c')
+ end
+ end
+
+ context 'with invalid variables' do
+ let!(:var_a) { create(:ci_instance_variable, secret_value: 'dummy_value_for_a') }
+
+ let(:variables_attributes) do
+ [
+ {
+ key: '...?',
+ secret_value: 'nice_value'
+ },
+ {
+ id: var_a.id,
+ key: var_a.key,
+ secret_value: 'new_dummy_value_for_a',
+ protected: var_a.protected?.to_s
+ },
+ {
+ key: var_a.key,
+ secret_value: 'other_value'
+ }
+ ]
+ end
+
+ it { expect(subject.execute).to be_falsey }
+
+ it 'does not insert any records' do
+ expect { subject.execute }
+ .not_to change { Ci::InstanceVariable.count }
+ end
+
+ it 'does not update existing records' do
+ subject.execute
+
+ expect(var_a.reload.secret_value).to eq('dummy_value_for_a')
+ end
+
+ it 'returns errors' do
+ subject.execute
+
+ expect(subject.errors).to match_array(
+ [
+ "Key (#{var_a.key}) has already been taken",
+ "Key can contain only letters, digits and '_'."
+ ])
+ end
+ end
+
+ context 'when deleting non-existing variables' do
+ let(:variables_attributes) do
+ [
+ {
+ id: 'some-id',
+ key: 'some_key',
+ secret_value: 'other_value',
+ '_destroy' => 'true'
+ }
+ ]
+ end
+
+ it { expect { subject.execute }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+
+ context 'when updating non-existing variables' do
+ let(:variables_attributes) do
+ [
+ {
+ id: 'some-id',
+ key: 'some_key',
+ secret_value: 'other_value'
+ }
+ ]
+ end
+
+ it { expect { subject.execute }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+ end
+end