path: root/spec/models/ci
author     GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 07:08:36 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 07:08:36 +0000
commit     48aff82709769b098321c738f3444b9bdaa694c6 (patch)
tree       e00c7c43e2d9b603a5a6af576b1685e400410dee /spec/models/ci
parent     879f5329ee916a948223f8f43d77fba4da6cd028 (diff)
download   gitlab-ce-48aff82709769b098321c738f3444b9bdaa694c6.tar.gz
Add latest changes from gitlab-org/gitlab@13-5-stable-ee (tag: v13.5.0-rc42)
Diffstat (limited to 'spec/models/ci')
-rw-r--r--  spec/models/ci/bridge_spec.rb                 91
-rw-r--r--  spec/models/ci/build_pending_state_spec.rb    27
-rw-r--r--  spec/models/ci/build_spec.rb                 162
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb     138
-rw-r--r--  spec/models/ci/deleted_object_spec.rb         95
-rw-r--r--  spec/models/ci/freeze_period_status_spec.rb    4
-rw-r--r--  spec/models/ci/job_artifact_spec.rb            4
-rw-r--r--  spec/models/ci/pipeline_schedule_spec.rb       4
-rw-r--r--  spec/models/ci/pipeline_spec.rb               63
9 files changed, 452 insertions(+), 136 deletions(-)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 850fc1ec6e6..c464e176c17 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -59,30 +59,20 @@ RSpec.describe Ci::Bridge do
describe 'state machine transitions' do
context 'when bridge points towards downstream' do
- it 'schedules downstream pipeline creation' do
- expect(bridge).to receive(:schedule_downstream_pipeline!)
+ %i[created manual].each do |status|
+ it "schedules downstream pipeline creation when the status is #{status}" do
+ bridge.status = status
- bridge.enqueue!
- end
- end
- end
-
- describe 'state machine transitions' do
- context 'when bridge points towards downstream' do
- it 'schedules downstream pipeline creation' do
- expect(bridge).to receive(:schedule_downstream_pipeline!)
+ expect(bridge).to receive(:schedule_downstream_pipeline!)
- bridge.enqueue!
+ bridge.enqueue!
+ end
end
- end
- end
- describe 'state machine transitions' do
- context 'when bridge points towards downstream' do
- it 'schedules downstream pipeline creation' do
- expect(bridge).to receive(:schedule_downstream_pipeline!)
+ it 'raises error when the status is failed' do
+ bridge.status = :failed
- bridge.enqueue!
+ expect { bridge.enqueue! }.to raise_error(StateMachines::InvalidTransition)
end
end
end
@@ -304,4 +294,67 @@ RSpec.describe Ci::Bridge do
end
end
end
+
+ describe '#play' do
+ let(:downstream_project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:bridge) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: downstream_project) }
+
+ subject { bridge.play(user) }
+
+ before do
+ project.add_maintainer(user)
+ downstream_project.add_maintainer(user)
+ end
+
+ it 'enqueues the bridge' do
+ subject
+
+ expect(bridge).to be_pending
+ end
+ end
+
+ describe '#playable?' do
+ context 'when bridge is a manual action' do
+ subject { build_stubbed(:ci_bridge, :manual).playable? }
+
+ it { is_expected.to be_truthy }
+
+ context 'when FF ci_manual_bridges is disabled' do
+ before do
+ stub_feature_flags(ci_manual_bridges: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when build is not a manual action' do
+ subject { build_stubbed(:ci_bridge, :created).playable? }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#action?' do
+ context 'when bridge is a manual action' do
+ subject { build_stubbed(:ci_bridge, :manual).action? }
+
+ it { is_expected.to be_truthy }
+
+ context 'when FF ci_manual_bridges is disabled' do
+ before do
+ stub_feature_flags(ci_manual_bridges: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when build is not a manual action' do
+ subject { build_stubbed(:ci_bridge, :created).action? }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
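
The new `#play`, `#playable?`, and `#action?` examples assert that a manual bridge behaves as a playable manual action only while the `ci_manual_bridges` feature flag is enabled. A minimal sketch of flag-guarded predicates that would satisfy these examples (the method bodies below are illustrative assumptions, not the actual Ci::Bridge implementation):

    # Illustrative sketch only; the real Ci::Bridge methods may differ.
    def action?
      return false unless ::Feature.enabled?(:ci_manual_bridges, project)

      self.when == 'manual'
    end

    def playable?
      action? && manual?   # `manual?` is the status predicate from the state machine
    end
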
diff --git a/spec/models/ci/build_pending_state_spec.rb b/spec/models/ci/build_pending_state_spec.rb
new file mode 100644
index 00000000000..a546d2aff65
--- /dev/null
+++ b/spec/models/ci/build_pending_state_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildPendingState do
+ describe '#crc32' do
+ context 'when checksum does not exist' do
+ let(:pending_state) do
+ build(:ci_build_pending_state, trace_checksum: nil)
+ end
+
+ it 'returns nil' do
+ expect(pending_state.crc32).to be_nil
+ end
+ end
+
+ context 'when checksum is in hexadecimal' do
+ let(:pending_state) do
+ build(:ci_build_pending_state, trace_checksum: 'crc32:75bcd15')
+ end
+
+ it 'returns decimal representation of the checksum' do
+ expect(pending_state.crc32).to eq 123456789
+ end
+ end
+ end
+end
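
These examples pin down the `#crc32` contract: a missing checksum returns nil, and a checksum stored in the `crc32:<hex>` form is returned as its decimal value (0x75bcd15 is 123456789). A conforming implementation could be as small as the sketch below (illustrative, assuming the checksum is always prefixed with `crc32:` when present):

    # Illustrative sketch; assumes trace_checksum looks like "crc32:75bcd15".
    def crc32
      trace_checksum && trace_checksum.delete_prefix('crc32:').to_i(16)
    end
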
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 1e551d9ee33..f1d51324bbf 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1109,7 +1109,8 @@ RSpec.describe Ci::Build do
let(:environment) { deployment.environment }
before do
- allow(Deployments::FinishedWorker).to receive(:perform_async)
+ allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
+ allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
end
it 'has deployments record with created status' do
@@ -1129,7 +1130,8 @@ RSpec.describe Ci::Build do
context 'when transits to success' do
before do
- allow(Deployments::SuccessWorker).to receive(:perform_async)
+ allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
+ allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
build.success!
end
@@ -2305,6 +2307,54 @@ RSpec.describe Ci::Build do
end
end
+ describe '#has_expired_locked_archive_artifacts?' do
+ subject { build.has_expired_locked_archive_artifacts? }
+
+ context 'when build does not have artifacts' do
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'when build has artifacts' do
+ before do
+ create(:ci_job_artifact, :archive, job: build)
+ end
+
+ context 'when artifacts are unlocked' do
+ before do
+ build.pipeline.unlocked!
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when artifacts are locked' do
+ before do
+ build.pipeline.artifacts_locked!
+ end
+
+ context 'when artifacts do not expire' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when artifacts expire in the future' do
+ before do
+ build.update!(artifacts_expire_at: 1.day.from_now)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when artifacts expired in the past' do
+ before do
+ build.update!(artifacts_expire_at: 1.day.ago)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+ end
+
describe '#has_expiring_archive_artifacts?' do
context 'when artifacts have expiration date set' do
before do
@@ -2504,94 +2554,6 @@ RSpec.describe Ci::Build do
end
end
- describe 'CHANGED_PAGES variables' do
- let(:route_map_yaml) do
- <<~ROUTEMAP
- - source: 'bar/branch-test.txt'
- public: '/bar/branches'
- - source: 'with space/README.md'
- public: '/README'
- ROUTEMAP
- end
-
- before do
- allow_any_instance_of(Project)
- .to receive(:route_map_for).with(/.+/)
- .and_return(Gitlab::RouteMap.new(route_map_yaml))
- end
-
- context 'with a deployment environment and a merge request' do
- let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let(:environment) { create(:environment, project: merge_request.project, name: "foo-#{project.default_branch}") }
- let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
-
- let(:full_urls) do
- [
- File.join(environment.external_url, '/bar/branches'),
- File.join(environment.external_url, '/README')
- ]
- end
-
- it 'populates CI_MERGE_REQUEST_CHANGED_PAGES_* variables' do
- expect(subject).to include(
- {
- key: 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS',
- value: '/bar/branches,/README',
- public: true,
- masked: false
- },
- {
- key: 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS',
- value: full_urls.join(','),
- public: true,
- masked: false
- }
- )
- end
-
- context 'with a deployment environment and no merge request' do
- let(:environment) { create(:environment, project: project, name: "foo-#{project.default_branch}") }
- let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
-
- it 'does not append CHANGED_PAGES variables' do
- ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
-
- expect(ci_variables).to be_empty
- end
- end
-
- context 'with no deployment environment and a present merge request' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project, target_project: project) }
- let(:build) { create(:ci_build, pipeline: merge_request.all_pipelines.take) }
-
- it 'does not append CHANGED_PAGES variables' do
- ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
-
- expect(ci_variables).to be_empty
- end
- end
-
- context 'with no deployment environment and no merge request' do
- it 'does not append CHANGED_PAGES variables' do
- ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
-
- expect(ci_variables).to be_empty
- end
- end
- end
-
- context 'with the :modified_path_ci_variables feature flag disabled' do
- before do
- stub_feature_flags(modified_path_ci_variables: false)
- end
-
- it 'does not set CI_MERGE_REQUEST_CHANGED_PAGES_* variables' do
- expect(subject.find { |var| var[:key] == 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS' }).to be_nil
- expect(subject.find { |var| var[:key] == 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS' }).to be_nil
- end
- end
- end
-
context 'when build has user' do
let(:user_variables) do
[
@@ -4652,4 +4614,24 @@ RSpec.describe Ci::Build do
it { is_expected.to be_nil }
end
end
+
+ describe '#run_on_status_commit' do
+ it 'runs provided hook after status commit' do
+ action = spy('action')
+
+ build.run_on_status_commit { action.perform! }
+ build.success!
+
+ expect(action).to have_received(:perform!).once
+ end
+
+ it 'does not run hooks when status has not changed' do
+ action = spy('action')
+
+ build.run_on_status_commit { action.perform! }
+ build.save!
+
+ expect(action).not_to have_received(:perform!)
+ end
+ end
end
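
The added `#has_expired_locked_archive_artifacts?` contexts encode a small truth table: nil when the build has no archive artifact; false when the pipeline's artifacts are unlocked, have no expiry, or expire in the future; true only for locked artifacts whose expiry is already in the past. A predicate satisfying that table might look like the sketch below (the `job_artifacts_archive` association name is an assumption here):

    # Illustrative sketch; association and column names are assumptions.
    def has_expired_locked_archive_artifacts?
      return if job_artifacts_archive.blank?   # no archive artifact => nil

      pipeline.artifacts_locked? &&
        artifacts_expire_at.present? &&
        artifacts_expire_at < Time.current
    end
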
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index fefe5e3bfca..cdbdd2b1d20 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -502,22 +502,12 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
describe '#persist_data!' do
let(:build) { create(:ci_build, :running) }
- subject { build_trace_chunk.persist_data! }
-
- shared_examples_for 'Atomic operation' do
- context 'when the other process is persisting' do
- let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
-
- before do
- stub_exclusive_lease_taken(lease_key)
- end
-
- it 'raise an error' do
- expect { subject }.to raise_error('Failed to obtain a lock')
- end
- end
+ before do
+ build_trace_chunk.save!
end
+ subject { build_trace_chunk.persist_data! }
+
context 'when data_store is redis' do
let(:data_store) { :redis }
@@ -548,8 +538,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.reload.checksum).to eq '3398914352'
end
-
- it_behaves_like 'Atomic operation'
end
context 'when data size has not reached CHUNK_SIZE' do
@@ -575,6 +563,62 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.fog?).to be_truthy
end
end
+
+ context 'when the chunk has been modified by a different worker' do
+ it 'reloads the chunk before migration' do
+ described_class
+ .find(build_trace_chunk.id)
+ .update!(data_store: :fog)
+
+ build_trace_chunk.persist_data!
+ end
+
+ it 'verifies the operation using optimistic locking' do
+ allow(build_trace_chunk)
+ .to receive(:save!)
+ .and_raise(ActiveRecord::StaleObjectError)
+
+ expect { build_trace_chunk.persist_data! }
+ .to raise_error(described_class::FailedToPersistDataError)
+ end
+
+ it 'does not allow flushing unpersisted chunk' do
+ build_trace_chunk.checksum = '12345'
+
+ expect { build_trace_chunk.persist_data! }
+ .to raise_error(described_class::FailedToPersistDataError,
+ /Modifed build trace chunk detected/)
+ end
+ end
+
+ context 'when the chunk is being locked by a different worker' do
+ let(:metrics) { spy('metrics') }
+
+ it 'does not raise an exception' do
+ lock_chunk do
+ expect { build_trace_chunk.persist_data! }.not_to raise_error
+ end
+ end
+
+ it 'increments stalled chunk trace metric' do
+ allow(build_trace_chunk)
+ .to receive(:metrics)
+ .and_return(metrics)
+
+ lock_chunk { build_trace_chunk.persist_data! }
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :stalled)
+ .once
+ end
+
+ def lock_chunk(&block)
+ "trace_write:#{build.id}:chunks:#{chunk_index}".then do |key|
+ build_trace_chunk.in_lock(key, &block)
+ end
+ end
+ end
end
end
@@ -609,8 +653,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
-
- it_behaves_like 'Atomic operation'
end
context 'when data size has not reached CHUNK_SIZE' do
@@ -670,8 +712,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
-
- it_behaves_like 'Atomic operation'
end
context 'when data size has not reached CHUNK_SIZE' do
@@ -779,4 +819,62 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
end
+
+ describe 'comparable build trace chunks' do
+ describe '#<=>' do
+ context 'when chunks are associated with different builds' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
+
+ it 'returns nil' do
+ expect(first <=> second).to be_nil
+ end
+ end
+
+ context 'when there are two chunks with different indexes' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
+
+ it 'indicates that the first one is greater than the second' do
+ expect(first <=> second).to eq 1
+ end
+ end
+
+ context 'when there are two chunks with the same index within the same build' do
+ let(:chunk) { create(:ci_build_trace_chunk) }
+
+ it 'indicates that these are equal' do
+ expect(chunk <=> chunk).to be_zero # rubocop:disable Lint/UselessComparison
+ end
+ end
+ end
+
+ describe '#==' do
+ context 'when chunks have the same index' do
+ let(:chunk) { create(:ci_build_trace_chunk) }
+
+ it 'indicates that the chunks are equal' do
+ expect(chunk).to eq chunk
+ end
+ end
+
+ context 'when chunks have different indexes' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
+
+ it 'indicates that the chunks are not equal' do
+ expect(first).not_to eq second
+ end
+ end
+
+ context 'when chunks are associated with different builds' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
+
+ it 'indicates that the chunks are not equal' do
+ expect(first).not_to eq second
+ end
+ end
+ end
+ end
end
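
The comparison examples imply that chunks are ordered by `chunk_index`, but only within the same build, and that equality falls out of that ordering. One way to express this is through `Comparable` (a sketch, not the real model code):

    # Illustrative sketch of a Comparable-based implementation matching the examples.
    include Comparable

    def <=>(other)
      return unless other.is_a?(self.class)
      return unless build_id == other.build_id   # chunks from different builds do not compare

      chunk_index <=> other.chunk_index
    end
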
diff --git a/spec/models/ci/deleted_object_spec.rb b/spec/models/ci/deleted_object_spec.rb
new file mode 100644
index 00000000000..cb8911d5027
--- /dev/null
+++ b/spec/models/ci/deleted_object_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeletedObject, :aggregate_failures do
+ describe 'attributes' do
+ it { is_expected.to respond_to(:file) }
+ it { is_expected.to respond_to(:store_dir) }
+ it { is_expected.to respond_to(:file_store) }
+ it { is_expected.to respond_to(:pick_up_at) }
+ end
+
+ describe '.bulk_import' do
+ context 'with data' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, :expired) }
+
+ it 'imports data' do
+ expect { described_class.bulk_import(Ci::JobArtifact.all) }.to change { described_class.count }.by(1)
+
+ deleted_artifact = described_class.first
+
+ expect(deleted_artifact.file_store).to eq(artifact.file_store)
+ expect(deleted_artifact.store_dir).to eq(artifact.file.store_dir.to_s)
+ expect(deleted_artifact.file_identifier).to eq(artifact.file_identifier)
+ expect(deleted_artifact.pick_up_at).to eq(artifact.expire_at)
+ end
+ end
+
+ context 'with invalid data' do
+ let!(:artifact) { create(:ci_job_artifact) }
+
+ it 'does not import anything' do
+ expect(artifact.file_identifier).to be_nil
+
+ expect { described_class.bulk_import([artifact]) }
+ .not_to change { described_class.count }
+ end
+ end
+
+ context 'with empty data' do
+ it 'returns successfully' do
+ expect { described_class.bulk_import([]) }
+ .not_to change { described_class.count }
+ end
+ end
+ end
+
+ context 'ActiveRecord scopes' do
+ let_it_be(:not_ready) { create(:ci_deleted_object, pick_up_at: 1.day.from_now) }
+ let_it_be(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
+
+ describe '.ready_for_destruction' do
+ it 'returns objects that are ready' do
+ result = described_class.ready_for_destruction(2)
+
+ expect(result).to contain_exactly(ready)
+ end
+ end
+
+ describe '.lock_for_destruction' do
+ subject(:result) { described_class.lock_for_destruction(10) }
+
+ it 'returns objects that are ready' do
+ expect(result).to contain_exactly(ready)
+ end
+
+ it 'selects only the id' do
+ expect(result.select_values).to contain_exactly(:id)
+ end
+
+ it 'orders by pick_up_at' do
+ expect(result.order_values.map(&:to_sql))
+ .to contain_exactly("\"ci_deleted_objects\".\"pick_up_at\" ASC")
+ end
+
+ it 'applies limit' do
+ expect(result.limit_value).to eq(10)
+ end
+
+ it 'uses select for update' do
+ expect(result.locked?).to eq('FOR UPDATE SKIP LOCKED')
+ end
+ end
+ end
+
+ describe '#delete_file_from_storage' do
+ let(:object) { build(:ci_deleted_object) }
+
+ it 'does not raise errors' do
+ expect(object.file).to receive(:remove!).and_raise(StandardError)
+
+ expect(object.delete_file_from_storage).to be_falsy
+ end
+ end
+end
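
The `#delete_file_from_storage` example only requires that storage errors are swallowed and signalled through a falsy return value rather than an exception. A minimal sketch under that contract (error reporting is deliberately omitted here and would likely exist in a real implementation):

    # Illustrative sketch; a production version would also report the failure.
    def delete_file_from_storage
      file.remove!
      true
    rescue StandardError
      false
    end
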
diff --git a/spec/models/ci/freeze_period_status_spec.rb b/spec/models/ci/freeze_period_status_spec.rb
index 831895cb528..f51381f7a5f 100644
--- a/spec/models/ci/freeze_period_status_spec.rb
+++ b/spec/models/ci/freeze_period_status_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::FreezePeriodStatus do
shared_examples 'within freeze period' do |time|
it 'is frozen' do
- Timecop.freeze(time) do
+ travel_to(time) do
expect(subject).to be_truthy
end
end
@@ -19,7 +19,7 @@ RSpec.describe Ci::FreezePeriodStatus do
shared_examples 'outside freeze period' do |time|
it 'is not frozen' do
- Timecop.freeze(time) do
+ travel_to(time) do
expect(subject).to be_falsy
end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 779839df670..26851c93ac3 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Ci::JobArtifact do
it_behaves_like 'having unique enum values'
- it_behaves_like 'UpdateProjectStatistics' do
+ it_behaves_like 'UpdateProjectStatistics', :with_counter_attribute do
let_it_be(:job, reload: true) { create(:ci_build) }
subject { build(:ci_job_artifact, :archive, job: job, size: 107464) }
@@ -44,7 +44,7 @@ RSpec.describe Ci::JobArtifact do
let!(:metrics_report) { create(:ci_job_artifact, :junit) }
let!(:codequality_report) { create(:ci_job_artifact, :codequality) }
- it { is_expected.to eq([metrics_report, codequality_report]) }
+ it { is_expected.to match_array([metrics_report, codequality_report]) }
end
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 949d5f7bd04..cec3b544e50 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Ci::PipelineSchedule do
subject { described_class.runnable_schedules }
let!(:pipeline_schedule) do
- Timecop.freeze(1.day.ago) do
+ travel_to(1.day.ago) do
create(:ci_pipeline_schedule, :hourly)
end
end
@@ -118,7 +118,7 @@ RSpec.describe Ci::PipelineSchedule do
let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }
it "updates next_run_at to the sidekiq worker's execution time" do
- Timecop.freeze(Time.zone.parse("2019-06-01 12:18:00+0000")) do
+ travel_to(Time.zone.parse("2019-06-01 12:18:00+0000")) do
expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 228a1e8f7a2..88d08f1ec45 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2436,7 +2436,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#retry_failed' do
- let(:latest_status) { pipeline.statuses.latest.pluck(:status) }
+ let(:latest_status) { pipeline.latest_statuses.pluck(:status) }
before do
stub_not_protect_default_branch
@@ -2988,6 +2988,57 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#builds_in_self_and_descendants' do
+ subject(:builds) { pipeline.builds_in_self_and_descendants }
+
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
+
+ context 'when pipeline is standalone' do
+ it 'returns the list of builds' do
+ expect(builds).to contain_exactly(build)
+ end
+ end
+
+ context 'when pipeline is parent of another pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, pipeline: child_pipeline) }
+
+ it 'returns the list of builds' do
+ expect(builds).to contain_exactly(build, child_build)
+ end
+ end
+
+ context 'when pipeline is parent of another parent pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, pipeline: child_pipeline) }
+ let(:child_of_child_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+ let!(:child_of_child_build) { create(:ci_build, pipeline: child_of_child_pipeline) }
+
+ it 'returns the list of builds' do
+ expect(builds).to contain_exactly(build, child_build, child_of_child_build)
+ end
+ end
+ end
+
+ describe '#build_with_artifacts_in_self_and_descendants' do
+ let!(:build) { create(:ci_build, name: 'test', pipeline: pipeline) }
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, :artifacts, name: 'test', pipeline: child_pipeline) }
+
+ it 'returns the build with a given name, having artifacts' do
+ expect(pipeline.build_with_artifacts_in_self_and_descendants('test')).to eq(child_build)
+ end
+
+ context 'when same job name is present in both parent and child pipeline' do
+ let!(:build) { create(:ci_build, :artifacts, name: 'test', pipeline: pipeline) }
+
+ it 'returns the job in the parent pipeline' do
+ expect(pipeline.build_with_artifacts_in_self_and_descendants('test')).to eq(build)
+ end
+ end
+ end
+
describe '#find_job_with_archive_artifacts' do
let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
let!(:job_without_artifacts) { create(:ci_build, name: 'rspec', pipeline: pipeline) }
@@ -3628,6 +3679,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(builds).to include(rspec, jest)
expect(builds).not_to include(karma)
end
+
+ it 'returns only latest builds' do
+ obsolete = create(:ci_build, name: "jest", coverage: 10.12, pipeline: pipeline, retried: true)
+ retried = create(:ci_build, name: "jest", coverage: 20.11, pipeline: pipeline)
+
+ builds = pipeline.builds_with_coverage
+
+ expect(builds).to include(retried)
+ expect(builds).not_to include(obsolete)
+ end
end
describe '#base_and_ancestors' do