author     Grzegorz Bizon <grzegorz@gitlab.com>  2018-07-06 11:38:07 +0000
committer  Grzegorz Bizon <grzegorz@gitlab.com>  2018-07-06 11:38:07 +0000
commit     9c720a98b9fc7a3cc35b019251c5285378c9ae18 (patch)
tree       ffe7b38f4476528a91443e889235355402471f54 /spec
parent     bf9fd9c3fc3ded123458093efdcdae33f083300d (diff)
parent     21399fbc31083dce37a37980668c408520a136e0 (diff)
download   gitlab-ce-9c720a98b9fc7a3cc35b019251c5285378c9ae18.tar.gz
Merge branch 'build-chunks-on-object-storage' into 'master'
Allow to store BuildTraceChunks on Object Storage

Closes #45712

See merge request gitlab-org/gitlab-ce!19515
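The change below adds a third storage backend (fog, i.e. object storage) for CI build trace chunks next to the existing redis and database stores. A minimal sketch of the dispatch this implies, reconstructed from the '.all_stores' and store-class expectations in the specs in this diff; the enum values and the get_store_class helper are illustrative assumptions, not the shipped model code:

module Ci
  class BuildTraceChunk < ActiveRecord::Base
    # redis is the lowest-precedence (live) store, fog the highest
    # (persisted object storage); the integer values are assumptions.
    enum data_store: { redis: 1, database: 2, fog: 3 }

    # Matches the ordering asserted in build_trace_chunk_spec.rb below.
    def self.all_stores
      @all_stores ||= data_stores.keys
    end

    # Every backend exposes the same small interface, exercised in the
    # *_spec.rb files below: available?, data, set_data, delete_data,
    # keys and delete_keys.
    def data
      get_store_class(data_store).data(self).to_s
    end

    private

    def get_store_class(store)
      "Ci::BuildTraceChunks::#{store.capitalize}".constantize.new
    end
  end
end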
Diffstat (limited to 'spec')
-rw-r--r--  spec/factories/ci/build_trace_chunks.rb              48
-rw-r--r--  spec/lib/gitlab/exclusive_lease_helpers_spec.rb      76
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb            446
-rw-r--r--  spec/models/ci/build_trace_chunks/database_spec.rb  105
-rw-r--r--  spec/models/ci/build_trace_chunks/fog_spec.rb       146
-rw-r--r--  spec/models/ci/build_trace_chunks/redis_spec.rb     132
-rw-r--r--  spec/support/helpers/stub_object_storage.rb           5
7 files changed, 833 insertions(+), 125 deletions(-)
diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb
index c0b9a25bfe8..3e8e2736423 100644
--- a/spec/factories/ci/build_trace_chunks.rb
+++ b/spec/factories/ci/build_trace_chunks.rb
@@ -3,5 +3,53 @@ FactoryBot.define do
build factory: :ci_build
chunk_index 0
data_store :redis
+
+ trait :redis_with_data do
+ data_store :redis
+
+ transient do
+ initial_data 'test data'
+ end
+
+ after(:create) do |build_trace_chunk, evaluator|
+ Ci::BuildTraceChunks::Redis.new.set_data(build_trace_chunk, evaluator.initial_data)
+ end
+ end
+
+ trait :redis_without_data do
+ data_store :redis
+ end
+
+ trait :database_with_data do
+ data_store :database
+
+ transient do
+ initial_data 'test data'
+ end
+
+ after(:build) do |build_trace_chunk, evaluator|
+ Ci::BuildTraceChunks::Database.new.set_data(build_trace_chunk, evaluator.initial_data)
+ end
+ end
+
+ trait :database_without_data do
+ data_store :database
+ end
+
+ trait :fog_with_data do
+ data_store :fog
+
+ transient do
+ initial_data 'test data'
+ end
+
+ after(:create) do |build_trace_chunk, evaluator|
+ Ci::BuildTraceChunks::Fog.new.set_data(build_trace_chunk, evaluator.initial_data)
+ end
+ end
+
+ trait :fog_without_data do
+ data_store :fog
+ end
end
end
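For reference, this is how the traits defined above are used by the new store specs later in this diff: the transient initial_data attribute seeds the selected backend after the record is built or created. Illustrative snippet, not part of the change itself:

# Chunk whose payload already lives in object storage:
chunk = create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog')
Ci::BuildTraceChunks::Fog.new.data(chunk)            # => "sample data in fog"

# Chunk flagged for the database store but without any payload yet:
empty_chunk = create(:ci_build_trace_chunk, :database_without_data)
Ci::BuildTraceChunks::Database.new.data(empty_chunk) # => nil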
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
new file mode 100644
index 00000000000..2e3656b52fb
--- /dev/null
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -0,0 +1,76 @@
+require 'spec_helper'
+
+describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do
+ include ::ExclusiveLeaseHelpers
+
+ let(:class_instance) { (Class.new { include ::Gitlab::ExclusiveLeaseHelpers }).new }
+ let(:unique_key) { SecureRandom.hex(10) }
+
+ describe '#in_lock' do
+ subject { class_instance.in_lock(unique_key, **options) { } }
+
+ let(:options) { {} }
+
+ context 'when the lease is not obtained yet' do
+ before do
+ stub_exclusive_lease(unique_key, 'uuid')
+ end
+
+ it 'calls the given block' do
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ end
+
+ it 'calls the given block continuously' do
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once
+ end
+
+ it 'cancels the exclusive lease after the block' do
+ expect_to_cancel_exclusive_lease(unique_key, 'uuid')
+
+ subject
+ end
+ end
+
+ context 'when the lease is obtained already' do
+ let!(:lease) { stub_exclusive_lease_taken(unique_key) }
+
+ it 'retries to obtain a lease and raises an error' do
+ expect(lease).to receive(:try_obtain).exactly(11).times
+
+ expect { subject }.to raise_error('Failed to obtain a lock')
+ end
+
+ context 'when ttl is specified' do
+ let(:options) { { ttl: 10.minutes } }
+
+ it 'receives the specified argument' do
+ expect(Gitlab::ExclusiveLease).to receive(:new).with(unique_key, { timeout: 10.minutes } )
+
+ expect { subject }.to raise_error('Failed to obtain a lock')
+ end
+ end
+
+ context 'when retry count is specified' do
+ let(:options) { { retries: 3 } }
+
+ it 'retries for the specified times' do
+ expect(lease).to receive(:try_obtain).exactly(4).times
+
+ expect { subject }.to raise_error('Failed to obtain a lock')
+ end
+ end
+
+ context 'when sleep second is specified' do
+ let(:options) { { retries: 0, sleep_sec: 0.05.seconds } }
+
+ it 'receives the specified argument' do
+ expect(class_instance).to receive(:sleep).with(0.05.seconds).once
+
+ expect { subject }.to raise_error('Failed to obtain a lock')
+ end
+ end
+ end
+ end
+end
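The spec above pins down the behaviour of Gitlab::ExclusiveLeaseHelpers#in_lock: the block runs only while the lease is held, the lease is cancelled afterwards, and a taken lease is retried 'retries' times (10 by default, hence 11 try_obtain calls) before 'Failed to obtain a lock' is raised. A sketch consistent with those expectations; the error class name and the default ttl and sleep_sec values are assumptions:

module Gitlab
  module ExclusiveLeaseHelpers
    FailedToObtainLockError = Class.new(StandardError)

    def in_lock(key, ttl: 1.minute, retries: 10, sleep_sec: 0.01.seconds)
      lease = Gitlab::ExclusiveLease.new(key, timeout: ttl)

      until (uuid = lease.try_obtain)
        # Wait a bit between attempts so Redis is not hammered, then give up
        # once the retry budget is exhausted.
        sleep(sleep_sec)
        break if (retries -= 1) < 0
      end

      raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid

      yield
    ensure
      Gitlab::ExclusiveLease.cancel(key, uuid)
    end
  end
end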
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 464897de306..774a638b430 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -14,6 +14,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
before do
stub_feature_flags(ci_enable_live_trace: true)
+ stub_artifacts_object_storage
end
context 'FastDestroyAll' do
@@ -37,6 +38,22 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
end
+ describe '.all_stores' do
+ subject { described_class.all_stores }
+
+ it 'returns a correctly ordered array' do
+ is_expected.to eq(%w[redis database fog])
+ end
+
+ it 'returns redis store as the lowest precedence' do
+ expect(subject.first).to eq('redis')
+ end
+
+ it 'returns fog store as the highest precedence' do
+ expect(subject.last).to eq('fog')
+ end
+ end
+
describe '#data' do
subject { build_trace_chunk.data }
@@ -44,181 +61,269 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis }
before do
- build_trace_chunk.send(:redis_set_data, 'Sample data in redis')
+ build_trace_chunk.send(:unsafe_set_data!, 'Sample data in redis')
end
it { is_expected.to eq('Sample data in redis') }
end
context 'when data_store is database' do
- let(:data_store) { :db }
- let(:raw_data) { 'Sample data in db' }
+ let(:data_store) { :database }
+ let(:raw_data) { 'Sample data in database' }
- it { is_expected.to eq('Sample data in db') }
+ it { is_expected.to eq('Sample data in database') }
end
- end
-
- describe '#set_data' do
- subject { build_trace_chunk.send(:set_data, value) }
- let(:value) { 'Sample data' }
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
- context 'when value bytesize is bigger than CHUNK_SIZE' do
- let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) }
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, 'Sample data in fog')
+ end
- it { expect { subject }.to raise_error('too much data') }
+ it { is_expected.to eq('Sample data in fog') }
end
+ end
- context 'when data_store is redis' do
- let(:data_store) { :redis }
+ describe '#append' do
+ subject { build_trace_chunk.append(new_data, offset) }
- it do
- expect(build_trace_chunk.send(:redis_data)).to be_nil
+ let(:new_data) { 'Sample new data' }
+ let(:offset) { 0 }
+ let(:merged_data) { data + new_data.to_s }
- subject
+ shared_examples_for 'Appending correctly' do
+ context 'when offset is negative' do
+ let(:offset) { -1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
+ end
- expect(build_trace_chunk.send(:redis_data)).to eq(value)
+ context 'when offset is bigger than data size' do
+ let(:offset) { data.bytesize + 1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
end
- context 'when fullfilled chunk size' do
- let(:value) { 'a' * described_class::CHUNK_SIZE }
+ context 'when new data overflows chunk size' do
+ let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
- it 'schedules stashing data' do
- expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
+ it { expect { subject }.to raise_error('Chunk size overflow') }
+ end
+
+ context 'when offset is EOF' do
+ let(:offset) { data.bytesize }
+ it 'appends' do
subject
+
+ expect(build_trace_chunk.data).to eq(merged_data)
end
- end
- end
- context 'when data_store is database' do
- let(:data_store) { :db }
+ context 'when the other process is appending' do
+ let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
- it 'sets data' do
- expect(build_trace_chunk.raw_data).to be_nil
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
- subject
+ it 'raises an error' do
+ expect { subject }.to raise_error('Failed to obtain a lock')
+ end
+ end
- expect(build_trace_chunk.raw_data).to eq(value)
- expect(build_trace_chunk.persisted?).to be_truthy
- end
+ context 'when new_data is nil' do
+ let(:new_data) { nil }
- context 'when raw_data is not changed' do
- it 'does not execute UPDATE' do
- expect(build_trace_chunk.raw_data).to be_nil
- build_trace_chunk.save!
+ it 'raises an error' do
+ expect { subject }.to raise_error('New data is missing')
+ end
+ end
- # First set
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0
- expect(build_trace_chunk.raw_data).to eq(value)
- expect(build_trace_chunk.persisted?).to be_truthy
+ context 'when new_data is empty' do
+ let(:new_data) { '' }
- # Second set
- build_trace_chunk.reload
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0)
+ it 'does not append' do
+ subject
+
+ expect(build_trace_chunk.data).to eq(data)
+ end
+
+ it 'does not execute UPDATE' do
+ ActiveRecord::QueryRecorder.new { subject }.log.map do |query|
+ expect(query).not_to include('UPDATE')
+ end
+ end
end
end
- context 'when fullfilled chunk size' do
- it 'does not schedule stashing data' do
- expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
+ context 'when offset is in the middle of the data' do
+ let(:offset) { data.bytesize / 2 }
+ it 'appends' do
subject
+
+ expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
end
end
end
- end
- describe '#truncate' do
- subject { build_trace_chunk.truncate(offset) }
+ shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
+ context 'when new data fulfilled chunk size' do
+ let(:new_data) { 'a' * described_class::CHUNK_SIZE }
- shared_examples_for 'truncates' do
- context 'when offset is negative' do
- let(:offset) { -1 }
+ it 'schedules trace chunk flush worker' do
+ expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
- it { expect { subject }.to raise_error('Offset is out of range') }
- end
+ subject
+ end
- context 'when offset is bigger than data size' do
- let(:offset) { data.bytesize + 1 }
+ it 'migrates data to object storage' do
+ Sidekiq::Testing.inline! do
+ subject
- it { expect { subject }.to raise_error('Offset is out of range') }
+ build_trace_chunk.reload
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(build_trace_chunk.data).to eq(new_data)
+ end
+ end
end
+ end
- context 'when offset is 10' do
- let(:offset) { 10 }
+ shared_examples_for 'Scheduling no sidekiq worker' do
+ context 'when new data fulfilled chunk size' do
+ let(:new_data) { 'a' * described_class::CHUNK_SIZE }
+
+ it 'does not schedule trace chunk flush worker' do
+ expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
- it 'truncates' do
subject
+ end
- expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
+ it 'does not migrate data to object storage' do
+ Sidekiq::Testing.inline! do
+ data_store = build_trace_chunk.data_store
+
+ subject
+
+ build_trace_chunk.reload
+ expect(build_trace_chunk.data_store).to eq(data_store)
+ end
end
end
end
context 'when data_store is redis' do
let(:data_store) { :redis }
- let(:data) { 'Sample data in redis' }
- before do
- build_trace_chunk.send(:redis_set_data, data)
+ context 'when there are no data' do
+ let(:data) { '' }
+
+ it 'has no data' do
+ expect(build_trace_chunk.data).to be_empty
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
end
- it_behaves_like 'truncates'
- end
+ context 'when there are some data' do
+ let(:data) { 'Sample data in redis' }
- context 'when data_store is database' do
- let(:data_store) { :db }
- let(:raw_data) { 'Sample data in db' }
- let(:data) { raw_data }
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
- it_behaves_like 'truncates'
+ it 'has data' do
+ expect(build_trace_chunk.data).to eq(data)
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
+ end
end
- end
- describe '#append' do
- subject { build_trace_chunk.append(new_data, offset) }
+ context 'when data_store is database' do
+ let(:data_store) { :database }
- let(:new_data) { 'Sample new data' }
- let(:offset) { 0 }
- let(:total_data) { data + new_data }
+ context 'when there are no data' do
+ let(:data) { '' }
- shared_examples_for 'appends' do
- context 'when offset is negative' do
- let(:offset) { -1 }
+ it 'has no data' do
+ expect(build_trace_chunk.data).to be_empty
+ end
- it { expect { subject }.to raise_error('Offset is out of range') }
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling no sidekiq worker'
end
- context 'when offset is bigger than data size' do
- let(:offset) { data.bytesize + 1 }
+ context 'when there are some data' do
+ let(:raw_data) { 'Sample data in database' }
+ let(:data) { raw_data }
- it { expect { subject }.to raise_error('Offset is out of range') }
+ it 'has data' do
+ expect(build_trace_chunk.data).to eq(data)
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling no sidekiq worker'
end
+ end
- context 'when offset is bigger than data size' do
- let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
- it { expect { subject }.to raise_error('Chunk size overflow') }
+ context 'when there are no data' do
+ let(:data) { '' }
+
+ it 'has no data' do
+ expect(build_trace_chunk.data).to be_empty
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling no sidekiq worker'
end
- context 'when offset is EOF' do
- let(:offset) { data.bytesize }
+ context 'when there are some data' do
+ let(:data) { 'Sample data in fog' }
- it 'appends' do
- subject
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
- expect(build_trace_chunk.data).to eq(total_data)
+ it 'has data' do
+ expect(build_trace_chunk.data).to eq(data)
end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling no sidekiq worker'
+ end
+ end
+ end
+
+ describe '#truncate' do
+ subject { build_trace_chunk.truncate(offset) }
+
+ shared_examples_for 'truncates' do
+ context 'when offset is negative' do
+ let(:offset) { -1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
+ end
+
+ context 'when offset is bigger than data size' do
+ let(:offset) { data.bytesize + 1 }
+
+ it { expect { subject }.to raise_error('Offset is out of range') }
end
context 'when offset is 10' do
let(:offset) { 10 }
- it 'appends' do
+ it 'truncates' do
subject
- expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
+ expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
end
end
end
@@ -228,18 +333,29 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
- build_trace_chunk.send(:redis_set_data, data)
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
- it_behaves_like 'appends'
+ it_behaves_like 'truncates'
end
context 'when data_store is database' do
- let(:data_store) { :db }
- let(:raw_data) { 'Sample data in db' }
+ let(:data_store) { :database }
+ let(:raw_data) { 'Sample data in database' }
let(:data) { raw_data }
- it_behaves_like 'appends'
+ it_behaves_like 'truncates'
+ end
+
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+ let(:data) { 'Sample data in fog' }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
+
+ it_behaves_like 'truncates'
end
end
@@ -253,7 +369,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
- build_trace_chunk.send(:redis_set_data, data)
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
it { is_expected.to eq(data.bytesize) }
@@ -265,10 +381,10 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data_store is database' do
- let(:data_store) { :db }
+ let(:data_store) { :database }
context 'when data exists' do
- let(:raw_data) { 'Sample data in db' }
+ let(:raw_data) { 'Sample data in database' }
let(:data) { raw_data }
it { is_expected.to eq(data.bytesize) }
@@ -278,10 +394,43 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it { is_expected.to eq(0) }
end
end
+
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+
+ context 'when data exists' do
+ let(:data) { 'Sample data in fog' }
+ let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
+
+ it { is_expected.to eq(data.bytesize) }
+ end
+
+ context 'when data does not exist' do
+ it { is_expected.to eq(0) }
+ end
+ end
end
- describe '#use_database!' do
- subject { build_trace_chunk.use_database! }
+ describe '#persist_data!' do
+ subject { build_trace_chunk.persist_data! }
+
+ shared_examples_for 'Atomic operation' do
+ context 'when the other process is persisting' do
+ let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
+
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('Failed to obtain a lock')
+ end
+ end
+ end
context 'when data_store is redis' do
let(:data_store) { :redis }
@@ -290,46 +439,93 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
- build_trace_chunk.send(:redis_set_data, data)
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
- it 'stashes the data' do
- expect(build_trace_chunk.data_store).to eq('redis')
- expect(build_trace_chunk.send(:redis_data)).to eq(data)
- expect(build_trace_chunk.raw_data).to be_nil
+ it 'persists the data' do
+ expect(build_trace_chunk.redis?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
subject
- expect(build_trace_chunk.data_store).to eq('db')
- expect(build_trace_chunk.send(:redis_data)).to be_nil
- expect(build_trace_chunk.raw_data).to eq(data)
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
+
+ it_behaves_like 'Atomic operation'
end
context 'when data does not exist' do
- it 'does not call UPDATE' do
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+ it 'does not persist' do
+ expect { subject }.to raise_error('Can not persist empty data')
end
end
end
context 'when data_store is database' do
- let(:data_store) { :db }
+ let(:data_store) { :database }
- it 'does not call UPDATE' do
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+ context 'when data exists' do
+ let(:data) { 'Sample data in database' }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
+
+ it 'persists the data' do
+ expect(build_trace_chunk.database?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
+ expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+
+ subject
+
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+ end
+
+ it_behaves_like 'Atomic operation'
end
- end
- end
- describe 'ExclusiveLock' do
- before do
- stub_exclusive_lease_taken
- stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1)
+ context 'when data does not exist' do
+ it 'does not persist' do
+ expect { subject }.to raise_error('Can not persist empty data')
+ end
+ end
end
- it 'raise an error' do
- expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+
+ context 'when data exists' do
+ let(:data) { 'Sample data in fog' }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
+
+ it 'does not change data store' do
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+
+ subject
+
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+ end
+
+ it_behaves_like 'Atomic operation'
+ end
end
end
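Taken together, the examples above describe the chunk write path: #append validates the offset and payload size, writes under the trace_write:<build_id>:chunks:<chunk_index> lease, and schedules Ci::BuildTraceChunkFlushWorker once a live (redis) chunk is full, while #persist_data! migrates a non-empty chunk to the highest-precedence store. A condensed, hypothetical sketch of that flow; the lock TTL/retry values and helpers such as unsafe_persist_data! are assumptions, while unsafe_set_data! and the error messages come from the specs:

module Ci
  class BuildTraceChunk < ActiveRecord::Base
    include ::Gitlab::ExclusiveLeaseHelpers

    WRITE_LOCK_TTL = 1.minute  # assumption
    WRITE_LOCK_RETRY = 10      # assumption

    def append(new_data, offset)
      raise 'New data is missing' unless new_data
      raise 'Offset is out of range' if offset < 0 || offset > size
      raise 'Chunk size overflow' if CHUNK_SIZE < offset + new_data.bytesize

      in_lock(lock_key, ttl: WRITE_LOCK_TTL, retries: WRITE_LOCK_RETRY) do
        unsafe_set_data!(data.byteslice(0, offset) + new_data) unless new_data.empty?
      end

      # Only a full live (redis) chunk needs a background flush; database-
      # and fog-backed chunks schedule no worker, as asserted above.
      schedule_to_persist if full? && redis?
    end

    def persist_data!
      in_lock(lock_key, ttl: WRITE_LOCK_TTL, retries: WRITE_LOCK_RETRY) do
        raise 'Can not persist empty data' unless size > 0

        # Moves the payload to the highest-precedence store (fog) and removes
        # it from the previous one; effectively a no-op when already in fog.
        unsafe_persist_data!
      end
    end

    private

    def lock_key
      "trace_write:#{build_id}:chunks:#{chunk_index}"
    end

    def schedule_to_persist
      Ci::BuildTraceChunkFlushWorker.perform_async(id)
    end

    def full?
      size == CHUNK_SIZE
    end
  end
end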
diff --git a/spec/models/ci/build_trace_chunks/database_spec.rb b/spec/models/ci/build_trace_chunks/database_spec.rb
new file mode 100644
index 00000000000..d8fc9d57e95
--- /dev/null
+++ b/spec/models/ci/build_trace_chunks/database_spec.rb
@@ -0,0 +1,105 @@
+require 'spec_helper'
+
+describe Ci::BuildTraceChunks::Database do
+ let(:data_store) { described_class.new }
+
+ describe '#available?' do
+ subject { data_store.available? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#data' do
+ subject { data_store.data(model) }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') }
+
+ it 'returns the data' do
+ is_expected.to eq('sample data in database')
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#set_data' do
+ subject { data_store.set_data(model, data) }
+
+ let(:data) { 'abc123' }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') }
+
+ it 'overwrites data' do
+ expect(data_store.data(model)).to eq('sample data in database')
+
+ subject
+
+ expect(data_store.data(model)).to eq('abc123')
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
+
+ it 'sets new data' do
+ expect(data_store.data(model)).to be_nil
+
+ subject
+
+ expect(data_store.data(model)).to eq('abc123')
+ end
+ end
+ end
+
+ describe '#delete_data' do
+ subject { data_store.delete_data(model) }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') }
+
+ it 'deletes data' do
+ expect(data_store.data(model)).to eq('sample data in database')
+
+ subject
+
+ expect(data_store.data(model)).to be_nil
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
+
+ it 'does nothing' do
+ expect(data_store.data(model)).to be_nil
+
+ subject
+
+ expect(data_store.data(model)).to be_nil
+ end
+ end
+ end
+
+ describe '#keys' do
+ subject { data_store.keys(relation) }
+
+ let(:build) { create(:ci_build) }
+ let(:relation) { build.trace_chunks }
+
+ before do
+ create(:ci_build_trace_chunk, :database_with_data, chunk_index: 0, build: build)
+ create(:ci_build_trace_chunk, :database_with_data, chunk_index: 1, build: build)
+ end
+
+ it 'returns empty array' do
+ is_expected.to eq([])
+ end
+ end
+end
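The examples above pin the database store to the simplest possible behaviour: the payload lives in the chunk row itself, so #keys has nothing external to report. A hypothetical sketch consistent with that; how the column is actually persisted is omitted:

module Ci
  module BuildTraceChunks
    class Database
      def available?
        true
      end

      def data(model)
        model.raw_data
      end

      def set_data(model, data)
        # Assigning the column is enough for the behaviour asserted above;
        # the real class presumably also persists it.
        model.raw_data = data
      end

      def delete_data(model)
        model.raw_data = nil
      end

      def keys(_relation)
        # Nothing is stored outside the rows themselves.
        []
      end

      def delete_keys(_keys)
        # no-op for the same reason
      end
    end
  end
end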
diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb
new file mode 100644
index 00000000000..8f49190af13
--- /dev/null
+++ b/spec/models/ci/build_trace_chunks/fog_spec.rb
@@ -0,0 +1,146 @@
+require 'spec_helper'
+
+describe Ci::BuildTraceChunks::Fog do
+ let(:data_store) { described_class.new }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ describe '#available?' do
+ subject { data_store.available? }
+
+ context 'when object storage is enabled' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ describe '#data' do
+ subject { data_store.data(model) }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
+
+ it 'returns the data' do
+ is_expected.to eq('sample data in fog')
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
+
+ it 'returns nil' do
+ expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ end
+ end
+ end
+
+ describe '#set_data' do
+ subject { data_store.set_data(model, data) }
+
+ let(:data) { 'abc123' }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
+
+ it 'overwrites data' do
+ expect(data_store.data(model)).to eq('sample data in fog')
+
+ subject
+
+ expect(data_store.data(model)).to eq('abc123')
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
+
+ it 'sets new data' do
+ expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+
+ subject
+
+ expect(data_store.data(model)).to eq('abc123')
+ end
+ end
+ end
+
+ describe '#delete_data' do
+ subject { data_store.delete_data(model) }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
+
+ it 'deletes data' do
+ expect(data_store.data(model)).to eq('sample data in fog')
+
+ subject
+
+ expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
+
+ it 'does nothing' do
+ expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+
+ subject
+
+ expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ end
+ end
+ end
+
+ describe '#keys' do
+ subject { data_store.keys(relation) }
+
+ let(:build) { create(:ci_build) }
+ let(:relation) { build.trace_chunks }
+
+ before do
+ create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
+ create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
+ end
+
+ it 'returns keys' do
+ is_expected.to eq([[build.id, 0], [build.id, 1]])
+ end
+ end
+
+ describe '#delete_keys' do
+ subject { data_store.delete_keys(keys) }
+
+ let(:build) { create(:ci_build) }
+ let(:relation) { build.trace_chunks }
+ let(:keys) { data_store.keys(relation) }
+
+ before do
+ create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
+ create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
+ end
+
+ it 'deletes multiple data' do
+ ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
+ expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present
+ expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present
+ end
+
+ subject
+
+ ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
+ expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound)
+ expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound)
+ end
+ end
+ end
+end
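These examples fix the object layout for the fog store: one object per chunk at tmp/builds/<build_id>/chunks/<chunk_index>.log, with Excon::Error::NotFound surfacing when the object is missing. A rough sketch under those assumptions; the bucket name is hard-coded to the 'artifacts' bucket stubbed in these specs, and the enabled lookup is an assumption:

module Ci
  module BuildTraceChunks
    class Fog
      def available?
        # Assumed to follow the artifacts object-storage setting that
        # stub_artifacts_object_storage(enabled: false) toggles above.
        Gitlab.config.artifacts.object_store.enabled
      end

      def data(model)
        # Raises Excon::Error::NotFound when no object exists for the chunk.
        connection.get_object(bucket_name, key(model))[:body]
      end

      def set_data(model, data)
        connection.put_object(bucket_name, key(model), data)
      end

      def delete_data(model)
        delete_keys([[model.build_id, model.chunk_index]])
      end

      def keys(relation)
        return [] unless available?

        relation.pluck(:build_id, :chunk_index)
      end

      def delete_keys(keys)
        keys.each do |build_id, chunk_index|
          connection.delete_object(bucket_name, key_raw(build_id, chunk_index))
        end
      end

      private

      def key(model)
        key_raw(model.build_id, model.chunk_index)
      end

      def key_raw(build_id, chunk_index)
        "tmp/builds/#{build_id}/chunks/#{chunk_index}.log"
      end

      def bucket_name
        'artifacts' # as stubbed in these specs; configurable in production
      end

      def connection
        @connection ||= ::Fog::Storage.new(JobArtifactUploader.object_store_credentials)
      end
    end
  end
end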
diff --git a/spec/models/ci/build_trace_chunks/redis_spec.rb b/spec/models/ci/build_trace_chunks/redis_spec.rb
new file mode 100644
index 00000000000..9da1e6a95ee
--- /dev/null
+++ b/spec/models/ci/build_trace_chunks/redis_spec.rb
@@ -0,0 +1,132 @@
+require 'spec_helper'
+
+describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
+ let(:data_store) { described_class.new }
+
+ describe '#available?' do
+ subject { data_store.available? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#data' do
+ subject { data_store.data(model) }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
+
+ it 'returns the data' do
+ is_expected.to eq('sample data in redis')
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#set_data' do
+ subject { data_store.set_data(model, data) }
+
+ let(:data) { 'abc123' }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
+
+ it 'overwrites data' do
+ expect(data_store.data(model)).to eq('sample data in redis')
+
+ subject
+
+ expect(data_store.data(model)).to eq('abc123')
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
+
+ it 'sets new data' do
+ expect(data_store.data(model)).to be_nil
+
+ subject
+
+ expect(data_store.data(model)).to eq('abc123')
+ end
+ end
+ end
+
+ describe '#delete_data' do
+ subject { data_store.delete_data(model) }
+
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') }
+
+ it 'deletes data' do
+ expect(data_store.data(model)).to eq('sample data in redis')
+
+ subject
+
+ expect(data_store.data(model)).to be_nil
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
+
+ it 'does nothing' do
+ expect(data_store.data(model)).to be_nil
+
+ subject
+
+ expect(data_store.data(model)).to be_nil
+ end
+ end
+ end
+
+ describe '#keys' do
+ subject { data_store.keys(relation) }
+
+ let(:build) { create(:ci_build) }
+ let(:relation) { build.trace_chunks }
+
+ before do
+ create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build)
+ create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build)
+ end
+
+ it 'returns keys' do
+ is_expected.to eq([[build.id, 0], [build.id, 1]])
+ end
+ end
+
+ describe '#delete_keys' do
+ subject { data_store.delete_keys(keys) }
+
+ let(:build) { create(:ci_build) }
+ let(:relation) { build.trace_chunks }
+ let(:keys) { data_store.keys(relation) }
+
+ before do
+ create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build)
+ create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build)
+ end
+
+ it 'deletes multiple data' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_truthy
+ expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_truthy
+ end
+
+ subject
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_falsy
+ expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_falsy
+ end
+ end
+ end
+end
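Equivalent expectations for the Redis store nail down the key scheme (gitlab:ci:trace:<build_id>:chunks:<chunk_index>) on the shared-state Redis instance. A hypothetical sketch matching those assertions; any key expiry handling is not covered by the spec and is omitted here:

module Ci
  module BuildTraceChunks
    class Redis
      def available?
        true
      end

      def data(model)
        Gitlab::Redis::SharedState.with do |redis|
          redis.get(key(model))
        end
      end

      def set_data(model, data)
        Gitlab::Redis::SharedState.with do |redis|
          redis.set(key(model), data)
        end
      end

      def delete_data(model)
        delete_keys([[model.build_id, model.chunk_index]])
      end

      def keys(relation)
        relation.pluck(:build_id, :chunk_index)
      end

      def delete_keys(keys)
        return if keys.empty?

        Gitlab::Redis::SharedState.with do |redis|
          redis.del(keys.map { |build_id, chunk_index| key_raw(build_id, chunk_index) })
        end
      end

      private

      def key(model)
        key_raw(model.build_id, model.chunk_index)
      end

      def key_raw(build_id, chunk_index)
        "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}"
      end
    end
  end
end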
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 471b0a74a19..58b5c6a6435 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -25,6 +25,11 @@ module StubObjectStorage
::Fog::Storage.new(connection_params).tap do |connection|
begin
connection.directories.create(key: remote_directory)
+
+ # Cleanup remaining files
+ connection.directories.each do |directory|
+ directory.files.map(&:destroy)
+ end
rescue Excon::Error::Conflict
end
end