author    Shinya Maeda <shinya@gitlab.com>  2018-06-25 16:19:40 +0900
committer Shinya Maeda <shinya@gitlab.com>  2018-06-25 16:19:40 +0900
commit    24ba0989878a363c37d86758844a17a99fc7ae0c (patch)
tree      6599adaf40d7d8bec14fee9c9fa03de7862ad7f8 /spec
parent    494673793d6b4491b2a7843f0614e5bcb3d88a3c (diff)
Added spec for build trace chunk
Diffstat (limited to 'spec')
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb     471
-rw-r--r--  spec/support/helpers/stub_object_storage.rb    5
2 files changed, 307 insertions, 169 deletions
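
The new spec drives the same expectations through each backing store via RSpec shared examples. A minimal, hypothetical sketch of that pattern (store names, helpers, and data below are illustrative, not the spec's actual setup):

    # Illustrative sketch only -- not part of the commit.
    require 'rspec'

    RSpec.shared_examples_for 'readable chunk' do
      it 'returns the stored data' do
        expect(chunk_data).to eq(expected_data)
      end
    end

    RSpec.describe 'build trace chunk stores (sketch)' do
      # Run the identical assertion once per hypothetical store name.
      %i[redis database].each do |store|
        context "when data_store is #{store}" do
          let(:expected_data) { "Sample data in #{store}" }
          # Stand-in for build_trace_chunk.data in the real spec.
          let(:chunk_data)    { "Sample data in #{store}" }

          it_behaves_like 'readable chunk'
        end
      end
    end
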
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 79e0f1f20bf..44eaf7afad3 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -12,6 +12,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
before do
stub_feature_flags(ci_enable_live_trace: true)
+ stub_artifacts_object_storage
end
context 'FastDestroyAll' do
@@ -42,154 +43,220 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis }
before do
- build_trace_chunk.send(:redis_set_data, 'Sample data in redis')
+ build_trace_chunk.send(:unsafe_set_data!, 'Sample data in redis')
end
it { is_expected.to eq('Sample data in redis') }
end
context 'when data_store is database' do
- let(:data_store) { :db }
- let(:raw_data) { 'Sample data in db' }
+ let(:data_store) { :database }
+ let(:raw_data) { 'Sample data in database' }
- it { is_expected.to eq('Sample data in db') }
+ it { is_expected.to eq('Sample data in database') }
end
- end
-
- describe '#set_data' do
- subject { build_trace_chunk.send(:set_data, value) }
- let(:value) { 'Sample data' }
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
- context 'when value bytesize is bigger than CHUNK_SIZE' do
- let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) }
+ before do
+ ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
+ connection.put_object('artifacts', "tmp/builds/#{build.id}/chunks/#{chunk_index}.log", 'Sample data in fog')
+ end
+ end
- it { expect { subject }.to raise_error('too much data') }
+ it { is_expected.to eq('Sample data in fog') }
end
+ end
- context 'when data_store is redis' do
- let(:data_store) { :redis }
+ describe '#append' do
+ subject { build_trace_chunk.append(new_data, offset) }
- it do
- expect(build_trace_chunk.send(:redis_data)).to be_nil
+ let(:new_data) { 'Sample new data' }
+ let(:offset) { 0 }
+ let(:merged_data) { data + new_data.to_s }
- subject
+ shared_examples_for 'Appending correctly' do
+ context 'when offset is negative' do
+ let(:offset) { -1 }
- expect(build_trace_chunk.send(:redis_data)).to eq(value)
+ it { expect { subject }.to raise_error('Offset is out of range') }
end
- context 'when fullfilled chunk size' do
- let(:value) { 'a' * described_class::CHUNK_SIZE }
-
- it 'schedules stashing data' do
- expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
+ context 'when offset is bigger than data size' do
+ let(:offset) { data.bytesize + 1 }
- subject
- end
+ it { expect { subject }.to raise_error('Offset is out of range') }
end
- end
- context 'when data_store is database' do
- let(:data_store) { :db }
+ context 'when new data overflows chunk size' do
+ let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
- it 'sets data' do
- expect(build_trace_chunk.raw_data).to be_nil
+ it { expect { subject }.to raise_error('Chunk size overflow') }
+ end
- subject
+ context 'when offset is EOF' do
+ let(:offset) { data.bytesize }
- expect(build_trace_chunk.raw_data).to eq(value)
- expect(build_trace_chunk.persisted?).to be_truthy
- end
+ it 'appends' do
+ subject
- context 'when raw_data is not changed' do
- it 'does not execute UPDATE' do
- expect(build_trace_chunk.raw_data).to be_nil
- build_trace_chunk.save!
+ expect(build_trace_chunk.data).to eq(merged_data)
+ end
- # First set
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0
- expect(build_trace_chunk.raw_data).to eq(value)
- expect(build_trace_chunk.persisted?).to be_truthy
+ context 'when new_data is nil' do
+ let(:new_data) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('New data is nil')
+ end
+ end
- # Second set
- build_trace_chunk.reload
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0)
+ context 'when new_data is empty' do
+ let(:new_data) { '' }
+
+ it 'does not append' do
+ subject
+
+ expect(build_trace_chunk.data).to eq(data)
+ end
+
+ it 'does not execute UPDATE' do
+ ActiveRecord::QueryRecorder.new { subject }.log.map do |query|
+ expect(query).not_to include('UPDATE')
+ end
+ end
end
end
- context 'when fullfilled chunk size' do
- it 'does not schedule stashing data' do
- expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
+ context 'when offset is in the middle of data size' do
+ let(:offset) { data.bytesize / 2 }
+ it 'appends' do
subject
+
+ expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
end
end
end
- context 'when data_store is others' do
- before do
- build_trace_chunk.send(:write_attribute, :data_store, -1)
- end
+ shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
+ context 'when new data fills the chunk size' do
+ let(:new_data) { 'a' * described_class::CHUNK_SIZE }
- it { expect { subject }.to raise_error('Unsupported data store') }
- end
- end
+ it 'schedules trace chunk flush worker' do
+ expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
- describe '#truncate' do
- subject { build_trace_chunk.truncate(offset) }
+ subject
+ end
- shared_examples_for 'truncates' do
- context 'when offset is negative' do
- let(:offset) { -1 }
+ it 'migrates data to object storage' do
+ Sidekiq::Testing.inline! do
+ subject
- it { expect { subject }.to raise_error('Offset is out of range') }
+ build_trace_chunk.reload
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(build_trace_chunk.data).to eq(new_data)
+ end
+ end
end
+ end
- context 'when offset is bigger than data size' do
- let(:offset) { data.bytesize + 1 }
-
- it { expect { subject }.to raise_error('Offset is out of range') }
- end
+ shared_examples_for 'Scheduling no sidekiq worker' do
+ context 'when new data fills the chunk size' do
+ let(:new_data) { 'a' * described_class::CHUNK_SIZE }
- context 'when offset is 10' do
- let(:offset) { 10 }
+ it 'does not schedule trace chunk flush worker' do
+ expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
- it 'truncates' do
subject
+ end
- expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
+ it 'does not migrate data to object storage' do
+ Sidekiq::Testing.inline! do
+ data_store = build_trace_chunk.data_store
+
+ subject
+
+ build_trace_chunk.reload
+ expect(build_trace_chunk.data_store).to eq(data_store)
+ end
end
end
end
context 'when data_store is redis' do
let(:data_store) { :redis }
- let(:data) { 'Sample data in redis' }
- before do
- build_trace_chunk.send(:redis_set_data, data)
+ context 'when there are no data' do
+ let(:data) { '' }
+
+ it 'has no data' do
+ expect(build_trace_chunk.data).to be_empty
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
end
- it_behaves_like 'truncates'
+ context 'when there are some data' do
+ let(:data) { 'Sample data in redis' }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
+
+ it 'has data' do
+ expect(build_trace_chunk.data).to eq(data)
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
+ end
end
context 'when data_store is database' do
- let(:data_store) { :db }
- let(:raw_data) { 'Sample data in db' }
- let(:data) { raw_data }
+ let(:data_store) { :database }
- it_behaves_like 'truncates'
+ context 'when there are no data' do
+ let(:data) { '' }
+
+ it 'has no data' do
+ expect(build_trace_chunk.data).to be_empty
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling no sidekiq worker'
+ end
+
+ context 'when there are some data' do
+ let(:raw_data) { 'Sample data in database' }
+ let(:data) { raw_data }
+
+ it 'has data' do
+ expect(build_trace_chunk.data).to eq(data)
+ end
+
+ it_behaves_like 'Appending correctly'
+ it_behaves_like 'Scheduling no sidekiq worker'
+ end
end
- end
- describe '#append' do
- subject { build_trace_chunk.append(new_data, offset) }
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+ let(:data) { '' }
+ let(:offset) { 0 }
- let(:new_data) { 'Sample new data' }
- let(:offset) { 0 }
- let(:total_data) { data + new_data }
+ it 'can not append' do
+ expect { subject }.to raise_error('Fog store does not support appending')
+ end
+ end
+ end
+
+ describe '#truncate' do
+ subject { build_trace_chunk.truncate(offset) }
- shared_examples_for 'appends' do
+ shared_examples_for 'truncates' do
context 'when offset is negative' do
let(:offset) { -1 }
@@ -202,29 +269,13 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it { expect { subject }.to raise_error('Offset is out of range') }
end
- context 'when offset is bigger than data size' do
- let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) }
-
- it { expect { subject }.to raise_error('Chunk size overflow') }
- end
-
- context 'when offset is EOF' do
- let(:offset) { data.bytesize }
-
- it 'appends' do
- subject
-
- expect(build_trace_chunk.data).to eq(total_data)
- end
- end
-
context 'when offset is 10' do
let(:offset) { 10 }
- it 'appends' do
+ it 'truncates' do
subject
- expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
+ expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
end
end
end
@@ -234,18 +285,28 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
- build_trace_chunk.send(:redis_set_data, data)
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
- it_behaves_like 'appends'
+ it_behaves_like 'truncates'
end
context 'when data_store is database' do
- let(:data_store) { :db }
- let(:raw_data) { 'Sample data in db' }
+ let(:data_store) { :database }
+ let(:raw_data) { 'Sample data in database' }
let(:data) { raw_data }
- it_behaves_like 'appends'
+ it_behaves_like 'truncates'
+ end
+
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+ let(:data) { '' }
+ let(:offset) { 0 }
+
+ it 'can not truncate' do
+ expect { subject }.to raise_error('Fog store does not support truncating')
+ end
end
end
@@ -259,7 +320,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
- build_trace_chunk.send(:redis_set_data, data)
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
it { is_expected.to eq(data.bytesize) }
@@ -271,10 +332,10 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
context 'when data_store is database' do
- let(:data_store) { :db }
+ let(:data_store) { :database }
context 'when data exists' do
- let(:raw_data) { 'Sample data in db' }
+ let(:raw_data) { 'Sample data in database' }
let(:data) { raw_data }
it { is_expected.to eq(data.bytesize) }
@@ -284,6 +345,25 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it { is_expected.to eq(0) }
end
end
+
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+
+ context 'when data exists' do
+ let(:data) { 'Sample data in fog' }
+ let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
+ end
+
+ it { is_expected.to eq(data.bytesize) }
+ end
+
+ context 'when data does not exist' do
+ it { expect{ subject }.to raise_error(Excon::Error::NotFound) }
+ end
+ end
end
describe '#persist_data!' do
@@ -296,93 +376,146 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
- build_trace_chunk.send(:redis_set_data, data)
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
- it 'stashes the data' do
- expect(build_trace_chunk.data_store).to eq('redis')
- expect(build_trace_chunk.send(:redis_data)).to eq(data)
- expect(build_trace_chunk.raw_data).to be_nil
+ it 'persists the data' do
+ expect(build_trace_chunk.redis?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
subject
- expect(build_trace_chunk.data_store).to eq('db')
- expect(build_trace_chunk.send(:redis_data)).to be_nil
- expect(build_trace_chunk.raw_data).to eq(data)
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
end
context 'when data does not exist' do
- it 'does not call UPDATE' do
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
+ it 'does not persist' do
+ expect { subject }.to raise_error('Can not persist empty data')
end
end
end
context 'when data_store is database' do
- let(:data_store) { :db }
-
- it 'does not call UPDATE' do
- expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0)
- end
- end
- end
-
- describe 'ExclusiveLock' do
- before do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil }
- stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1)
- end
-
- it 'raise an error' do
- expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
- end
- end
-
- describe 'deletes data in redis after a parent record destroyed' do
- let(:project) { create(:project) }
+ let(:data_store) { :database }
- before do
- pipeline = create(:ci_pipeline, project: project)
- create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
- create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
- create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
- end
+ context 'when data exists' do
+ let(:data) { 'Sample data in database' }
- shared_examples_for 'deletes all build_trace_chunk and data in redis' do
- it do
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3)
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
- expect(described_class.count).to eq(3)
+ it 'persists the data' do
+ expect(build_trace_chunk.database?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
+ expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
- subject
+ subject
- expect(described_class.count).to eq(0)
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+ end
+ end
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0)
+ context 'when data does not exist' do
+ it 'does not persist' do
+ expect { subject }.to raise_error('Can not persist empty data')
end
end
end
- context 'when traces are archived' do
- let(:subject) do
- project.builds.each do |build|
- build.success!
+ context 'when data_store is fog' do
+ let(:data_store) { :fog }
+
+ context 'when data exists' do
+ let(:data) { 'Sample data in fog' }
+
+ before do
+ build_trace_chunk.send(:unsafe_set_data!, data)
end
- end
- it_behaves_like 'deletes all build_trace_chunk and data in redis'
- end
+ it 'does not change data store' do
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
- context 'when project is destroyed' do
- let(:subject) do
- project.destroy!
- end
+ subject
- it_behaves_like 'deletes all build_trace_chunk and data in redis'
+ expect(build_trace_chunk.fog?).to be_truthy
+ expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
+ end
+ end
end
end
+
+ ## TODO:
+ # describe 'ExclusiveLock' do
+ # before do
+ # allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil }
+ # stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1)
+ # end
+
+ # it 'raise an error' do
+ # expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
+ # end
+ # end
+
+ # describe 'deletes data in redis after a parent record destroyed' do
+ # let(:project) { create(:project) }
+
+ # before do
+ # pipeline = create(:ci_pipeline, project: project)
+ # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
+ # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
+ # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
+ # end
+
+ # shared_examples_for 'deletes all build_trace_chunk and data in redis' do
+ # it do
+ # Gitlab::Redis::SharedState.with do |redis|
+ # expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3)
+ # end
+
+ # expect(described_class.count).to eq(3)
+
+ # subject
+
+ # expect(described_class.count).to eq(0)
+
+ # Gitlab::Redis::SharedState.with do |redis|
+ # expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0)
+ # end
+ # end
+ # end
+
+ # context 'when traces are archived' do
+ # let(:subject) do
+ # project.builds.each do |build|
+ # build.success!
+ # end
+ # end
+
+ # it_behaves_like 'deletes all build_trace_chunk and data in redis'
+ # end
+
+ # context 'when project is destroyed' do
+ # let(:subject) do
+ # project.destroy!
+ # end
+
+ # it_behaves_like 'deletes all build_trace_chunk and data in redis'
+ # end
+ # end
end
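
For context on the fog-backed examples above: they seed an object through Fog before reading it back. A self-contained sketch of that round trip, assuming a mocked fog-aws connection with fake credentials and an illustrative bucket and key (not the spec's real object-storage configuration):

    # Illustrative sketch only: seed and read an object with a mocked Fog connection.
    require 'fog/aws'

    Fog.mock! # keep everything in memory; no real AWS calls are made

    connection = Fog::Storage.new(
      provider:              'AWS',
      aws_access_key_id:     'fake',   # fake credentials are fine under Fog.mock!
      aws_secret_access_key: 'fake'
    )

    connection.directories.create(key: 'artifacts')                 # the bucket
    connection.put_object('artifacts', 'tmp/builds/1/chunks/0.log', # the chunk path
                          'Sample data in fog')

    # Reading the object back yields the body written above.
    puts connection.get_object('artifacts', 'tmp/builds/1/chunks/0.log').body
    # => Sample data in fog
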
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index bceaf8277ee..be122f9578c 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -20,6 +20,11 @@ module StubObjectStorage
::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
begin
connection.directories.create(key: remote_directory)
+
+ # Cleanup remaining files
+ connection.directories.each do |directory|
+ directory.files.map(&:destroy)
+ end
rescue Excon::Error::Conflict
end
end
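
The helper change above clears any objects left behind by earlier examples. A standalone sketch of that cleanup under the same mocked-fog assumptions, with an illustrative bucket and key:

    # Illustrative sketch only: destroy every leftover object so state does
    # not leak between examples.
    require 'fog/aws'

    Fog.mock!

    connection = Fog::Storage.new(
      provider:              'AWS',
      aws_access_key_id:     'fake',
      aws_secret_access_key: 'fake'
    )

    connection.directories.create(key: 'artifacts')
    connection.put_object('artifacts', 'stale/object.log', 'leftover data')

    # Iterate every directory and destroy each remaining file.
    connection.directories.each do |directory|
      directory.files.each(&:destroy)
    end

    puts connection.directories.get('artifacts').files.count
    # => 0
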