From 644529590a263f8db215d288c2f59abbe632a09b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Trzci=C5=84ski?= Date: Thu, 7 Jun 2018 10:04:55 +0200 Subject: Allow to store BuildTraceChunks on Object Storage --- app/models/ci/build_trace_chunk.rb | 105 ++++++++++------------- app/models/ci/build_trace_chunks/database.rb | 29 +++++++ app/models/ci/build_trace_chunks/fog.rb | 59 +++++++++++++ app/models/ci/build_trace_chunks/redis.rb | 51 +++++++++++ app/workers/ci/build_trace_chunk_flush_worker.rb | 2 +- spec/models/ci/build_trace_chunk_spec.rb | 4 +- 6 files changed, 189 insertions(+), 61 deletions(-) create mode 100644 app/models/ci/build_trace_chunks/database.rb create mode 100644 app/models/ci/build_trace_chunks/fog.rb create mode 100644 app/models/ci/build_trace_chunks/redis.rb diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 4856f10846c..6b89efe8eb0 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -10,45 +10,48 @@ module Ci WriteError = Class.new(StandardError) CHUNK_SIZE = 128.kilobytes - CHUNK_REDIS_TTL = 1.week WRITE_LOCK_RETRY = 10 WRITE_LOCK_SLEEP = 0.01.seconds WRITE_LOCK_TTL = 1.minute enum data_store: { redis: 1, - db: 2 + database: 2, + fog: 3 } class << self - def redis_data_key(build_id, chunk_index) - "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}" + def all_stores + @all_stores ||= self.data_stores.keys end - def redis_data_keys - redis.pluck(:build_id, :chunk_index).map do |data| - redis_data_key(data.first, data.second) - end + def persist_store + # get first available store from the back of the list + all_stores.reverse.find { |store| get_store_class(store).available? } end - def redis_delete_data(keys) - return if keys.empty? - - Gitlab::Redis::SharedState.with do |redis| - redis.del(keys) - end + def get_store_class(store) + @stores ||= {} + @stores[store] ||= "Ci::BuildTraceChunks::#{store.capitalize}".constantize.new end ## # FastDestroyAll concerns def begin_fast_destroy - redis_data_keys + all_stores.each_with_object({}) do |result, store| + relation = public_send(store) + keys = get_store_class(store).keys(relation) + + result[store] = keys if keys.present? + end end ## # FastDestroyAll concerns def finalize_fast_destroy(keys) - redis_delete_data(keys) + keys.each do |store, value| + get_store_class(store).delete_keys(value) + end end end @@ -69,7 +72,7 @@ module Ci raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) - set_data(data.byteslice(0, offset) + new_data) + set_data!(data.byteslice(0, offset) + new_data) end def size @@ -87,51 +90,53 @@ module Ci def range (start_offset...end_offset) end + + def persisted? + !redis? + end - def use_database! + def persist! in_lock do - break if db? - break unless size > 0 - - self.update!(raw_data: data, data_store: :db) - self.class.redis_delete_data([redis_data_key]) + unsafe_move_to!(self.class.persist_store) end end private + def unsafe_move_to!(new_store) + return if data_store == new_store.to_s + return unless size > 0 + + old_store_class = self.class.get_store_class(data_store) + + self.get_data.tap do |the_data| + self.raw_data = nil + self.data_store = new_store + self.set_data!(the_data) + end + + old_store_class.delete_data(self) + end + def get_data - if redis? - redis_data - elsif db? 
- raw_data - else - raise 'Unsupported data store' - end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default + self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default end - def set_data(value) + def set_data!(value) raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE in_lock do - if redis? - redis_set_data(value) - elsif db? - self.raw_data = value - else - raise 'Unsupported data store' - end - + self.class.get_store_class(data_store).set_data(self, value) @data = value save! if changed? end - schedule_to_db if full? + schedule_to_persist if full? end - def schedule_to_db - return if db? + def schedule_to_persist + return if persisted? Ci::BuildTraceChunkFlushWorker.perform_async(id) end @@ -140,22 +145,6 @@ module Ci size == CHUNK_SIZE end - def redis_data - Gitlab::Redis::SharedState.with do |redis| - redis.get(redis_data_key) - end - end - - def redis_set_data(data) - Gitlab::Redis::SharedState.with do |redis| - redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL) - end - end - - def redis_data_key - self.class.redis_data_key(build_id, chunk_index) - end - def in_lock write_lock_key = "trace_write:#{build_id}:chunks:#{chunk_index}" diff --git a/app/models/ci/build_trace_chunks/database.rb b/app/models/ci/build_trace_chunks/database.rb new file mode 100644 index 00000000000..3666d77c790 --- /dev/null +++ b/app/models/ci/build_trace_chunks/database.rb @@ -0,0 +1,29 @@ +module Ci + module BuildTraceChunks + class Database + def available? + true + end + + def keys(relation) + [] + end + + def delete_keys(keys) + # no-op + end + + def data(model) + model.raw_data + end + + def set_data(model, data) + model.raw_data = data + end + + def delete_data(model) + model.update_columns(raw_data: nil) unless model.raw_data.nil? + end + end + end +end diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb new file mode 100644 index 00000000000..18b09347381 --- /dev/null +++ b/app/models/ci/build_trace_chunks/fog.rb @@ -0,0 +1,59 @@ +module Ci + module BuildTraceChunks + class Fog + def available? + object_store.enabled + end + + def data(model) + connection.get_object(bucket_name, key(model)).body + end + + def set_data(model, data) + connection.put_object(bucket_name, key(model), data) + end + + def delete_data(model) + delete_keys([[model.build_id, model.chunk_index]]) + end + + def keys(relation) + return [] unless available? + + relation.pluck(:build_id, :chunk_index) + end + + def delete_keys(keys) + keys.each do |key| + connection.delete_object(bucket_name, key_raw(*key)) + end + end + + private + + def key(model) + key_raw(model.build_id, model.chunk_index) + end + + def key_raw(build_id, chunk_index) + "tmp/chunks/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log" + end + + def bucket_name + return unless available? + + object_store.remote_directory + end + + def connection + return unless available? + + @connection ||= ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys) + end + + def object_store + Gitlab.config.artifacts.object_store + end + end + end +end diff --git a/app/models/ci/build_trace_chunks/redis.rb b/app/models/ci/build_trace_chunks/redis.rb new file mode 100644 index 00000000000..fdb6065e2a0 --- /dev/null +++ b/app/models/ci/build_trace_chunks/redis.rb @@ -0,0 +1,51 @@ +module Ci + module BuildTraceChunks + class Redis + CHUNK_REDIS_TTL = 1.week + + def available? 
+ true + end + + def data(model) + Gitlab::Redis::SharedState.with do |redis| + redis.get(key(model)) + end + end + + def set_data(model, data) + Gitlab::Redis::SharedState.with do |redis| + redis.set(key(model), data, ex: CHUNK_REDIS_TTL) + end + end + + def delete_data(model) + delete_keys([[model.build_id, model.chunk_index]]) + end + + def keys(relation) + relation.pluck(:build_id, :chunk_index) + end + + def delete_keys(keys) + return if keys.empty? + + keys = keys.map { |key| key_raw(*key) } + + Gitlab::Redis::SharedState.with do |redis| + redis.del(keys) + end + end + + private + + def key(model) + key_raw(model.build_id, model.chunk_index) + end + + def key_raw(build_id, chunk_index) + "gitlab:ci:trace:#{build_id.to_i}:chunks:#{chunk_index.to_i}" + end + end + end +end diff --git a/app/workers/ci/build_trace_chunk_flush_worker.rb b/app/workers/ci/build_trace_chunk_flush_worker.rb index 218d6688bd9..8e08ccbc414 100644 --- a/app/workers/ci/build_trace_chunk_flush_worker.rb +++ b/app/workers/ci/build_trace_chunk_flush_worker.rb @@ -5,7 +5,7 @@ module Ci def perform(build_trace_chunk_id) ::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk| - build_trace_chunk.use_database! + build_trace_chunk.persist! end end end diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index cbcf1e55979..3f89898ed06 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -294,8 +294,8 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end end - describe '#use_database!' do - subject { build_trace_chunk.use_database! } + describe '#persist!' do + subject { build_trace_chunk.persist! } context 'when data_store is redis' do let(:data_store) { :redis } -- cgit v1.2.1 From 082dee862fde985b5d0fc9892059f3f1f2c798bd Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 15 Jun 2018 15:48:03 +0900 Subject: Fix dead lock by in_lock conflicts. Shared out in_lock logic. Changed key_raw path of fog store. and some bug fixes to make it functional. --- app/models/ci/build.rb | 2 +- app/models/ci/build_trace_chunk.rb | 61 ++++++++++++--------------- app/models/ci/build_trace_chunks/fog.rb | 4 +- app/services/concerns/exclusive_lease_lock.rb | 21 +++++++++ 4 files changed, 51 insertions(+), 37 deletions(-) create mode 100644 app/services/concerns/exclusive_lease_lock.rb diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index 41446946a5e..8c90232405e 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -385,7 +385,7 @@ module Ci end def erase_old_trace! - update_column(:trace, nil) + update_column(:trace, nil) if old_trace end def needs_touch? diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 6b89efe8eb0..179c5830678 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -1,6 +1,7 @@ module Ci class BuildTraceChunk < ActiveRecord::Base include FastDestroyAll + include ExclusiveLeaseLock extend Gitlab::Ci::Model belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id @@ -14,6 +15,8 @@ module Ci WRITE_LOCK_SLEEP = 0.01.seconds WRITE_LOCK_TTL = 1.minute + # Note: The ordering of this enum is related to the precedence of persist store. + # The bottom item takes the higest precedence, and the top item takes the lowest precedence. 
enum data_store: { redis: 1, database: 2, @@ -38,8 +41,8 @@ module Ci ## # FastDestroyAll concerns def begin_fast_destroy - all_stores.each_with_object({}) do |result, store| - relation = public_send(store) + all_stores.each_with_object({}) do |store, result| + relation = public_send(store) # rubocop:disable GitlabSecurity/PublicSend keys = get_store_class(store).keys(relation) result[store] = keys if keys.present? @@ -72,7 +75,13 @@ module Ci raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) - set_data!(data.byteslice(0, offset) + new_data) + in_lock(*lock_params) do + self.reload if self.persisted? + + unsafe_set_data!(data.byteslice(0, offset) + new_data) + end + + schedule_to_persist if full? end def size @@ -90,13 +99,15 @@ module Ci def range (start_offset...end_offset) end - + def persisted? !redis? end def persist! - in_lock do + in_lock(*lock_params) do + self.reload if self.persisted? + unsafe_move_to!(self.class.persist_store) end end @@ -109,10 +120,10 @@ module Ci old_store_class = self.class.get_store_class(data_store) - self.get_data.tap do |the_data| + get_data.tap do |the_data| self.raw_data = nil self.data_store = new_store - self.set_data!(the_data) + unsafe_set_data!(the_data) end old_store_class.delete_data(self) @@ -122,17 +133,13 @@ module Ci self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default end - def set_data!(value) + def unsafe_set_data!(value) raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE - in_lock do - self.class.get_store_class(data_store).set_data(self, value) - @data = value - - save! if changed? - end + self.class.get_store_class(data_store).set_data(self, value) + @data = value - schedule_to_persist if full? + save! if changed? end def schedule_to_persist @@ -145,25 +152,11 @@ module Ci size == CHUNK_SIZE end - def in_lock - write_lock_key = "trace_write:#{build_id}:chunks:#{chunk_index}" - - lease = Gitlab::ExclusiveLease.new(write_lock_key, timeout: WRITE_LOCK_TTL) - retry_count = 0 - - until uuid = lease.try_obtain - # Keep trying until we obtain the lease. To prevent hammering Redis too - # much we'll wait for a bit between retries. - sleep(WRITE_LOCK_SLEEP) - break if WRITE_LOCK_RETRY < (retry_count += 1) - end - - raise WriteError, 'Failed to obtain write lock' unless uuid - - self.reload if self.persisted? 
- return yield - ensure - Gitlab::ExclusiveLease.cancel(write_lock_key, uuid) + def lock_params + ["trace_write:#{build_id}:chunks:#{chunk_index}", + { ttl: WRITE_LOCK_TTL, + retry_max: WRITE_LOCK_RETRY, + sleep_sec: WRITE_LOCK_SLEEP }] end end end diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb index 18b09347381..7506c40a39d 100644 --- a/app/models/ci/build_trace_chunks/fog.rb +++ b/app/models/ci/build_trace_chunks/fog.rb @@ -6,7 +6,7 @@ module Ci end def data(model) - connection.get_object(bucket_name, key(model)).body + connection.get_object(bucket_name, key(model))[:body] end def set_data(model, data) @@ -36,7 +36,7 @@ module Ci end def key_raw(build_id, chunk_index) - "tmp/chunks/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log" + "tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log" end def bucket_name diff --git a/app/services/concerns/exclusive_lease_lock.rb b/app/services/concerns/exclusive_lease_lock.rb new file mode 100644 index 00000000000..6c8bc25ea16 --- /dev/null +++ b/app/services/concerns/exclusive_lease_lock.rb @@ -0,0 +1,21 @@ +module ExclusiveLeaseLock + extend ActiveSupport::Concern + + def in_lock(key, ttl: 1.minute, retry_max: 10, sleep_sec: 0.01.seconds) + lease = Gitlab::ExclusiveLease.new(key, timeout: ttl) + retry_count = 0 + + until uuid = lease.try_obtain + # Keep trying until we obtain the lease. To prevent hammering Redis too + # much we'll wait for a bit. + sleep(sleep_sec) + break if retry_max < (retry_count += 1) + end + + raise WriteError, 'Failed to obtain write lock' unless uuid + + return yield + ensure + Gitlab::ExclusiveLease.cancel(key, uuid) + end +end -- cgit v1.2.1 From 1aeedf41f8681bbc638dde8b8a263a7d7cd13a1e Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 15 Jun 2018 15:50:33 +0900 Subject: Add changelog --- changelogs/unreleased/build-chunks-on-object-storage.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 changelogs/unreleased/build-chunks-on-object-storage.yml diff --git a/changelogs/unreleased/build-chunks-on-object-storage.yml b/changelogs/unreleased/build-chunks-on-object-storage.yml new file mode 100644 index 00000000000..9f36dfee378 --- /dev/null +++ b/changelogs/unreleased/build-chunks-on-object-storage.yml @@ -0,0 +1,6 @@ +--- +title: Use object storage as the first class persistable store for new live trace + architecture +merge_request: 19515 +author: +type: changed -- cgit v1.2.1 From 494673793d6b4491b2a7843f0614e5bcb3d88a3c Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 18 Jun 2018 17:56:16 +0900 Subject: Rename persisted? to data_persisted? --- app/models/ci/build_trace_chunk.rb | 18 +++++++----------- app/workers/ci/build_trace_chunk_flush_worker.rb | 2 +- spec/models/ci/build_trace_chunk_spec.rb | 4 ++-- 3 files changed, 10 insertions(+), 14 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 179c5830678..59096f54f0b 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -75,9 +75,7 @@ module Ci raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) - in_lock(*lock_params) do - self.reload if self.persisted? - + in_lock(*lock_params) do # Write opetation is atomic unsafe_set_data!(data.byteslice(0, offset) + new_data) end @@ -100,21 +98,19 @@ module Ci (start_offset...end_offset) end - def persisted? + def data_persisted? !redis? 
end - def persist! - in_lock(*lock_params) do - self.reload if self.persisted? - - unsafe_move_to!(self.class.persist_store) + def persist_data! + in_lock(*lock_params) do # Write opetation is atomic + unsafe_migrate_to!(self.class.persist_store) end end private - def unsafe_move_to!(new_store) + def unsafe_migrate_to!(new_store) return if data_store == new_store.to_s return unless size > 0 @@ -143,7 +139,7 @@ module Ci end def schedule_to_persist - return if persisted? + return if data_persisted? Ci::BuildTraceChunkFlushWorker.perform_async(id) end diff --git a/app/workers/ci/build_trace_chunk_flush_worker.rb b/app/workers/ci/build_trace_chunk_flush_worker.rb index 8e08ccbc414..49ff0f4ab94 100644 --- a/app/workers/ci/build_trace_chunk_flush_worker.rb +++ b/app/workers/ci/build_trace_chunk_flush_worker.rb @@ -5,7 +5,7 @@ module Ci def perform(build_trace_chunk_id) ::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk| - build_trace_chunk.persist! + build_trace_chunk.persist_data! end end end diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index f0c36067c87..79e0f1f20bf 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -286,8 +286,8 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end end - describe '#persist!' do - subject { build_trace_chunk.persist! } + describe '#persist_data!' do + subject { build_trace_chunk.persist_data! } context 'when data_store is redis' do let(:data_store) { :redis } -- cgit v1.2.1 From 24ba0989878a363c37d86758844a17a99fc7ae0c Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 25 Jun 2018 16:19:40 +0900 Subject: Added spec for build trace chunk --- app/models/ci/build_trace_chunk.rb | 21 +- app/services/concerns/exclusive_lease_lock.rb | 4 +- spec/models/ci/build_trace_chunk_spec.rb | 471 +++++++++++++++++--------- spec/support/helpers/stub_object_storage.rb | 5 + 4 files changed, 321 insertions(+), 180 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 59096f54f0b..8a34db798db 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -8,8 +8,6 @@ module Ci default_value_for :data_store, :redis - WriteError = Class.new(StandardError) - CHUNK_SIZE = 128.kilobytes WRITE_LOCK_RETRY = 10 WRITE_LOCK_SLEEP = 0.01.seconds @@ -65,6 +63,7 @@ module Ci end def truncate(offset = 0) + raise ArgumentError, 'Fog store does not support truncating' if fog? # If data is null, get_data returns Excon::Error::NotFound raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 return if offset == size # Skip the following process as it doesn't affect anything @@ -72,6 +71,8 @@ module Ci end def append(new_data, offset) + raise ArgumentError, 'Fog store does not support appending' if fog? # If data is null, get_data returns Excon::Error::NotFound + raise ArgumentError, 'New data is nil' unless new_data raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) @@ -98,21 +99,17 @@ module Ci (start_offset...end_offset) end - def data_persisted? - !redis? - end - def persist_data! 
in_lock(*lock_params) do # Write opetation is atomic - unsafe_migrate_to!(self.class.persist_store) + unsafe_persist_to!(self.class.persist_store) end end private - def unsafe_migrate_to!(new_store) + def unsafe_persist_to!(new_store) return if data_store == new_store.to_s - return unless size > 0 + raise ArgumentError, 'Can not persist empty data' unless size > 0 old_store_class = self.class.get_store_class(data_store) @@ -130,7 +127,7 @@ module Ci end def unsafe_set_data!(value) - raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE + raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE self.class.get_store_class(data_store).set_data(self, value) @data = value @@ -144,6 +141,10 @@ module Ci Ci::BuildTraceChunkFlushWorker.perform_async(id) end + def data_persisted? + !redis? + end + def full? size == CHUNK_SIZE end diff --git a/app/services/concerns/exclusive_lease_lock.rb b/app/services/concerns/exclusive_lease_lock.rb index 6c8bc25ea16..231cfd3e3c5 100644 --- a/app/services/concerns/exclusive_lease_lock.rb +++ b/app/services/concerns/exclusive_lease_lock.rb @@ -1,6 +1,8 @@ module ExclusiveLeaseLock extend ActiveSupport::Concern + FailedToObtainLockError = Class.new(StandardError) + def in_lock(key, ttl: 1.minute, retry_max: 10, sleep_sec: 0.01.seconds) lease = Gitlab::ExclusiveLease.new(key, timeout: ttl) retry_count = 0 @@ -12,7 +14,7 @@ module ExclusiveLeaseLock break if retry_max < (retry_count += 1) end - raise WriteError, 'Failed to obtain write lock' unless uuid + raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid return yield ensure diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 79e0f1f20bf..44eaf7afad3 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -12,6 +12,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do before do stub_feature_flags(ci_enable_live_trace: true) + stub_artifacts_object_storage end context 'FastDestroyAll' do @@ -42,154 +43,220 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data_store) { :redis } before do - build_trace_chunk.send(:redis_set_data, 'Sample data in redis') + build_trace_chunk.send(:unsafe_set_data!, 'Sample data in redis') end it { is_expected.to eq('Sample data in redis') } end context 'when data_store is database' do - let(:data_store) { :db } - let(:raw_data) { 'Sample data in db' } + let(:data_store) { :database } + let(:raw_data) { 'Sample data in database' } - it { is_expected.to eq('Sample data in db') } + it { is_expected.to eq('Sample data in database') } end - end - - describe '#set_data' do - subject { build_trace_chunk.send(:set_data, value) } - let(:value) { 'Sample data' } + context 'when data_store is fog' do + let(:data_store) { :fog } - context 'when value bytesize is bigger than CHUNK_SIZE' do - let(:value) { 'a' * (described_class::CHUNK_SIZE + 1) } + before do + ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| + connection.put_object('artifacts', "tmp/builds/#{build.id}/chunks/#{chunk_index}.log", 'Sample data in fog') + end + end - it { expect { subject }.to raise_error('too much data') } + it { is_expected.to eq('Sample data in fog') } end + end - context 'when data_store is redis' do - let(:data_store) { :redis } + describe '#append' do + subject { build_trace_chunk.append(new_data, offset) } - it do - expect(build_trace_chunk.send(:redis_data)).to be_nil + 
let(:new_data) { 'Sample new data' } + let(:offset) { 0 } + let(:merged_data) { data + new_data.to_s } - subject + shared_examples_for 'Appending correctly' do + context 'when offset is negative' do + let(:offset) { -1 } - expect(build_trace_chunk.send(:redis_data)).to eq(value) + it { expect { subject }.to raise_error('Offset is out of range') } end - context 'when fullfilled chunk size' do - let(:value) { 'a' * described_class::CHUNK_SIZE } - - it 'schedules stashing data' do - expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once + context 'when offset is bigger than data size' do + let(:offset) { data.bytesize + 1 } - subject - end + it { expect { subject }.to raise_error('Offset is out of range') } end - end - context 'when data_store is database' do - let(:data_store) { :db } + context 'when new data overflows chunk size' do + let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) } - it 'sets data' do - expect(build_trace_chunk.raw_data).to be_nil + it { expect { subject }.to raise_error('Chunk size overflow') } + end - subject + context 'when offset is EOF' do + let(:offset) { data.bytesize } - expect(build_trace_chunk.raw_data).to eq(value) - expect(build_trace_chunk.persisted?).to be_truthy - end + it 'appends' do + subject - context 'when raw_data is not changed' do - it 'does not execute UPDATE' do - expect(build_trace_chunk.raw_data).to be_nil - build_trace_chunk.save! + expect(build_trace_chunk.data).to eq(merged_data) + end - # First set - expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0 - expect(build_trace_chunk.raw_data).to eq(value) - expect(build_trace_chunk.persisted?).to be_truthy + context 'when new_data is nil' do + let(:new_data) { nil } + + it 'raises an error' do + expect { subject }.to raise_error('New data is nil') + end + end - # Second set - build_trace_chunk.reload - expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0) + context 'when new_data is empty' do + let(:new_data) { '' } + + it 'does not append' do + subject + + expect(build_trace_chunk.data).to eq(data) + end + + it 'does not execute UPDATE' do + ActiveRecord::QueryRecorder.new { subject }.log.map do |query| + expect(query).not_to include('UPDATE') + end + end end end - context 'when fullfilled chunk size' do - it 'does not schedule stashing data' do - expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async) + context 'when offset is middle of datasize' do + let(:offset) { data.bytesize / 2 } + it 'appends' do subject + + expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) end end end - context 'when data_store is others' do - before do - build_trace_chunk.send(:write_attribute, :data_store, -1) - end + shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do + context 'when new data fullfilled chunk size' do + let(:new_data) { 'a' * described_class::CHUNK_SIZE } - it { expect { subject }.to raise_error('Unsupported data store') } - end - end + it 'schedules trace chunk flush worker' do + expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once - describe '#truncate' do - subject { build_trace_chunk.truncate(offset) } + subject + end - shared_examples_for 'truncates' do - context 'when offset is negative' do - let(:offset) { -1 } + it 'migrates data to object storage' do + Sidekiq::Testing.inline! 
do + subject - it { expect { subject }.to raise_error('Offset is out of range') } + build_trace_chunk.reload + expect(build_trace_chunk.fog?).to be_truthy + expect(build_trace_chunk.data).to eq(new_data) + end + end end + end - context 'when offset is bigger than data size' do - let(:offset) { data.bytesize + 1 } - - it { expect { subject }.to raise_error('Offset is out of range') } - end + shared_examples_for 'Scheduling no sidekiq worker' do + context 'when new data fullfilled chunk size' do + let(:new_data) { 'a' * described_class::CHUNK_SIZE } - context 'when offset is 10' do - let(:offset) { 10 } + it 'does not schedule trace chunk flush worker' do + expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async) - it 'truncates' do subject + end - expect(build_trace_chunk.data).to eq(data.byteslice(0, offset)) + it 'does not migrate data to object storage' do + Sidekiq::Testing.inline! do + data_store = build_trace_chunk.data_store + + subject + + build_trace_chunk.reload + expect(build_trace_chunk.data_store).to eq(data_store) + end end end end context 'when data_store is redis' do let(:data_store) { :redis } - let(:data) { 'Sample data in redis' } - before do - build_trace_chunk.send(:redis_set_data, data) + context 'when there are no data' do + let(:data) { '' } + + it 'has no data' do + expect(build_trace_chunk.data).to be_empty + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling sidekiq worker to flush data to persist store' end - it_behaves_like 'truncates' + context 'when there are some data' do + let(:data) { 'Sample data in redis' } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end + + it 'has data' do + expect(build_trace_chunk.data).to eq(data) + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling sidekiq worker to flush data to persist store' + end end context 'when data_store is database' do - let(:data_store) { :db } - let(:raw_data) { 'Sample data in db' } - let(:data) { raw_data } + let(:data_store) { :database } - it_behaves_like 'truncates' + context 'when there are no data' do + let(:data) { '' } + + it 'has no data' do + expect(build_trace_chunk.data).to be_empty + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' + end + + context 'when there are some data' do + let(:raw_data) { 'Sample data in database' } + let(:data) { raw_data } + + it 'has data' do + expect(build_trace_chunk.data).to eq(data) + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' + end end - end - describe '#append' do - subject { build_trace_chunk.append(new_data, offset) } + context 'when data_store is fog' do + let(:data_store) { :fog } + let(:data) { '' } + let(:offset) { 0 } - let(:new_data) { 'Sample new data' } - let(:offset) { 0 } - let(:total_data) { data + new_data } + it 'can not append' do + expect { subject }.to raise_error('Fog store does not support appending') + end + end + end + + describe '#truncate' do + subject { build_trace_chunk.truncate(offset) } - shared_examples_for 'appends' do + shared_examples_for 'truncates' do context 'when offset is negative' do let(:offset) { -1 } @@ -202,29 +269,13 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do it { expect { subject }.to raise_error('Offset is out of range') } end - context 'when offset is bigger than data size' do - let(:new_data) { 'a' * (described_class::CHUNK_SIZE + 1) } - - it { expect { subject }.to raise_error('Chunk size overflow') } - 
end - - context 'when offset is EOF' do - let(:offset) { data.bytesize } - - it 'appends' do - subject - - expect(build_trace_chunk.data).to eq(total_data) - end - end - context 'when offset is 10' do let(:offset) { 10 } - it 'appends' do + it 'truncates' do subject - expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) + expect(build_trace_chunk.data).to eq(data.byteslice(0, offset)) end end end @@ -234,18 +285,28 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data) { 'Sample data in redis' } before do - build_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:unsafe_set_data!, data) end - it_behaves_like 'appends' + it_behaves_like 'truncates' end context 'when data_store is database' do - let(:data_store) { :db } - let(:raw_data) { 'Sample data in db' } + let(:data_store) { :database } + let(:raw_data) { 'Sample data in database' } let(:data) { raw_data } - it_behaves_like 'appends' + it_behaves_like 'truncates' + end + + context 'when data_store is fog' do + let(:data_store) { :fog } + let(:data) { '' } + let(:offset) { 0 } + + it 'can not truncate' do + expect { subject }.to raise_error('Fog store does not support truncating') + end end end @@ -259,7 +320,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data) { 'Sample data in redis' } before do - build_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:unsafe_set_data!, data) end it { is_expected.to eq(data.bytesize) } @@ -271,10 +332,10 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end context 'when data_store is database' do - let(:data_store) { :db } + let(:data_store) { :database } context 'when data exists' do - let(:raw_data) { 'Sample data in db' } + let(:raw_data) { 'Sample data in database' } let(:data) { raw_data } it { is_expected.to eq(data.bytesize) } @@ -284,6 +345,25 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do it { is_expected.to eq(0) } end end + + context 'when data_store is fog' do + let(:data_store) { :fog } + + context 'when data exists' do + let(:data) { 'Sample data in fog' } + let(:key) { "tmp/builds/#{build.id}/chunks/#{chunk_index}.log" } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end + + it { is_expected.to eq(data.bytesize) } + end + + context 'when data does not exist' do + it { expect{ subject }.to raise_error(Excon::Error::NotFound) } + end + end end describe '#persist_data!' 
do @@ -296,93 +376,146 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data) { 'Sample data in redis' } before do - build_trace_chunk.send(:redis_set_data, data) + build_trace_chunk.send(:unsafe_set_data!, data) end - it 'stashes the data' do - expect(build_trace_chunk.data_store).to eq('redis') - expect(build_trace_chunk.send(:redis_data)).to eq(data) - expect(build_trace_chunk.raw_data).to be_nil + it 'persists the data' do + expect(build_trace_chunk.redis?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data) + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound) subject - expect(build_trace_chunk.data_store).to eq('db') - expect(build_trace_chunk.send(:redis_data)).to be_nil - expect(build_trace_chunk.raw_data).to eq(data) + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) end end context 'when data does not exist' do - it 'does not call UPDATE' do - expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0) + it 'does not persist' do + expect { subject }.to raise_error('Can not persist empty data') end end end context 'when data_store is database' do - let(:data_store) { :db } - - it 'does not call UPDATE' do - expect(ActiveRecord::QueryRecorder.new { subject }.count).to eq(0) - end - end - end - - describe 'ExclusiveLock' do - before do - allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } - stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1) - end - - it 'raise an error' do - expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') - end - end - - describe 'deletes data in redis after a parent record destroyed' do - let(:project) { create(:project) } + let(:data_store) { :database } - before do - pipeline = create(:ci_pipeline, project: project) - create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - end + context 'when data exists' do + let(:data) { 'Sample data in database' } - shared_examples_for 'deletes all build_trace_chunk and data in redis' do - it do - Gitlab::Redis::SharedState.with do |redis| - expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3) + before do + build_trace_chunk.send(:unsafe_set_data!, data) end - expect(described_class.count).to eq(3) + it 'persists the data' do + expect(build_trace_chunk.database?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data) + expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound) - subject + subject - expect(described_class.count).to eq(0) + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) + end + end - Gitlab::Redis::SharedState.with do |redis| - 
expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0) + context 'when data does not exist' do + it 'does not persist' do + expect { subject }.to raise_error('Can not persist empty data') end end end - context 'when traces are archived' do - let(:subject) do - project.builds.each do |build| - build.success! + context 'when data_store is fog' do + let(:data_store) { :fog } + + context 'when data exists' do + let(:data) { 'Sample data in fog' } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) end - end - it_behaves_like 'deletes all build_trace_chunk and data in redis' - end + it 'does not change data store' do + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) - context 'when project is destroyed' do - let(:subject) do - project.destroy! - end + subject - it_behaves_like 'deletes all build_trace_chunk and data in redis' + expect(build_trace_chunk.fog?).to be_truthy + expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil + expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) + end + end end end + + ## TODO: + # describe 'ExclusiveLock' do + # before do + # allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } + # stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1) + # end + + # it 'raise an error' do + # expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') + # end + # end + + # describe 'deletes data in redis after a parent record destroyed' do + # let(:project) { create(:project) } + + # before do + # pipeline = create(:ci_pipeline, project: project) + # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + # end + + # shared_examples_for 'deletes all build_trace_chunk and data in redis' do + # it do + # Gitlab::Redis::SharedState.with do |redis| + # expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3) + # end + + # expect(described_class.count).to eq(3) + + # subject + + # expect(described_class.count).to eq(0) + + # Gitlab::Redis::SharedState.with do |redis| + # expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0) + # end + # end + # end + + # context 'when traces are archived' do + # let(:subject) do + # project.builds.each do |build| + # build.success! + # end + # end + + # it_behaves_like 'deletes all build_trace_chunk and data in redis' + # end + + # context 'when project is destroyed' do + # let(:subject) do + # project.destroy! 
+ # end + + # it_behaves_like 'deletes all build_trace_chunk and data in redis' + # end + # end end diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb index bceaf8277ee..be122f9578c 100644 --- a/spec/support/helpers/stub_object_storage.rb +++ b/spec/support/helpers/stub_object_storage.rb @@ -20,6 +20,11 @@ module StubObjectStorage ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection| begin connection.directories.create(key: remote_directory) + + # Cleanup remaining files + connection.directories.each do |directory| + directory.files.map(&:destroy) + end rescue Excon::Error::Conflict end end -- cgit v1.2.1 From 82d98426854eb375bbe8ce0c830562e7c65a790a Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 25 Jun 2018 17:59:46 +0900 Subject: Enable specs for atomic operations --- spec/models/ci/build_trace_chunk_spec.rb | 152 ++++++++++++++++++------------- 1 file changed, 87 insertions(+), 65 deletions(-) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 44eaf7afad3..2b4dfba5c2a 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -60,9 +60,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:data_store) { :fog } before do - ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| - connection.put_object('artifacts', "tmp/builds/#{build.id}/chunks/#{chunk_index}.log", 'Sample data in fog') - end + build_trace_chunk.send(:unsafe_set_data!, 'Sample data in fog') end it { is_expected.to eq('Sample data in fog') } @@ -104,9 +102,23 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do expect(build_trace_chunk.data).to eq(merged_data) end + context 'when the other process is appending' do + let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" } + + it 'raise an error' do + begin + uuid = Gitlab::ExclusiveLease.new(lease_key, timeout: 1.day).try_obtain + + expect { subject }.to raise_error('Failed to obtain a lock') + ensure + Gitlab::ExclusiveLease.cancel(lease_key, uuid) + end + end + end + context 'when new_data is nil' do let(:new_data) { nil } - + it 'raises an error' do expect { subject }.to raise_error('New data is nil') end @@ -114,10 +126,10 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do context 'when new_data is empty' do let(:new_data) { '' } - + it 'does not append' do subject - + expect(build_trace_chunk.data).to eq(data) end @@ -361,7 +373,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end context 'when data does not exist' do - it { expect{ subject }.to raise_error(Excon::Error::NotFound) } + it { expect { subject }.to raise_error(Excon::Error::NotFound) } end end end @@ -369,6 +381,22 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do describe '#persist_data!' do subject { build_trace_chunk.persist_data! 
} + shared_examples_for 'Atomic operation' do + context 'when the other process is persisting' do + let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" } + + it 'raise an error' do + begin + uuid = Gitlab::ExclusiveLease.new(lease_key, timeout: 1.day).try_obtain + + expect { subject }.to raise_error('Failed to obtain a lock') + ensure + Gitlab::ExclusiveLease.cancel(lease_key, uuid) + end + end + end + end + context 'when data_store is redis' do let(:data_store) { :redis } @@ -392,6 +420,8 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) end + + it_behaves_like 'Atomic operation' end context 'when data does not exist' do @@ -424,6 +454,8 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) end + + it_behaves_like 'Atomic operation' end context 'when data does not exist' do @@ -456,66 +488,56 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data) end + + it_behaves_like 'Atomic operation' end end end - ## TODO: - # describe 'ExclusiveLock' do - # before do - # allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } - # stub_const('Ci::BuildTraceChunk::WRITE_LOCK_RETRY', 1) - # end - - # it 'raise an error' do - # expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') - # end - # end - - # describe 'deletes data in redis after a parent record destroyed' do - # let(:project) { create(:project) } - - # before do - # pipeline = create(:ci_pipeline, project: project) - # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - # create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) - # end - - # shared_examples_for 'deletes all build_trace_chunk and data in redis' do - # it do - # Gitlab::Redis::SharedState.with do |redis| - # expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3) - # end - - # expect(described_class.count).to eq(3) - - # subject - - # expect(described_class.count).to eq(0) - - # Gitlab::Redis::SharedState.with do |redis| - # expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0) - # end - # end - # end - - # context 'when traces are archived' do - # let(:subject) do - # project.builds.each do |build| - # build.success! - # end - # end - - # it_behaves_like 'deletes all build_trace_chunk and data in redis' - # end - - # context 'when project is destroyed' do - # let(:subject) do - # project.destroy! 
- # end - - # it_behaves_like 'deletes all build_trace_chunk and data in redis' - # end - # end + describe 'deletes data in redis after a parent record destroyed' do + let(:project) { create(:project) } + + before do + pipeline = create(:ci_pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) + end + + shared_examples_for 'deletes all build_trace_chunk and data in redis' do + it do + Gitlab::Redis::SharedState.with do |redis| + expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(3) + end + + expect(described_class.count).to eq(3) + + subject + + expect(described_class.count).to eq(0) + + Gitlab::Redis::SharedState.with do |redis| + expect(redis.scan_each(match: "gitlab:ci:trace:*:chunks:*").to_a.size).to eq(0) + end + end + end + + context 'when traces are archived' do + let(:subject) do + project.builds.each do |build| + build.success! + end + end + + it_behaves_like 'deletes all build_trace_chunk and data in redis' + end + + context 'when project is destroyed' do + let(:subject) do + project.destroy! + end + + it_behaves_like 'deletes all build_trace_chunk and data in redis' + end + end end -- cgit v1.2.1 From 44cc58765242afc2e035c2972447be2afae8d153 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 25 Jun 2018 19:46:38 +0900 Subject: Add specs for each data store --- spec/factories/ci/build_trace_chunks.rb | 58 ++++++++ spec/models/ci/build_trace_chunks/database_spec.rb | 105 +++++++++++++++ spec/models/ci/build_trace_chunks/fog_spec.rb | 146 +++++++++++++++++++++ spec/models/ci/build_trace_chunks/redis_spec.rb | 132 +++++++++++++++++++ 4 files changed, 441 insertions(+) create mode 100644 spec/models/ci/build_trace_chunks/database_spec.rb create mode 100644 spec/models/ci/build_trace_chunks/fog_spec.rb create mode 100644 spec/models/ci/build_trace_chunks/redis_spec.rb diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb index c0b9a25bfe8..e39b69b4bbd 100644 --- a/spec/factories/ci/build_trace_chunks.rb +++ b/spec/factories/ci/build_trace_chunks.rb @@ -3,5 +3,63 @@ FactoryBot.define do build factory: :ci_build chunk_index 0 data_store :redis + + trait :redis_with_data do + data_store :redis + + transient do + initial_data 'test data' + end + + after(:create) do |build_trace_chunk, evaluator| + Gitlab::Redis::SharedState.with do |redis| + redis.set( + "gitlab:ci:trace:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index.to_i}", + evaluator.initial_data, + ex: 1.day) + end + end + end + + trait :redis_without_data do + data_store :redis + end + + trait :database_with_data do + data_store :database + + transient do + initial_data 'test data' + end + + after(:build) do |build_trace_chunk, evaluator| + build_trace_chunk.raw_data = evaluator.initial_data + end + end + + trait :database_without_data do + data_store :database + end + + trait :fog_with_data do + data_store :fog + + transient do + initial_data 'test data' + end + + after(:create) do |build_trace_chunk, evaluator| + ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| + connection.put_object( + 'artifacts', + "tmp/builds/#{build_trace_chunk.build.id}/chunks/#{build_trace_chunk.chunk_index.to_i}.log", + evaluator.initial_data) + end + end + end + + trait :fog_without_data do + data_store :fog + end 
end end diff --git a/spec/models/ci/build_trace_chunks/database_spec.rb b/spec/models/ci/build_trace_chunks/database_spec.rb new file mode 100644 index 00000000000..d8fc9d57e95 --- /dev/null +++ b/spec/models/ci/build_trace_chunks/database_spec.rb @@ -0,0 +1,105 @@ +require 'spec_helper' + +describe Ci::BuildTraceChunks::Database do + let(:data_store) { described_class.new } + + describe '#available?' do + subject { data_store.available? } + + it { is_expected.to be_truthy } + end + + describe '#data' do + subject { data_store.data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') } + + it 'returns the data' do + is_expected.to eq('sample data in database') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :database_without_data) } + + it 'returns nil' do + is_expected.to be_nil + end + end + end + + describe '#set_data' do + subject { data_store.set_data(model, data) } + + let(:data) { 'abc123' } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') } + + it 'overwrites data' do + expect(data_store.data(model)).to eq('sample data in database') + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :database_without_data) } + + it 'sets new data' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + end + + describe '#delete_data' do + subject { data_store.delete_data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'sample data in database') } + + it 'deletes data' do + expect(data_store.data(model)).to eq('sample data in database') + + subject + + expect(data_store.data(model)).to be_nil + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :database_without_data) } + + it 'does nothing' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to be_nil + end + end + end + + describe '#keys' do + subject { data_store.keys(relation) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + + before do + create(:ci_build_trace_chunk, :database_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :database_with_data, chunk_index: 1, build: build) + end + + it 'returns empty array' do + is_expected.to eq([]) + end + end +end diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb new file mode 100644 index 00000000000..8f49190af13 --- /dev/null +++ b/spec/models/ci/build_trace_chunks/fog_spec.rb @@ -0,0 +1,146 @@ +require 'spec_helper' + +describe Ci::BuildTraceChunks::Fog do + let(:data_store) { described_class.new } + + before do + stub_artifacts_object_storage + end + + describe '#available?' do + subject { data_store.available? 
} + + context 'when object storage is enabled' do + it { is_expected.to be_truthy } + end + + context 'when object storage is disabled' do + before do + stub_artifacts_object_storage(enabled: false) + end + + it { is_expected.to be_falsy } + end + end + + describe '#data' do + subject { data_store.data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') } + + it 'returns the data' do + is_expected.to eq('sample data in fog') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :fog_without_data) } + + it 'returns nil' do + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + end + end + end + + describe '#set_data' do + subject { data_store.set_data(model, data) } + + let(:data) { 'abc123' } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') } + + it 'overwrites data' do + expect(data_store.data(model)).to eq('sample data in fog') + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :fog_without_data) } + + it 'sets new data' do + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + end + + describe '#delete_data' do + subject { data_store.delete_data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') } + + it 'deletes data' do + expect(data_store.data(model)).to eq('sample data in fog') + + subject + + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :fog_without_data) } + + it 'does nothing' do + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + + subject + + expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound) + end + end + end + + describe '#keys' do + subject { data_store.keys(relation) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + + before do + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build) + end + + it 'returns keys' do + is_expected.to eq([[build.id, 0], [build.id, 1]]) + end + end + + describe '#delete_keys' do + subject { data_store.delete_keys(keys) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + let(:keys) { data_store.keys(relation) } + + before do + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build) + end + + it 'deletes multiple data' do + ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| + expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present + expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present + end + + subject + + ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| + expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound) + expect { connection.get_object('artifacts', 
"tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound) + end + end + end +end diff --git a/spec/models/ci/build_trace_chunks/redis_spec.rb b/spec/models/ci/build_trace_chunks/redis_spec.rb new file mode 100644 index 00000000000..9da1e6a95ee --- /dev/null +++ b/spec/models/ci/build_trace_chunks/redis_spec.rb @@ -0,0 +1,132 @@ +require 'spec_helper' + +describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do + let(:data_store) { described_class.new } + + describe '#available?' do + subject { data_store.available? } + + it { is_expected.to be_truthy } + end + + describe '#data' do + subject { data_store.data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') } + + it 'returns the data' do + is_expected.to eq('sample data in redis') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :redis_without_data) } + + it 'returns nil' do + is_expected.to be_nil + end + end + end + + describe '#set_data' do + subject { data_store.set_data(model, data) } + + let(:data) { 'abc123' } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') } + + it 'overwrites data' do + expect(data_store.data(model)).to eq('sample data in redis') + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :redis_without_data) } + + it 'sets new data' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to eq('abc123') + end + end + end + + describe '#delete_data' do + subject { data_store.delete_data(model) } + + context 'when data exists' do + let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'sample data in redis') } + + it 'deletes data' do + expect(data_store.data(model)).to eq('sample data in redis') + + subject + + expect(data_store.data(model)).to be_nil + end + end + + context 'when data does not exist' do + let(:model) { create(:ci_build_trace_chunk, :redis_without_data) } + + it 'does nothing' do + expect(data_store.data(model)).to be_nil + + subject + + expect(data_store.data(model)).to be_nil + end + end + end + + describe '#keys' do + subject { data_store.keys(relation) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + + before do + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build) + end + + it 'returns keys' do + is_expected.to eq([[build.id, 0], [build.id, 1]]) + end + end + + describe '#delete_keys' do + subject { data_store.delete_keys(keys) } + + let(:build) { create(:ci_build) } + let(:relation) { build.trace_chunks } + let(:keys) { data_store.keys(relation) } + + before do + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 0, build: build) + create(:ci_build_trace_chunk, :redis_with_data, chunk_index: 1, build: build) + end + + it 'deletes multiple data' do + Gitlab::Redis::SharedState.with do |redis| + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_truthy + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_truthy + end + + subject + + Gitlab::Redis::SharedState.with do |redis| + expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_falsy + 
expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_falsy + end + end + end +end -- cgit v1.2.1 From 58a1a0b70c7df0947864d0be933faf0153b537ec Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Mon, 25 Jun 2018 19:59:28 +0900 Subject: Support append/truncate for fog store --- app/models/ci/build_trace_chunk.rb | 4 ++-- spec/models/ci/build_trace_chunk_spec.rb | 39 ++++++++++++++++++++++++-------- 2 files changed, 32 insertions(+), 11 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 8a34db798db..4362570b5ee 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -63,7 +63,6 @@ module Ci end def truncate(offset = 0) - raise ArgumentError, 'Fog store does not support truncating' if fog? # If data is null, get_data returns Excon::Error::NotFound raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 return if offset == size # Skip the following process as it doesn't affect anything @@ -71,7 +70,6 @@ module Ci end def append(new_data, offset) - raise ArgumentError, 'Fog store does not support appending' if fog? # If data is null, get_data returns Excon::Error::NotFound raise ArgumentError, 'New data is nil' unless new_data raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) @@ -124,6 +122,8 @@ module Ci def get_data self.class.get_store_class(data_store).data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default + rescue Excon::Error::NotFound + # If the data store is :fog and the file does not exist in the object storage, this method returns nil. end def unsafe_set_data!(value) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 2b4dfba5c2a..94a5fe8e5f8 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -256,11 +256,31 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do context 'when data_store is fog' do let(:data_store) { :fog } - let(:data) { '' } - let(:offset) { 0 } - it 'can not append' do - expect { subject }.to raise_error('Fog store does not support appending') + context 'when there are no data' do + let(:data) { '' } + + it 'has no data' do + expect(build_trace_chunk.data).to be_empty + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' + end + + context 'when there are some data' do + let(:data) { 'Sample data in fog' } + + before do + build_trace_chunk.send(:unsafe_set_data!, data) + end + + it 'has data' do + expect(build_trace_chunk.data).to eq(data) + end + + it_behaves_like 'Appending correctly' + it_behaves_like 'Scheduling no sidekiq worker' end end end @@ -313,12 +333,13 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do context 'when data_store is fog' do let(:data_store) { :fog } - let(:data) { '' } - let(:offset) { 0 } + let(:data) { 'Sample data in fog' } - it 'can not truncate' do - expect { subject }.to raise_error('Fog store does not support truncating') + before do + build_trace_chunk.send(:unsafe_set_data!, data) end + + it_behaves_like 'truncates' end end @@ -373,7 +394,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end context 'when data does not exist' do - it { expect { subject }.to raise_error(Excon::Error::NotFound) } + it { is_expected.to eq(0) } end end end -- cgit v1.2.1 From 
86251e74c54e837b1447deba6c53306c2a38c17d Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Jul 2018 13:31:32 +0900 Subject: Fix documents --- doc/administration/job_traces.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/administration/job_traces.md b/doc/administration/job_traces.md index f1c5b194f4c..24d1a3fd151 100644 --- a/doc/administration/job_traces.md +++ b/doc/administration/job_traces.md @@ -77,10 +77,10 @@ cloud-native, for example on Kubernetes. The data flow is the same as described in the [data flow section](#data-flow) with one change: _the stored path of the first two phases is different_. This new live -trace architecture stores chunks of traces in Redis and the database instead of +trace architecture stores chunks of traces in Redis and a persistent store (object storage or database) instead of file storage. Redis is used as first-class storage, and it stores up-to 128KB -of data. Once the full chunk is sent, it is flushed to database. After a while, -the data in Redis and database will be archived to [object storage](#uploading-traces-to-object-storage). +of data. Once the full chunk is sent, it is flushed to a persistent store, either object storage (in a temporary directory) or the database. +After a while, the data in Redis and the persistent store will be archived to [object storage](#uploading-traces-to-object-storage). The data are stored in the following Redis namespace: `Gitlab::Redis::SharedState`. @@ -89,11 +89,11 @@ Here is the detailed data flow: 1. GitLab Runner picks a job from GitLab 1. GitLab Runner sends a piece of trace to GitLab 1. GitLab appends the data to Redis -1. Once the data in Redis reach 128KB, the data is flushed to the database. +1. Once the data in Redis reaches 128KB, the data is flushed to a persistent store (object storage or the database). 1. The above steps are repeated until the job is finished. 1. Once the job is finished, GitLab schedules a Sidekiq worker to archive the trace. 1. The Sidekiq worker archives the trace to object storage and cleans up the trace - in Redis and the database. + in Redis and a persistent store (object storage or the database). ### Enabling live trace -- cgit v1.2.1 From b223f7b7a081be31cf5cc6026decad13bd79c813 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Jul 2018 14:33:11 +0900 Subject: Rename persistable_store instead of persist_store --- app/models/ci/build_trace_chunk.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 4362570b5ee..e7c56b94751 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -26,7 +26,7 @@ module Ci @all_stores ||= self.data_stores.keys end - def persist_store + def persistable_store # get first available store from the back of the list all_stores.reverse.find { |store| get_store_class(store).available? } end @@ -99,7 +99,7 @@ module Ci def persist_data!
in_lock(*lock_params) do # Write operation is atomic - unsafe_persist_to!(self.class.persist_store) + unsafe_persist_to!(self.class.persistable_store) end end -- cgit v1.2.1 From 902e69dedd1b4c60ce109c346b65c5e61a46ff4a Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Jul 2018 14:48:00 +0900 Subject: Fix error message --- app/models/ci/build_trace_chunk.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index e7c56b94751..b724d0cb517 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -70,7 +70,7 @@ module Ci end def append(new_data, offset) - raise ArgumentError, 'New data is nil' unless new_data + raise ArgumentError, 'New data is missing' unless new_data raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0 raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize) -- cgit v1.2.1 From 53234295e587a5b55dcb9417a869642ba0c266aa Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Jul 2018 14:52:01 +0900 Subject: Rename retries and remove retry_max --- app/services/concerns/exclusive_lease_lock.rb | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/app/services/concerns/exclusive_lease_lock.rb b/app/services/concerns/exclusive_lease_lock.rb index 231cfd3e3c5..8da54b0d15d 100644 --- a/app/services/concerns/exclusive_lease_lock.rb +++ b/app/services/concerns/exclusive_lease_lock.rb @@ -3,15 +3,14 @@ module ExclusiveLeaseLock FailedToObtainLockError = Class.new(StandardError) - def in_lock(key, ttl: 1.minute, retry_max: 10, sleep_sec: 0.01.seconds) + def in_lock(key, ttl: 1.minute, retries: 10, sleep_sec: 0.01.seconds) lease = Gitlab::ExclusiveLease.new(key, timeout: ttl) - retry_count = 0 until uuid = lease.try_obtain # Keep trying until we obtain the lease. To prevent hammering Redis too # much we'll wait for a bit. 
sleep(sleep_sec) - break if retry_max < (retry_count += 1) + break if (retries -= 1) < 0 end raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid -- cgit v1.2.1 From 02e3a624d29df1736a170a1484b5686f6dfe24d5 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Jul 2018 14:57:07 +0900 Subject: Simplified factory traits --- spec/factories/ci/build_trace_chunks.rb | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb index e39b69b4bbd..3aeb7e4af02 100644 --- a/spec/factories/ci/build_trace_chunks.rb +++ b/spec/factories/ci/build_trace_chunks.rb @@ -12,12 +12,7 @@ FactoryBot.define do end after(:create) do |build_trace_chunk, evaluator| - Gitlab::Redis::SharedState.with do |redis| - redis.set( - "gitlab:ci:trace:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index.to_i}", - evaluator.initial_data, - ex: 1.day) - end + Ci::BuildTraceChunk::Redis.new.set_data(build_trace_chunk, evaluator.initial_data) end end @@ -33,7 +28,7 @@ FactoryBot.define do end after(:build) do |build_trace_chunk, evaluator| - build_trace_chunk.raw_data = evaluator.initial_data + Ci::BuildTraceChunk::Database.new.set_data(build_trace_chunk, evaluator.initial_data) end end @@ -49,12 +44,7 @@ FactoryBot.define do end after(:create) do |build_trace_chunk, evaluator| - ::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection| - connection.put_object( - 'artifacts', - "tmp/builds/#{build_trace_chunk.build.id}/chunks/#{build_trace_chunk.chunk_index.to_i}.log", - evaluator.initial_data) - end + Ci::BuildTraceChunk::Fog.new.set_data(build_trace_chunk, evaluator.initial_data) end end -- cgit v1.2.1 From 93a964d449ad29e94e785209d7ecde217a8e9b25 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Tue, 3 Jul 2018 16:20:27 +0900 Subject: Add spec for ExclusiveLeaseHelpers --- app/models/ci/build_trace_chunk.rb | 2 +- app/services/concerns/exclusive_lease_lock.rb | 22 ------- lib/gitlab/exclusive_lease_helpers.rb | 29 ++++++++++ spec/lib/gitlab/exclusive_lease_helpers_spec.rb | 76 +++++++++++++++++++++++++ 4 files changed, 106 insertions(+), 23 deletions(-) delete mode 100644 app/services/concerns/exclusive_lease_lock.rb create mode 100644 lib/gitlab/exclusive_lease_helpers.rb create mode 100644 spec/lib/gitlab/exclusive_lease_helpers_spec.rb diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index b724d0cb517..43bede7638c 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -1,7 +1,7 @@ module Ci class BuildTraceChunk < ActiveRecord::Base include FastDestroyAll - include ExclusiveLeaseLock + include ::Gitlab::ExclusiveLeaseHelpers extend Gitlab::Ci::Model belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id diff --git a/app/services/concerns/exclusive_lease_lock.rb b/app/services/concerns/exclusive_lease_lock.rb deleted file mode 100644 index 8da54b0d15d..00000000000 --- a/app/services/concerns/exclusive_lease_lock.rb +++ /dev/null @@ -1,22 +0,0 @@ -module ExclusiveLeaseLock - extend ActiveSupport::Concern - - FailedToObtainLockError = Class.new(StandardError) - - def in_lock(key, ttl: 1.minute, retries: 10, sleep_sec: 0.01.seconds) - lease = Gitlab::ExclusiveLease.new(key, timeout: ttl) - - until uuid = lease.try_obtain - # Keep trying until we obtain the lease. To prevent hammering Redis too - # much we'll wait for a bit. 
- sleep(sleep_sec) - break if (retries -= 1) < 0 - end - - raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid - - return yield - ensure - Gitlab::ExclusiveLease.cancel(key, uuid) - end -end diff --git a/lib/gitlab/exclusive_lease_helpers.rb b/lib/gitlab/exclusive_lease_helpers.rb new file mode 100644 index 00000000000..ab6838adc6d --- /dev/null +++ b/lib/gitlab/exclusive_lease_helpers.rb @@ -0,0 +1,29 @@ +module Gitlab + # This module provides helper methods which are integrated with GitLab::ExclusiveLease + module ExclusiveLeaseHelpers + FailedToObtainLockError = Class.new(StandardError) + + ## + # This helper method blocks a process/thread until the other process cancels the obtained lease key. + # + # Note: It's basically discouraged to use this method in the unicorn's thread, + # because it holds the connection until all `retries` are consumed. + # This could potentially eat up all connection pools. + def in_lock(key, ttl: 1.minute, retries: 10, sleep_sec: 0.01.seconds) + lease = Gitlab::ExclusiveLease.new(key, timeout: ttl) + + until uuid = lease.try_obtain + # Keep trying until we obtain the lease. To prevent hammering Redis too + # much we'll wait for a bit. + sleep(sleep_sec) + break if (retries -= 1) < 0 + end + + raise FailedToObtainLockError, 'Failed to obtain a lock' unless uuid + + return yield + ensure + Gitlab::ExclusiveLease.cancel(key, uuid) + end + end +end diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb new file mode 100644 index 00000000000..eb2143f9d1d --- /dev/null +++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb @@ -0,0 +1,76 @@ +require 'spec_helper' + +describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do + include ::ExclusiveLeaseHelpers + + let(:class_instance) { (Class.new { include ::Gitlab::ExclusiveLeaseHelpers }).new } + let(:unique_key) { SecureRandom.hex(10) } + + describe '#in_lock' do + subject { class_instance.in_lock(unique_key, **options) { } } + + let(:options) { { } } + + context 'when the lease is not obtained yet' do + before do + stub_exclusive_lease(unique_key, 'uuid') + end + + it 'calls the given block' do + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + end + + it 'calls the given block continuously' do + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_control.once + end + + it 'cancels the exclusive lease after the block' do + expect_to_cancel_exclusive_lease(unique_key, 'uuid') + + subject + end + end + + context 'when the lease is obtained already' do + let!(:lease) { stub_exclusive_lease_taken(unique_key) } + + it 'retries to obtain a lease and raises an error' do + expect(lease).to receive(:try_obtain).exactly(11).times + + expect { subject }.to raise_error('Failed to obtain a lock') + end + + context 'when ttl is specified' do + let(:options) { { ttl: 10.minute } } + + it 'receives the specified argument' do + expect(Gitlab::ExclusiveLease).to receive(:new).with(unique_key, { timeout: 10.minute } ) + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + + context 'when retry count is specified' do + let(:options) { { retries: 3 } } + + it 'retries for the specified times' do + expect(lease).to receive(:try_obtain).exactly(4).times + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + + 
context 'when sleep second is specified' do + let(:options) { { retries: 0, sleep_sec: 0.05.second } } + + it 'receives the specified argument' do + expect(class_instance).to receive(:sleep).with(0.05.second).once + + expect { subject }.to raise_error('Failed to obtain a lock') + end + end + end + end +end -- cgit v1.2.1 From 202c2c6aef9a15c50921f3fe2c2166384b8dd321 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 4 Jul 2018 13:28:18 +0900 Subject: Fix static analysys --- spec/lib/gitlab/exclusive_lease_helpers_spec.rb | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb index eb2143f9d1d..2e3656b52fb 100644 --- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb +++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb @@ -9,7 +9,7 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do describe '#in_lock' do subject { class_instance.in_lock(unique_key, **options) { } } - let(:options) { { } } + let(:options) { {} } context 'when the lease is not obtained yet' do before do @@ -43,11 +43,11 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do end context 'when ttl is specified' do - let(:options) { { ttl: 10.minute } } + let(:options) { { ttl: 10.minutes } } it 'receives the specified argument' do - expect(Gitlab::ExclusiveLease).to receive(:new).with(unique_key, { timeout: 10.minute } ) - + expect(Gitlab::ExclusiveLease).to receive(:new).with(unique_key, { timeout: 10.minutes } ) + expect { subject }.to raise_error('Failed to obtain a lock') end end @@ -57,17 +57,17 @@ describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state do it 'retries for the specified times' do expect(lease).to receive(:try_obtain).exactly(4).times - + expect { subject }.to raise_error('Failed to obtain a lock') end end context 'when sleep second is specified' do - let(:options) { { retries: 0, sleep_sec: 0.05.second } } + let(:options) { { retries: 0, sleep_sec: 0.05.seconds } } it 'receives the specified argument' do - expect(class_instance).to receive(:sleep).with(0.05.second).once - + expect(class_instance).to receive(:sleep).with(0.05.seconds).once + expect { subject }.to raise_error('Failed to obtain a lock') end end -- cgit v1.2.1 From 7b06877330201deeef91fc4d1d1fedeb599c26ec Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 4 Jul 2018 13:29:47 +0900 Subject: Rename retry_max left overs to retries --- app/models/ci/build_trace_chunk.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb index 43bede7638c..b442de34061 100644 --- a/app/models/ci/build_trace_chunk.rb +++ b/app/models/ci/build_trace_chunk.rb @@ -152,7 +152,7 @@ module Ci def lock_params ["trace_write:#{build_id}:chunks:#{chunk_index}", { ttl: WRITE_LOCK_TTL, - retry_max: WRITE_LOCK_RETRY, + retries: WRITE_LOCK_RETRY, sleep_sec: WRITE_LOCK_SLEEP }] end end -- cgit v1.2.1 From 1f1ef460ca3a02597b4e37af67251f3824cf4ea8 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 4 Jul 2018 13:30:45 +0900 Subject: Decouple optimization for erasing trace --- app/models/ci/build.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb index 8c90232405e..41446946a5e 100644 --- a/app/models/ci/build.rb +++ b/app/models/ci/build.rb @@ -385,7 +385,7 @@ module Ci end def erase_old_trace! 
- update_column(:trace, nil) if old_trace + update_column(:trace, nil) end def needs_touch? -- cgit v1.2.1 From 4c0c7ed32d2d177735ad430bb6b22449ddf0a4d0 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 4 Jul 2018 14:29:37 +0900 Subject: Make explicit chunks store classes namespace in factroy --- spec/factories/ci/build_trace_chunks.rb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb index 3aeb7e4af02..ebec85ebf46 100644 --- a/spec/factories/ci/build_trace_chunks.rb +++ b/spec/factories/ci/build_trace_chunks.rb @@ -12,7 +12,7 @@ FactoryBot.define do end after(:create) do |build_trace_chunk, evaluator| - Ci::BuildTraceChunk::Redis.new.set_data(build_trace_chunk, evaluator.initial_data) + ::Ci::BuildTraceChunk::Redis.new.set_data(build_trace_chunk, evaluator.initial_data) end end @@ -28,7 +28,7 @@ FactoryBot.define do end after(:build) do |build_trace_chunk, evaluator| - Ci::BuildTraceChunk::Database.new.set_data(build_trace_chunk, evaluator.initial_data) + ::Ci::BuildTraceChunk::Database.new.set_data(build_trace_chunk, evaluator.initial_data) end end @@ -44,7 +44,7 @@ FactoryBot.define do end after(:create) do |build_trace_chunk, evaluator| - Ci::BuildTraceChunk::Fog.new.set_data(build_trace_chunk, evaluator.initial_data) + ::Ci::BuildTraceChunk::Fog.new.set_data(build_trace_chunk, evaluator.initial_data) end end -- cgit v1.2.1 From b025e89e08888f3b393108f984a25c209526491b Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Wed, 4 Jul 2018 15:19:22 +0900 Subject: Fix spec --- spec/factories/ci/build_trace_chunks.rb | 6 +++--- spec/models/ci/build_trace_chunk_spec.rb | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb index ebec85ebf46..3e8e2736423 100644 --- a/spec/factories/ci/build_trace_chunks.rb +++ b/spec/factories/ci/build_trace_chunks.rb @@ -12,7 +12,7 @@ FactoryBot.define do end after(:create) do |build_trace_chunk, evaluator| - ::Ci::BuildTraceChunk::Redis.new.set_data(build_trace_chunk, evaluator.initial_data) + Ci::BuildTraceChunks::Redis.new.set_data(build_trace_chunk, evaluator.initial_data) end end @@ -28,7 +28,7 @@ FactoryBot.define do end after(:build) do |build_trace_chunk, evaluator| - ::Ci::BuildTraceChunk::Database.new.set_data(build_trace_chunk, evaluator.initial_data) + Ci::BuildTraceChunks::Database.new.set_data(build_trace_chunk, evaluator.initial_data) end end @@ -44,7 +44,7 @@ FactoryBot.define do end after(:create) do |build_trace_chunk, evaluator| - ::Ci::BuildTraceChunk::Fog.new.set_data(build_trace_chunk, evaluator.initial_data) + Ci::BuildTraceChunks::Fog.new.set_data(build_trace_chunk, evaluator.initial_data) end end diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index 97a5de47b37..c0980328070 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -120,7 +120,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do let(:new_data) { nil } it 'raises an error' do - expect { subject }.to raise_error('New data is nil') + expect { subject }.to raise_error('New data is missing') end end -- cgit v1.2.1 From 21399fbc31083dce37a37980668c408520a136e0 Mon Sep 17 00:00:00 2001 From: Shinya Maeda Date: Fri, 6 Jul 2018 14:47:43 +0900 Subject: Add spec for the ordering of the data stores --- spec/models/ci/build_trace_chunk_spec.rb | 16 ++++++++++++++++ 1 file 
changed, 16 insertions(+) diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb index c0980328070..774a638b430 100644 --- a/spec/models/ci/build_trace_chunk_spec.rb +++ b/spec/models/ci/build_trace_chunk_spec.rb @@ -38,6 +38,22 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do end end + describe '.all_stores' do + subject { described_class.all_stores } + + it 'returns a correctly ordered array' do + is_expected.to eq(%w[redis database fog]) + end + + it 'returns redis store as the lowest precedence' do + expect(subject.first).to eq('redis') + end + + it 'returns fog store as the highest precedence' do + expect(subject.last).to eq('fog') + end + end + describe '#data' do subject { build_trace_chunk.data } -- cgit v1.2.1
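
The store ordering asserted in the spec above is what drives persistence at runtime: the chunk model walks the store list from the back and picks the first store whose class reports itself as available, so object storage (fog) wins when it is enabled and the database is the fallback otherwise, while Redis stays the live buffer with the lowest precedence. The following is a minimal, self-contained Ruby sketch of that selection logic; the Store struct and the hard-coded availability flags are illustrative stand-ins, not the real Ci::BuildTraceChunks classes.

# Minimal sketch of the store-selection logic, assuming stand-in store objects
# in place of Ci::BuildTraceChunks::Redis/Database/Fog.
Store = Struct.new(:name, :available) do
  def available?
    available
  end
end

# Ordered from lowest to highest precedence, mirroring the data_store enum.
ALL_STORES = [
  Store.new(:redis, true),     # live buffer, always available
  Store.new(:database, true),  # always available
  Store.new(:fog, false)       # available only when object storage is enabled
].freeze

# Pick the first available store from the back of the list,
# as Ci::BuildTraceChunk.persistable_store does.
def persistable_store
  ALL_STORES.reverse.find(&:available?)
end

puts persistable_store.name
# => database (fog would be chosen if object storage were enabled)

Keeping Redis at the lowest precedence means it is never chosen as the persistence target; a full chunk is always flushed to the strongest persistent store currently available.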