Diffstat (limited to 'spec')
 -rw-r--r--  spec/factories/ci/job_artifacts.rb                                     |   6
 -rw-r--r--  spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb  | 156
 -rw-r--r--  spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb      |  73
 -rw-r--r--  spec/uploaders/job_artifact_uploader_spec.rb                           |  47
 4 files changed, 282 insertions(+), 0 deletions(-)
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 46aaaf6aa5d..f028803ca74 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -24,6 +24,12 @@ FactoryBot.define do
end
end
+ trait :legacy_archive do
+ archive
+
+ file_location :legacy_path
+ end
+
trait :metadata do
file_type :metadata
file_format :gzip
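
[Editor's note] The new :legacy_archive trait simply layers a legacy file_location on top of the existing :archive trait. A minimal usage sketch follows; the uploader spec at the end of this diff creates the same record, and treating file_location as a Rails enum that reads back "legacy_path" is an assumption:

    artifact = create(:ci_job_artifact, :legacy_archive)
    artifact.file_location    # => "legacy_path"  (assuming a Rails enum on Ci::JobArtifact)
    artifact.file.store_path  # resolves against the pre-hashed-storage path layout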
diff --git a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
new file mode 100644
index 00000000000..2d1505dacfe
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
@@ -0,0 +1,156 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, :migration, schema: 20180816161409 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:jobs) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+
+ subject { described_class.new.perform(*range) }
+
+ context 'when a pipeline exists' do
+ let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let!(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
+ let!(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
+
+ context 'when a legacy artifact exists' do
+ let(:artifacts_expire_at) { 1.day.since.to_s }
+ let(:file_store) { ::ObjectStorage::Store::REMOTE }
+
+ let!(:job) do
+ jobs.create!(
+ commit_id: pipeline.id,
+ project_id: project.id,
+ status: :success,
+ **artifacts_archive_attributes,
+ **artifacts_metadata_attributes)
+ end
+
+ let(:artifacts_archive_attributes) do
+ {
+ artifacts_file: 'archive.zip',
+ artifacts_file_store: file_store,
+ artifacts_size: 123,
+ artifacts_expire_at: artifacts_expire_at
+ }
+ end
+
+ let(:artifacts_metadata_attributes) do
+ {
+ artifacts_metadata: 'metadata.gz',
+ artifacts_metadata_store: file_store
+ }
+ end
+
+ it 'has legacy artifacts' do
+ expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([artifacts_archive_attributes.values])
+ expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([artifacts_metadata_attributes.values])
+ end
+
+ it 'does not have new artifacts yet' do
+ expect(job_artifacts.count).to be_zero
+ end
+
+ context 'when the record exists inside of the range of a background migration' do
+ let(:range) { [job.id, job.id] }
+
+ it 'migrates a legacy artifact to ci_job_artifacts table' do
+ expect { subject }.to change { job_artifacts.count }.by(2)
+
+ expect(job_artifacts.order(:id).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
+ .to eq([[project.id,
+ job.id,
+ described_class::ARCHIVE_FILE_TYPE,
+ file_store,
+ artifacts_archive_attributes[:artifacts_size],
+ artifacts_expire_at,
+ 'archive.zip',
+ nil,
+ described_class::LEGACY_PATH_FILE_LOCATION],
+ [project.id,
+ job.id,
+ described_class::METADATA_FILE_TYPE,
+ file_store,
+ nil,
+ artifacts_expire_at,
+ 'metadata.gz',
+ nil,
+ described_class::LEGACY_PATH_FILE_LOCATION]])
+
+ expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, artifacts_expire_at]])
+ expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
+ end
+
+ context 'when file_store is nil' do
+ let(:file_store) { nil }
+
+ it 'has nullified file_store in all legacy artifacts' do
+ expect(jobs.pluck('artifacts_file_store, artifacts_metadata_store')).to eq([[nil, nil]])
+ end
+
+ it 'fills file_store with the value of the local file store' do
+ subject
+
+ expect(job_artifacts.pluck('file_store')).to all(eq(::ObjectStorage::Store::LOCAL))
+ end
+ end
+
+ context 'when new artifacts already exist' do
+ context 'when only archive.zip existed' do
+ before do
+ job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
+ end
+
+ it 'already has archive.zip' do
+ expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE)).to be_truthy
+ end
+
+ it 'migrates metadata' do
+ expect { subject }.to change { job_artifacts.count }.by(1)
+
+ expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::METADATA_FILE_TYPE)).to be_truthy
+ end
+ end
+
+ context 'when both archive and metadata existed' do
+ before do
+ job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
+ job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::METADATA_FILE_TYPE, size: 999, file: 'metadata.zip')
+ end
+
+ it 'does not migrate' do
+ expect { subject }.not_to change { job_artifacts.count }
+ end
+ end
+ end
+ end
+
+ context 'when the record exists outside of the range of a background migration' do
+ let(:range) { [job.id + 1, job.id + 1] }
+
+ it 'does not migrate' do
+ expect { subject }.not_to change { job_artifacts.count }
+ end
+ end
+ end
+
+ context 'when the job does not have legacy artifacts' do
+ let!(:job) { jobs.create!(commit_id: pipeline.id, project_id: project.id, status: :success) }
+
+ it 'does not have legacy artifacts in the database' do
+ expect(jobs.count).to eq(1)
+ expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, nil]])
+ expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
+ end
+
+ context 'when the record exists inside of the range of a background migration' do
+ let(:range) { [job.id, job.id] }
+
+ it 'does not migrate' do
+ expect { subject }.not_to change { job_artifacts.count }
+ end
+ end
+ end
+ end
+end
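
[Editor's note] The class under test is not part of this diff. Purely as a reading aid, the sketch below approximates the behaviour the expectations above pin down: copy artifacts_file/artifacts_metadata into ci_job_artifacts with file_location set to legacy_path, default a NULL store to the local store, skip rows that already have a matching artifact, and clear the legacy columns while keeping artifacts_expire_at. The constant values, the inline model classes and the ActiveRecord-based copying are assumptions; the shipped migration may do the same work in batched SQL.

    # Illustrative sketch, not the shipped class (see note above).
    module Gitlab
      module BackgroundMigration
        class MigrateLegacyArtifacts
          ARCHIVE_FILE_TYPE = 1          # Ci::JobArtifact file_type 'archive'  (assumed value)
          METADATA_FILE_TYPE = 2         # Ci::JobArtifact file_type 'metadata' (assumed value)
          LOCAL_STORE = 1                # ObjectStorage::Store::LOCAL          (assumed value)
          LEGACY_PATH_FILE_LOCATION = 1  # file_location 'legacy_path'          (assumed value)

          # Lightweight models scoped to the migration, so it does not depend on
          # application code (uploaders, validations).
          class Build < ActiveRecord::Base
            self.table_name = 'ci_builds'
            self.inheritance_column = :_type_disabled
          end

          class JobArtifact < ActiveRecord::Base
            self.table_name = 'ci_job_artifacts'
          end

          def perform(start_id, stop_id)
            Build.where(id: start_id..stop_id)
                 .where("artifacts_file <> ''")
                 .find_each do |build|
              copy(build, ARCHIVE_FILE_TYPE, build.artifacts_file,
                   build.artifacts_file_store, build.artifacts_size)

              if build.artifacts_metadata.present?
                copy(build, METADATA_FILE_TYPE, build.artifacts_metadata,
                     build.artifacts_metadata_store, nil)
              end

              # artifacts_expire_at stays on ci_builds; the other legacy columns are cleared
              build.update_columns(
                artifacts_file: nil, artifacts_file_store: nil, artifacts_size: nil,
                artifacts_metadata: nil, artifacts_metadata_store: nil)
            end
          end

          private

          def copy(build, file_type, file, store, size)
            return if JobArtifact.exists?(job_id: build.id, file_type: file_type)

            JobArtifact.create!(
              project_id: build.project_id,
              job_id: build.id,
              file_type: file_type,
              file_store: store || LOCAL_STORE,  # a NULL legacy store becomes local
              size: size,
              expire_at: build.artifacts_expire_at,
              file: file,
              file_location: LEGACY_PATH_FILE_LOCATION)
          end
        end
      end
    end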
diff --git a/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
new file mode 100644
index 00000000000..df82672f254
--- /dev/null
+++ b/spec/migrations/migrate_legacy_artifacts_to_job_artifacts_spec.rb
@@ -0,0 +1,73 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20180816161409_migrate_legacy_artifacts_to_job_artifacts.rb')
+
+describe MigrateLegacyArtifactsToJobArtifacts, :migration, :sidekiq do
+ let(:migration_class) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines) }
+ let(:jobs) { table(:ci_builds) }
+ let(:job_artifacts) { table(:ci_job_artifacts) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
+ let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
+ let(:archive_file_type) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::ARCHIVE_FILE_TYPE }
+ let(:metadata_file_type) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::METADATA_FILE_TYPE }
+ let(:local_store) { ::ObjectStorage::Store::LOCAL }
+ let(:remote_store) { ::ObjectStorage::Store::REMOTE }
+ let(:legacy_location) { Gitlab::BackgroundMigration::MigrateLegacyArtifacts::LEGACY_PATH_FILE_LOCATION }
+
+ context 'when legacy artifacts exist' do
+ before do
+ jobs.create!(id: 1, commit_id: pipeline.id, project_id: project.id, status: :success, artifacts_file: 'archive.zip')
+ jobs.create!(id: 2, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_metadata: 'metadata.gz')
+ jobs.create!(id: 3, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_file: 'archive.zip', artifacts_metadata: 'metadata.gz')
+ jobs.create!(id: 4, commit_id: pipeline.id, project_id: project.id, status: :running)
+ jobs.create!(id: 5, commit_id: pipeline.id, project_id: project.id, status: :success, artifacts_file: 'archive.zip', artifacts_file_store: remote_store, artifacts_metadata: 'metadata.gz')
+ jobs.create!(id: 6, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_file: 'archive.zip', artifacts_metadata: 'metadata.gz')
+ end
+
+ it 'schedules a background migration' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(migration_name).to be_scheduled_delayed_migration(5.minutes, 1, 6)
+ expect(BackgroundMigrationWorker.jobs.size).to eq 1
+ end
+ end
+ end
+
+ it 'migrates legacy artifacts to ci_job_artifacts table' do
+ migrate!
+
+ expect(job_artifacts.order(:job_id, :file_type).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
+ .to eq([[project.id, 1, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 3, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 3, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location],
+ [project.id, 5, archive_file_type, remote_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 5, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location],
+ [project.id, 6, archive_file_type, local_store, nil, nil, 'archive.zip', nil, legacy_location],
+ [project.id, 6, metadata_file_type, local_store, nil, nil, 'metadata.gz', nil, legacy_location]])
+ end
+ end
+
+ context 'when legacy artifacts do not exist' do
+ before do
+ jobs.create!(id: 1, commit_id: pipeline.id, project_id: project.id, status: :success)
+ jobs.create!(id: 2, commit_id: pipeline.id, project_id: project.id, status: :failed, artifacts_metadata: 'metadata.gz')
+ end
+
+ it 'does not schedule background migrations' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq 0
+ end
+ end
+ end
+ end
+end
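
[Editor's note] As above, the post-deployment migration itself is not shown in this diff. From the expectations here it schedules MigrateLegacyArtifacts over id ranges of builds that still carry an artifacts_file, one delayed job per batch. A sketch under those assumptions, using the standard queue_background_migration_jobs_by_range_at_intervals helper; the scope, the batch size and the migration version tag are illustrative:

    # Sketch only (see note above); not the shipped post-deployment migration.
    class MigrateLegacyArtifactsToJobArtifacts < ActiveRecord::Migration[4.2]
      include Gitlab::Database::MigrationHelpers

      DOWNTIME = false
      MIGRATION = 'MigrateLegacyArtifacts'.freeze
      BATCH_SIZE = 100_000

      disable_ddl_transaction!

      class Build < ActiveRecord::Base
        include EachBatch

        self.table_name = 'ci_builds'
        self.inheritance_column = :_type_disabled

        scope :with_legacy_artifacts, -> { where("artifacts_file <> ''") }
      end

      def up
        # One delayed BackgroundMigrationWorker job per id range of builds that
        # still carry a legacy artifacts_file, spaced 5 minutes apart.
        queue_background_migration_jobs_by_range_at_intervals(
          Build.with_legacy_artifacts, MIGRATION, 5.minutes, batch_size: BATCH_SIZE)
      end

      def down
        # no-op: the background migration is not reversible
      end
    end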
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 3ad5fe7e3b3..061432f082a 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -40,6 +40,53 @@ describe JobArtifactUploader do
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
+ describe '#dynamic_segment' do
+ let(:uploaded_content) { File.binread(Rails.root + 'spec/fixtures/ci_build_artifacts.zip') }
+ let(:model) { uploader.model }
+
+ shared_examples_for 'Read file from legacy path' do
+ it 'store_path returns the legacy path' do
+ expect(model.file.store_path).to eq(File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.job_id.to_s, 'ci_build_artifacts.zip'))
+ end
+
+ it 'has exactly the same content' do
+ expect(::File.binread(model.file.path)).to eq(uploaded_content)
+ end
+ end
+
+ shared_examples_for 'Read file from hashed path' do
+ it 'store_path returns the hashed path' do
+ expect(model.file.store_path).to eq(File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, creation_date, model.job_id.to_s, model.id.to_s, 'ci_build_artifacts.zip'))
+ end
+
+ it 'has exactly the same content' do
+ expect(::File.binread(model.file.path)).to eq(uploaded_content)
+ end
+ end
+
+ context 'when a job artifact is stored in legacy_path' do
+ let(:job_artifact) { create(:ci_job_artifact, :legacy_archive) }
+
+ it_behaves_like 'Read file from legacy path'
+ end
+
+ context 'when the artifact file is stored in hashed_path' do
+ let(:job_artifact) { create(:ci_job_artifact, :archive) }
+ let(:disk_hash) { Digest::SHA2.hexdigest(model.project_id.to_s) }
+ let(:creation_date) { model.created_at.utc.strftime('%Y_%m_%d') }
+
+ it_behaves_like 'Read file from hashed path'
+
+ context 'when file_location column is empty' do
+ before do
+ job_artifact.update_column(:file_location, nil)
+ end
+
+ it_behaves_like 'Read file from hashed path'
+ end
+ end
+ end
+
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload('spec/fixtures/trace/sample_trace'))
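
[Editor's note] Returning to the #dynamic_segment examples added above: the two path layouts they assert correspond to a branch inside the uploader. An explicit legacy_path keeps the old <YYYY_MM>/<project_id>/<job_id> layout, while anything else, including a NULL file_location, resolves to hashed storage. A sketch of that branching; apart from #dynamic_segment, the helper method names and the legacy_path? enum predicate are assumptions, not the uploader's real internals:

    # Sketch of the path selection exercised by the spec above.
    class JobArtifactUploader < GitlabUploader
      private

      def dynamic_segment
        # A NULL file_location is treated as hashed storage; only an explicit
        # legacy_path keeps the pre-hashed-storage layout.
        model.legacy_path? ? legacy_segment : hashed_segment
      end

      def legacy_segment
        File.join(model.created_at.utc.strftime('%Y_%m'),
                  model.project_id.to_s, model.job_id.to_s)
      end

      def hashed_segment
        disk_hash = Digest::SHA2.hexdigest(model.project_id.to_s)

        File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
                  model.created_at.utc.strftime('%Y_%m_%d'),
                  model.job_id.to_s, model.id.to_s)
      end
    end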