author    Micaël Bergeron <mbergeron@gitlab.com>  2018-03-02 13:21:03 -0500
committer Micaël Bergeron <mbergeron@gitlab.com>  2018-03-02 13:21:03 -0500
commit    4484587ef8c9bf63c60359a0980dda95e854b744 (patch)
tree      a917f3d9336e389e485fdab72cc5a8a5ad5fd3af
parent    0d458b96e8349a50877ebd55932bf806e93caa21 (diff)
download  gitlab-ce-4484587ef8c9bf63c60359a0980dda95e854b744.tar.gz
another round of EE code removal
-rw-r--r--  app/uploaders/job_artifact_uploader.rb                                                |   8
-rw-r--r--  db/schema.rb                                                                           |   7
-rw-r--r--  spec/controllers/projects/jobs_controller_spec.rb                                      |  63
-rw-r--r--  spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb                           |  73
-rw-r--r--  spec/ee/spec/models/ee/lfs_object_spec.rb                                              | 110
-rw-r--r--  spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb  |  50
-rw-r--r--  spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb                                |  22
-rw-r--r--  spec/uploaders/job_artifact_uploader_spec.rb                                           |  11
8 files changed, 75 insertions(+), 269 deletions(-)
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index 06842a58571..ef0f8acefd6 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -15,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
end
def open
- raise 'Only File System is supported' unless file_storage?
-
- File.open(path, "rb") if path
+ if file_storage?
+ File.open(path, "rb") if path
+ else
+ ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
+ end
end
private
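
Note on the hunk above: JobArtifactUploader#open no longer raises when the artifact is not on local
storage; it now returns a Gitlab::Ci::Trace::HttpIO stream backed by the remote URL and size. A minimal
usage sketch, assuming a job whose trace artifact lives in object storage (the variable names are
illustrative and not part of the commit):

    # Sketch only: stream a trace regardless of where the artifact is stored.
    artifact = job.job_artifacts_trace   # assumed Ci::JobArtifact record
    stream   = artifact.file.open        # File (local) or Gitlab::Ci::Trace::HttpIO (remote)

    if stream
      # Both objects respond to #read, so callers stay storage-agnostic.
      first_chunk = stream.read(1024)
      stream.close if stream.respond_to?(:close)
    end
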
diff --git a/db/schema.rb b/db/schema.rb
index 5ce6a015e83..db6360089a8 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -290,12 +290,10 @@ ActiveRecord::Schema.define(version: 20180216121030) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
- t.integer "artifacts_file_store", default: 1, null: false
- t.integer "artifacts_metadata_store", default: 1, null: false
- t.boolean "protected"
- t.integer "failure_reason"
t.integer "artifacts_file_store"
t.integer "artifacts_metadata_store"
+ t.boolean "protected"
+ t.integer "failure_reason"
end
add_index "ci_builds", ["artifacts_expire_at"], name: "index_ci_builds_on_artifacts_expire_at", where: "(artifacts_file <> ''::text)", using: :btree
@@ -338,7 +336,6 @@ ActiveRecord::Schema.define(version: 20180216121030) do
t.datetime_with_timezone "updated_at", null: false
t.datetime_with_timezone "expire_at"
t.string "file"
- t.integer "file_store"
end
add_index "ci_job_artifacts", ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index f3e303bb0fe..31046c202e6 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1,7 +1,9 @@
+# coding: utf-8
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
+ include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -203,6 +205,41 @@ describe Projects::JobsController do
end
end
+ context 'when trace artifact is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+ allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+ end
+
+ context 'when there are no network issues' do
+ before do
+ stub_remote_trace_206
+
+ get_trace
+ end
+
+ it 'returns a trace' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq job.id
+ expect(json_response['status']).to eq job.status
+ expect(json_response['html']).to eq(job.trace.html)
+ end
+ end
+
+ context 'when there is a network issue' do
+ before do
+ stub_remote_trace_500
+ end
+
+ it 'returns a trace' do
+ expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+ end
+ end
+ end
+
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
@@ -446,14 +483,18 @@ describe Projects::JobsController do
end
describe 'GET raw' do
- before do
- get_raw
+ subject do
+ post :raw, namespace_id: project.namespace,
+ project_id: project,
+ id: job.id
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read
@@ -464,6 +505,8 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'send a trace file' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'BUILD TRACE'
@@ -474,14 +517,22 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
- def get_raw
- post :raw, namespace_id: project.namespace,
- project_id: project,
- id: job.id
+ context 'when the trace artifact is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ end
+
+ it 'redirect to the trace file url' do
+ expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
+ end
end
end
end
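
The controller spec above includes HttpIOHelpers and relies on remote_trace_url, remote_trace_size,
stub_remote_trace_206 and stub_remote_trace_500; that helper module is not shown in this diff. A minimal
sketch of what it could look like, assuming WebMock is available in the suite (the method names mirror the
calls in the spec, the bodies are guesses):

    # Hypothetical sketch of HttpIOHelpers; the real module ships elsewhere and may differ.
    module HttpIOHelpers
      def remote_trace_url
        'http://object-storage.example.com/trace'
      end

      def remote_trace_body
        @remote_trace_body ||= "BUILD TRACE\n" * 128
      end

      def remote_trace_size
        remote_trace_body.bytesize
      end

      def stub_remote_trace_206
        # Serve ranged GETs with 206 Partial Content, as object storage would.
        WebMock.stub_request(:get, remote_trace_url)
          .to_return { |request| { status: 206, body: ranged_slice(request) } }
      end

      def stub_remote_trace_500
        WebMock.stub_request(:get, remote_trace_url)
          .to_return(status: [500, 'Internal Server Error'])
      end

      private

      def ranged_slice(request)
        match = request.headers['Range'].to_s.match(/bytes=(\d+)-(\d+)/)
        return remote_trace_body unless match

        remote_trace_body[match[1].to_i..match[2].to_i]
      end
    end
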
diff --git a/spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb b/spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb
deleted file mode 100644
index 2bd3a64e3e8..00000000000
--- a/spec/ee/spec/controllers/ee/projects/jobs_controller_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-require 'spec_helper'
-
-describe Projects::JobsController do
- include ApiHelpers
- include HttpIOHelpers
-
- let(:project) { create(:project, :public) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- describe 'GET trace.json' do
- context 'when trace artifact is in ObjectStorage' do
- let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
-
- before do
- allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
- allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
- allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
- end
-
- context 'when there are no network issues' do
- before do
- stub_remote_trace_206
-
- get_trace
- end
-
- it 'returns a trace' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['id']).to eq job.id
- expect(json_response['status']).to eq job.status
- expect(json_response['html']).to eq(job.trace.html)
- end
- end
-
- context 'when there is a network issue' do
- before do
- stub_remote_trace_500
- end
-
- it 'returns a trace' do
- expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
- end
- end
- end
-
- def get_trace
- get :trace, namespace_id: project.namespace,
- project_id: project,
- id: job.id,
- format: :json
- end
- end
-
- describe 'GET raw' do
- subject do
- post :raw, namespace_id: project.namespace,
- project_id: project,
- id: job.id
- end
-
- context 'when the trace artifact is in ObjectStorage' do
- let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
-
- before do
- allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
- end
-
- it 'redirect to the trace file url' do
- expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
- end
- end
- end
-end
diff --git a/spec/ee/spec/models/ee/lfs_object_spec.rb b/spec/ee/spec/models/ee/lfs_object_spec.rb
deleted file mode 100644
index 28dbcbc4189..00000000000
--- a/spec/ee/spec/models/ee/lfs_object_spec.rb
+++ /dev/null
@@ -1,110 +0,0 @@
-require 'spec_helper'
-
-describe LfsObject do
- describe '#local_store?' do
- it 'returns true when file_store is nil' do
- subject.file_store = nil
-
- expect(subject.local_store?).to eq true
- end
-
- it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
- subject.file_store = LfsObjectUploader::Store::LOCAL
-
- expect(subject.local_store?).to eq true
- end
-
- it 'returns false whe file_store is equal to LfsObjectUploader::Store::REMOTE' do
- subject.file_store = LfsObjectUploader::Store::REMOTE
-
- expect(subject.local_store?).to eq false
- end
- end
-
- describe '#destroy' do
- subject { create(:lfs_object, :with_file) }
-
- context 'when running in a Geo primary node' do
- set(:primary) { create(:geo_node, :primary) }
- set(:secondary) { create(:geo_node) }
-
- it 'logs an event to the Geo event log' do
- expect { subject.destroy }.to change(Geo::LfsObjectDeletedEvent, :count).by(1)
- end
- end
- end
-
- describe '#schedule_migration_to_object_storage' do
- before do
- stub_lfs_setting(enabled: true)
- end
-
- subject { create(:lfs_object, :with_file) }
-
- context 'when object storage is disabled' do
- before do
- stub_lfs_object_storage(enabled: false)
- end
-
- it 'does not schedule the migration' do
- expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when object storage is enabled' do
- context 'when background upload is enabled' do
- context 'when is licensed' do
- before do
- stub_lfs_object_storage(background_upload: true)
- end
-
- it 'schedules the model for migration' do
- expect(ObjectStorage::BackgroundMoveWorker)
- .to receive(:perform_async)
- .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
- .once
-
- subject
- end
-
- it 'schedules the model for migration once' do
- expect(ObjectStorage::BackgroundMoveWorker)
- .to receive(:perform_async)
- .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
- .once
-
- lfs_object = create(:lfs_object)
- lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
- lfs_object.save!
- end
- end
-
- context 'when is unlicensed' do
- before do
- stub_lfs_object_storage(background_upload: true, licensed: false)
- end
-
- it 'does not schedule the migration' do
- expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
-
- subject
- end
- end
- end
-
- context 'when background upload is disabled' do
- before do
- stub_lfs_object_storage(background_upload: false)
- end
-
- it 'schedules the model for migration' do
- expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
-
- subject
- end
- end
- end
- end
-end
diff --git a/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
deleted file mode 100644
index 9fa618fdc47..00000000000
--- a/spec/ee/spec/services/ee/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-require 'spec_helper'
-
-describe Projects::HashedStorage::MigrateAttachmentsService do
- let(:project) { create(:project, storage_version: 1) }
- let(:service) { described_class.new(project) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
- let(:old_attachments_path) { legacy_storage.disk_path }
- let(:new_attachments_path) { hashed_storage.disk_path }
-
- describe '#execute' do
- set(:primary) { create(:geo_node, :primary) }
- set(:secondary) { create(:geo_node) }
-
- context 'on success' do
- before do
- TestEnv.clean_test_path
- FileUtils.mkdir_p(File.join(FileUploader.root, old_attachments_path))
- end
-
- it 'returns true' do
- expect(service.execute).to be_truthy
- end
-
- it 'creates a Geo::HashedStorageAttachmentsEvent' do
- expect { service.execute }.to change(Geo::EventLog, :count).by(1)
-
- event = Geo::EventLog.first.event
-
- expect(event).to be_a(Geo::HashedStorageAttachmentsEvent)
- expect(event).to have_attributes(
- old_attachments_path: old_attachments_path,
- new_attachments_path: new_attachments_path
- )
- end
- end
-
- context 'on failure' do
- it 'does not create a Geo event when skipped' do
- expect { service.execute }.not_to change { Geo::EventLog.count }
- end
-
- it 'does not create a Geo event on failure' do
- expect(service).to receive(:move_folder!).and_raise(::Projects::HashedStorage::AttachmentMigrationError)
- expect { service.execute }.to raise_error(::Projects::HashedStorage::AttachmentMigrationError)
- expect(Geo::EventLog.count).to eq(0)
- end
- end
- end
-end
diff --git a/spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb b/spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb
deleted file mode 100644
index 043c597c9e0..00000000000
--- a/spec/ee/spec/uploaders/ee/job_artifact_uploader_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-require 'spec_helper'
-
-describe JobArtifactUploader do
- let(:store) { ObjectStorage::Store::LOCAL }
- let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
- let(:uploader) { described_class.new(job_artifact, :file) }
-
- describe '#open' do
- subject { uploader.open }
-
- context 'when trace is stored in Object storage' do
- before do
- allow(uploader).to receive(:file_storage?) { false }
- allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
- end
-
- it 'returns http io stream' do
- is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
- end
- end
- end
-end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 714b2498538..42036d67f3d 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -48,6 +48,17 @@ describe JobArtifactUploader do
end
end
end
+
+ context 'when trace is stored in Object storage' do
+ before do
+ allow(uploader).to receive(:file_storage?) { false }
+ allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
+ end
+
+ it 'returns http io stream' do
+ is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
+ end
+ end
end
context 'file is stored in valid local_path' do