summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGrzegorz Bizon <grzegorz@gitlab.com>2017-06-08 05:29:35 +0000
committerKamil Trzciński <ayufan@ayufan.eu>2018-02-28 20:00:27 +0100
commit52c3b8f31264230814d2ffa79d0987c1491676b3 (patch)
treed5827bc9bd891c1dd602eb3cdd4e4062d2e85589
parent64701b51aeacf4f4f932f205a2d831880b757a43 (diff)
downloadgitlab-ce-52c3b8f31264230814d2ffa79d0987c1491676b3.tar.gz
Merge branch 'zj-object-store-artifacts' into 'master'
Object store for artifacts Closes gitlab-ce#29203 See merge request !1762
-rw-r--r--app/models/ci/build.rb20
-rw-r--r--app/services/projects/update_pages_service.rb30
-rw-r--r--app/uploaders/artifact_uploader.rb18
-rw-r--r--app/uploaders/object_store_uploader.rb139
-rw-r--r--app/views/projects/artifacts/_tree_file.html.haml9
-rw-r--r--app/views/projects/jobs/_sidebar.html.haml4
-rw-r--r--changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml4
-rw-r--r--config/gitlab.yml.example8
-rw-r--r--config/initializers/1_settings.rb6
-rw-r--r--db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb17
-rw-r--r--db/schema.rb2
-rw-r--r--doc/administration/job_artifacts.md37
-rw-r--r--lib/api/helpers.rb2
-rw-r--r--lib/ci/api/builds.rb2
-rw-r--r--lib/tasks/gitlab/artifacts.rake19
-rw-r--r--spec/factories/ci/builds.rb5
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml2
-rw-r--r--spec/models/ci/build_spec.rb44
-rw-r--r--spec/requests/api/jobs_spec.rb68
-rw-r--r--spec/requests/api/runner_spec.rb29
-rw-r--r--spec/requests/api/v3/builds_spec.rb61
-rw-r--r--spec/requests/ci/api/builds_spec.rb29
-rw-r--r--spec/services/ci/retry_build_service_spec.rb3
-rw-r--r--spec/support/stub_artifacts.rb26
-rw-r--r--spec/uploaders/artifact_uploader_spec.rb27
-rw-r--r--spec/uploaders/object_store_uploader_spec.rb231
26 files changed, 740 insertions, 102 deletions
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 432f3f242eb..39d0647b182 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -295,17 +295,27 @@ module Ci
!artifacts_expired? && artifacts_file.exists?
end
+ def browsable_artifacts?
+ artifacts_metadata?
+ end
+
+ def downloadable_single_artifacts_file?
+ artifacts_metadata? && artifacts_file.file_storage?
+ end
+
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_metadata_entry(path, **options)
- metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
- path,
- **options)
+ artifacts_metadata.use_file do |metadata_path|
+ metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+ metadata_path,
+ path,
+ **options)
- metadata.to_entry
+ metadata.to_entry
+ end
end
def erase_artifacts!
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index e60b854f916..3c7c9d421bc 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -65,9 +65,9 @@ module Projects
end
def extract_archive!(temp_path)
- if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
+ if artifacts_filename.ends_with?('.tar.gz') || artifacts_filename.ends_with?('.tgz')
extract_tar_archive!(temp_path)
- elsif artifacts.ends_with?('.zip')
+ elsif artifacts_filename.ends_with?('.zip')
extract_zip_archive!(temp_path)
else
raise 'unsupported artifacts format'
@@ -75,11 +75,13 @@ module Projects
end
def extract_tar_archive!(temp_path)
- results = Open3.pipeline(%W(gunzip -c #{artifacts}),
- %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
- %W(tar -x -C #{temp_path} #{SITE_PATH}),
- err: '/dev/null')
- raise 'pages failed to extract' unless results.compact.all?(&:success?)
+ build.artifacts_file.use_file do |artifacts_path|
+ results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
+ %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+ %W(tar -x -C #{temp_path} #{SITE_PATH}),
+ err: '/dev/null')
+ raise 'pages failed to extract' unless results.compact.all?(&:success?)
+ end
end
def extract_zip_archive!(temp_path)
@@ -97,8 +99,10 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
- unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
- raise 'pages failed to extract'
+ build.artifacts_file.use_file do |artifacts_path|
+ unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+ raise 'pages failed to extract'
+ end
end
end
@@ -129,6 +133,10 @@ module Projects
1 + max_size / BLOCK_SIZE
end
+ def artifacts_filename
+ build.artifacts_file.filename
+ end
+
def max_size
current_application_settings.max_pages_size.megabytes || MAX_SIZE
end
@@ -153,10 +161,6 @@ module Projects
build.ref
end
- def artifacts
- build.artifacts_file.path
- end
-
def latest_sha
project.commit(build.ref).try(:sha).to_s
end
diff --git a/app/uploaders/artifact_uploader.rb b/app/uploaders/artifact_uploader.rb
index 14addb6cf14..0bd2bd4f422 100644
--- a/app/uploaders/artifact_uploader.rb
+++ b/app/uploaders/artifact_uploader.rb
@@ -1,7 +1,5 @@
-class ArtifactUploader < GitlabUploader
- storage :file
-
- attr_reader :job, :field
+class ArtifactUploader < ObjectStoreUploader
+ storage_options Gitlab.config.artifacts
def self.local_artifacts_store
Gitlab.config.artifacts.path
@@ -11,12 +9,12 @@ class ArtifactUploader < GitlabUploader
File.join(self.local_artifacts_store, 'tmp/uploads/')
end
- def initialize(job, field)
- @job, @field = job, field
- end
-
def store_dir
- default_local_path
+ if file_storage?
+ default_local_path
+ else
+ default_path
+ end
end
def cache_dir
@@ -34,6 +32,6 @@ class ArtifactUploader < GitlabUploader
end
def default_path
- File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
+ File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
end
end
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
new file mode 100644
index 00000000000..32d4d31b37c
--- /dev/null
+++ b/app/uploaders/object_store_uploader.rb
@@ -0,0 +1,139 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+class ObjectStoreUploader < GitlabUploader
+ before :store, :set_default_local_store
+ before :store, :verify_license!
+
+ LOCAL_STORE = 1
+ REMOTE_STORE = 2
+
+ class << self
+ def storage_options(options)
+ @storage_options = options
+ end
+
+ def object_store_options
+ @storage_options&.object_store
+ end
+
+ def object_store_enabled?
+ object_store_options&.enabled
+ end
+ end
+
+ attr_reader :subject, :field
+
+ def initialize(subject, field)
+ @subject = subject
+ @field = field
+ end
+
+ def object_store
+ subject.public_send(:"#{field}_store")
+ end
+
+ def object_store=(value)
+ @storage = nil
+ subject.public_send(:"#{field}_store=", value)
+ end
+
+ def use_file
+ if file_storage?
+ return yield path
+ end
+
+ begin
+ cache_stored_file!
+ yield cache_path
+ ensure
+ cache_storage.delete_dir!(cache_path(nil))
+ end
+ end
+
+ def filename
+ super || file&.filename
+ end
+
+ def migrate!(new_store)
+ raise 'Undefined new store' unless new_store
+
+ return unless object_store != new_store
+ return unless file
+
+ old_file = file
+ old_store = object_store
+
+ # for moving remote file we need to first store it locally
+ cache_stored_file! unless file_storage?
+
+ # change storage
+ self.object_store = new_store
+
+ storage.store!(file).tap do |new_file|
+ # since we change storage store the new storage
+ # in case of failure delete new file
+ begin
+ subject.save!
+ rescue => e
+ new_file.delete
+ self.object_store = old_store
+ raise e
+ end
+
+ old_file.delete
+ end
+ end
+
+ def fog_directory
+ self.class.object_store_options.remote_directory
+ end
+
+ def fog_credentials
+ self.class.object_store_options.connection
+ end
+
+ def fog_public
+ false
+ end
+
+ def move_to_store
+ file.try(:storage) == storage
+ end
+
+ def move_to_cache
+ file.try(:storage) == cache_storage
+ end
+
+ # We block storing artifacts on Object Storage, not receiving
+ def verify_license!(new_file)
+ return if file_storage?
+
+ raise 'Object Storage feature is missing' unless subject.project.feature_available?(:object_storage)
+ end
+
+ private
+
+ def set_default_local_store(new_file)
+ self.object_store = LOCAL_STORE unless self.object_store
+ end
+
+ def storage
+ @storage ||=
+ if object_store == REMOTE_STORE
+ remote_storage
+ else
+ local_storage
+ end
+ end
+
+ def remote_storage
+ raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+ CarrierWave::Storage::Fog.new(self)
+ end
+
+ def local_storage
+ CarrierWave::Storage::File.new(self)
+ end
+end
diff --git a/app/views/projects/artifacts/_tree_file.html.haml b/app/views/projects/artifacts/_tree_file.html.haml
index 8edb9be049a..28f7a52df25 100644
--- a/app/views/projects/artifacts/_tree_file.html.haml
+++ b/app/views/projects/artifacts/_tree_file.html.haml
@@ -1,10 +1,13 @@
-- path_to_file = file_project_job_artifacts_path(@project, @build, path: file.path)
+- path_to_file = file_namespace_project_job_artifacts_path(@project.namespace, @project, @build, path: file.path) if @build.downloadable_single_artifacts_file?
%tr.tree-item{ 'data-link' => path_to_file }
- blob = file.blob
%td.tree-item-file-name
= tree_icon('file', blob.mode, blob.name)
- = link_to path_to_file do
- %span.str-truncated= blob.name
+ %span.str-truncated
+ - if path_to_file
+ = link_to file.name, path_to_file
+ - else
+ = file.name
%td
= number_to_human_size(blob.size, precision: 2)
diff --git a/app/views/projects/jobs/_sidebar.html.haml b/app/views/projects/jobs/_sidebar.html.haml
index bddb587ddc6..408c1511683 100644
--- a/app/views/projects/jobs/_sidebar.html.haml
+++ b/app/views/projects/jobs/_sidebar.html.haml
@@ -32,8 +32,8 @@
= link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
Download
- - if @build.artifacts_metadata?
- = link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
+ - if @build.browsable_artifacts?
+ = link_to browse_namespace_project_job_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
Browse
- if @build.trigger_request
diff --git a/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml b/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
new file mode 100644
index 00000000000..3f1499a3ffe
--- /dev/null
+++ b/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
@@ -0,0 +1,4 @@
+---
+title: Allow to Store Artifacts on Object Storage
+merge_request:
+author:
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 221e3d6e03b..28e9a5f420a 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -138,6 +138,14 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
+ # object_store:
+ # enabled: false
+ # remote_directory: artifacts
+ # connection:
+ # provider: AWS # Only AWS supported at the moment
+ # aws_access_key_id: AWS_ACCESS_KEY_ID
+ # aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ # region: eu-central-1
## Git LFS
lfs:
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index fa33e602e93..319af2e0b66 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -268,6 +268,12 @@ Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['object_store'] ||= Settingslogic.new({})
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+# Convert upload connection settings to use symbol keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_symbolize_keys!
+
#
# Registry
#
diff --git a/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
new file mode 100644
index 00000000000..deba890a478
--- /dev/null
+++ b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
@@ -0,0 +1,17 @@
+class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_column_with_default(:ci_builds, :artifacts_file_store, :integer, default: 1)
+ add_column_with_default(:ci_builds, :artifacts_metadata_store, :integer, default: 1)
+ end
+
+ def down
+ remove_column(:ci_builds, :artifacts_file_store)
+ remove_column(:ci_builds, :artifacts_metadata_store)
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 3dbe52c9c80..69e2a8cfd70 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -238,6 +238,8 @@ ActiveRecord::Schema.define(version: 20170707184244) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
+ t.integer "artifacts_file_store", default: 1, null: false
+ t.integer "artifacts_metadata_store", default: 1, null: false
end
add_index "ci_builds", ["auto_canceled_by_id"], name: "index_ci_builds_on_auto_canceled_by_id", using: :btree
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index 3587696225c..582f3c67b0d 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -85,12 +85,41 @@ _The artifacts are stored by default in
1. Save the file and [restart GitLab][] for the changes to take effect.
-### Using object storage
+---
+
+**Using Object Store**
+
+The previously mentioned methods use the local disk to store artifacts. However,
+there is the option to use object stores like AWS' S3. To do this, set the
+`object_store` in your `gitlab.yml`. This relies on valid AWS
+credentials already being configured.
-AWS S3 to store the artifacts.
+ ```yaml
+ artifacts:
+ enabled: true
+ path: /mnt/storage/artifacts
+ object_store:
+ enabled: true
+ remote_directory: my-bucket-name
+ connection:
+ provider: AWS
+ aws_access_key_id: S3_KEY_ID
+        aws_secret_access_key: S3_SECRET_KEY_ID
+ region: eu-central-1
+ ```
+
+This will allow you to migrate existing artifacts to object store,
+but all new artifacts will still be stored on the local disk.
+In the future you will be given an option to define a default storage for artifacts
+for all new files. Currently the artifacts migration has to be executed manually:
+
+ ```bash
+ gitlab-rake gitlab:artifacts:migrate
+ ```
-[Learn how to use the object storage option.][ee-os]
+Please note that enabling this feature
+will have the effect that artifacts are _not_ browsable anymore through the web
+interface. This limitation will be removed in one of the upcoming releases.
## Expiring artifacts
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index 0f4791841d2..7003390113b 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -318,7 +318,7 @@ module API
if artifacts_file.file_storage?
present_file!(artifacts_file.path, artifacts_file.filename)
else
- redirect_to(artifacts_file.url)
+ redirect(artifacts_file.url)
end
end
diff --git a/lib/ci/api/builds.rb b/lib/ci/api/builds.rb
index e2e91ce99cd..ecbdfb3e4b2 100644
--- a/lib/ci/api/builds.rb
+++ b/lib/ci/api/builds.rb
@@ -192,7 +192,7 @@ module Ci
end
unless artifacts_file.file_storage?
- return redirect_to build.artifacts_file.url
+ return redirect(build.artifacts_file.url)
end
present_file!(artifacts_file.path, artifacts_file.filename)
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
new file mode 100644
index 00000000000..5676456b2a0
--- /dev/null
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -0,0 +1,19 @@
+desc "GitLab | Migrate files for artifacts to comply with new storage format"
+namespace :gitlab do
+ namespace :artifacts do
+ task migrate: :environment do
+ puts 'Artifacts'.color(:yellow)
+ Ci::Build.joins(:project).with_artifacts
+ .where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
+      .find_each(batch_size: 100) do |build|
+ begin
+ build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
+ build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
+ print '.'
+ rescue
+ print 'F'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index a77f01ecb00..79699724875 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -163,6 +163,11 @@ FactoryGirl.define do
end
end
+ trait :remote_store do
+ artifacts_file_store ArtifactUploader::REMOTE_STORE
+ artifacts_metadata_store ArtifactUploader::REMOTE_STORE
+ end
+
trait :artifacts_expired do
after(:create) do |build, _|
build.artifacts_file =
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 4ef3db3721f..ac648301d18 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -252,7 +252,9 @@ CommitStatus:
- target_url
- description
- artifacts_file
+- artifacts_file_store
- artifacts_metadata
+- artifacts_metadata_store
- erased_by_id
- erased_at
- artifacts_expire_at
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 154b6759f46..64aedb29816 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -124,6 +124,50 @@ describe Ci::Build, :models do
end
end
+ describe '#browsable_artifacts?' do
+ subject { build.browsable_artifacts? }
+
+ context 'artifacts metadata does not exist' do
+ before do
+ build.update_attributes(artifacts_metadata: nil)
+ end
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'artifacts metadata does exists' do
+ let(:build) { create(:ci_build, :artifacts) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#downloadable_single_artifacts_file?' do
+ let(:build) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+
+ subject { build.downloadable_single_artifacts_file? }
+
+ before do
+ expect_any_instance_of(Ci::Build).to receive(:artifacts_metadata?).and_call_original
+ end
+
+ context 'artifacts are stored locally' do
+ let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'artifacts are stored remotely' do
+ let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#artifacts_expired?' do
subject { build.artifacts_expired? }
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 8d647eb1c7e..548b612b5b0 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -1,17 +1,17 @@
require 'spec_helper'
describe API::Jobs, :api do
- let!(:project) do
+ let(:project) do
create(:project, :repository, public_builds: false)
end
- let!(:pipeline) do
+ let(:pipeline) do
create(:ci_empty_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch)
end
- let!(:job) { create(:ci_build, pipeline: pipeline) }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
let(:user) { create(:user) }
let(:api_user) { user }
@@ -26,6 +26,7 @@ describe API::Jobs, :api do
let(:query) { Hash.new }
before do
+ job
get api("/projects/#{project.id}/jobs", api_user), query
end
@@ -89,6 +90,7 @@ describe API::Jobs, :api do
let(:query) { Hash.new }
before do
+ job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
@@ -190,30 +192,41 @@ describe API::Jobs, :api do
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
before do
+ stub_artifacts_object_storage
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'job with artifacts' do
- let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'when artifacts are stored locally' do
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ context 'authorized user' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'returns specific job artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(job.artifacts_file.file.file)
+ end
end
- it 'returns specific job artifacts' do
- expect(response).to have_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(job.artifacts_file.file.file)
+ context 'unauthorized user' do
+ let(:api_user) { nil }
+
+ it 'does not return specific job artifacts' do
+ expect(response).to have_http_status(401)
+ end
end
end
- context 'unauthorized user' do
- let(:api_user) { nil }
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
- it 'does not return specific job artifacts' do
- expect(response).to have_http_status(401)
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
end
end
end
@@ -228,6 +241,7 @@ describe API::Jobs, :api do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
+ stub_artifacts_object_storage
job.success
end
@@ -283,14 +297,24 @@ describe API::Jobs, :api do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{job.artifacts_file.filename}" }
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{job.artifacts_file.filename}" }
+ end
+
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
end
- it { expect(response).to have_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
+ end
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index ca5d98c78ef..358178f918d 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -185,7 +185,7 @@ describe API::Runner do
let(:project) { create(:empty_project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) }
- let!(:job) do
+ let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end
@@ -200,6 +200,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
+ job
stub_container_registry_config(enabled: false)
end
@@ -815,6 +816,7 @@ describe API::Runner do
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before do
+ stub_artifacts_object_storage
job.run!
end
@@ -1116,15 +1118,26 @@ describe API::Runner do
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts) }
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
context 'when using job token' do
- it 'download artifacts' do
- expect(response).to have_http_status(200)
- expect(response.headers).to include download_headers
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'download artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include download_headers
+ end
+ end
+
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, :artifacts, :remote_store) }
+
+ it 'download artifacts' do
+ expect(response).to have_http_status(302)
+ end
end
end
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index dc95599546c..37710aedbb1 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -7,13 +7,14 @@ describe API::V3::Builds do
let!(:developer) { create(:project_member, :developer, user: user, project: project) }
let(:reporter) { create(:project_member, :reporter, project: project) }
let(:guest) { create(:project_member, :guest, project: project) }
- let!(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
- let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
describe 'GET /projects/:id/builds ' do
let(:query) { '' }
before do
+ build
create(:ci_build, :skipped, pipeline: pipeline)
get v3_api("/projects/#{project.id}/builds?#{query}", api_user)
@@ -87,6 +88,10 @@ describe API::V3::Builds do
end
describe 'GET /projects/:id/repository/commits/:sha/builds' do
+ before do
+ build
+ end
+
context 'when commit does not exist in repository' do
before do
get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user)
@@ -187,22 +192,33 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
+ stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
context 'job with artifacts' do
- let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'when artifacts are stored locally' do
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+ context 'authorized user' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ it 'returns specific job artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(build.artifacts_file.file.file)
+ end
end
+ end
- it 'returns specific job artifacts' do
- expect(response).to have_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(build.artifacts_file.file.file)
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
end
end
@@ -225,6 +241,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
+ stub_artifacts_object_storage
build.success
end
@@ -280,14 +297,24 @@ describe API::V3::Builds do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{build.artifacts_file.filename}" }
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{build.artifacts_file.filename}" }
+ end
+
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
end
- it { expect(response).to have_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
+ end
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb
index c969d08d0dd..76a9cf11ad6 100644
--- a/spec/requests/ci/api/builds_spec.rb
+++ b/spec/requests/ci/api/builds_spec.rb
@@ -470,6 +470,7 @@ describe Ci::API::Builds do
let(:headers_with_token) { headers.merge(Ci::API::Helpers::BUILD_TOKEN_HEADER => token) }
before do
+ stub_artifacts_object_storage
build.run!
end
@@ -807,16 +808,26 @@ describe Ci::API::Builds do
end
context 'build has artifacts' do
- let(:build) { create(:ci_build, :artifacts) }
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
-
shared_examples 'having downloadable artifacts' do
- it 'download artifacts' do
- expect(response).to have_http_status(200)
- expect(response.headers).to include download_headers
+ context 'when stored locally' do
+ let(:build) { create(:ci_build, :artifacts) }
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'download artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include download_headers
+ end
+ end
+
+ context 'when stored remotely' do
+ let(:build) { create(:ci_build, :artifacts, :remote_store) }
+
+ it 'redirect to artifacts file' do
+ expect(response).to have_http_status(302)
+ end
end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index ef9927c5969..365ecad3a39 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -22,7 +22,8 @@ describe Ci::RetryBuildService, :services do
%i[type lock_version target_url base_tags
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
- user_id auto_canceled_by_id retried].freeze
+ user_id auto_canceled_by_id retried
+ artifacts_file_store artifacts_metadata_store].freeze
shared_examples 'build duplication' do
let(:stage) do
diff --git a/spec/support/stub_artifacts.rb b/spec/support/stub_artifacts.rb
new file mode 100644
index 00000000000..d531be5b8e7
--- /dev/null
+++ b/spec/support/stub_artifacts.rb
@@ -0,0 +1,26 @@
+module StubConfiguration
+ def stub_artifacts_object_storage(enabled: true)
+ Fog.mock!
+ allow(Gitlab.config.artifacts.object_store).to receive_messages(
+ enabled: enabled,
+ remote_directory: 'artifacts',
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
+ region: 'eu-central-1'
+ }
+ )
+
+ allow_any_instance_of(ArtifactUploader).to receive(:verify_license!) { true }
+
+ return unless enabled
+
+ ::Fog::Storage.new(Gitlab.config.artifacts.object_store.connection).tap do |connection|
+ begin
+ connection.directories.create(key: 'artifacts')
+ rescue Excon::Error::Conflict
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/artifact_uploader_spec.rb b/spec/uploaders/artifact_uploader_spec.rb
index 2a3bd0e3bb2..f4ba4a8207f 100644
--- a/spec/uploaders/artifact_uploader_spec.rb
+++ b/spec/uploaders/artifact_uploader_spec.rb
@@ -1,9 +1,10 @@
require 'rails_helper'
describe ArtifactUploader do
- let(:job) { create(:ci_build) }
+ let(:store) { described_class::LOCAL_STORE }
+ let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :artifacts_file) }
- let(:path) { Gitlab.config.artifacts.path }
+ let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_artifacts_store' do
subject { described_class.local_artifacts_store }
@@ -17,16 +18,30 @@ describe ArtifactUploader do
describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path }
-
- it { is_expected.to start_with(path) }
+
+ it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
end
describe '#store_dir' do
subject { uploader.store_dir }
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
+ let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
+
+ context 'when using local storage' do
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to end_with(path) }
+ end
+
+ context 'when using remote storage' do
+ let(:store) { described_class::REMOTE_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it { is_expected.to eq(path) }
+ end
end
describe '#cache_dir' do
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
new file mode 100644
index 00000000000..c6c7d47e703
--- /dev/null
+++ b/spec/uploaders/object_store_uploader_spec.rb
@@ -0,0 +1,231 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+describe ObjectStoreUploader do
+ let(:uploader_class) { Class.new(described_class) }
+ let(:object) { double }
+ let(:uploader) { uploader_class.new(object, :artifacts_file) }
+
+ describe '#object_store' do
+ it "calls artifacts_file_store on object" do
+ expect(object).to receive(:artifacts_file_store)
+
+ uploader.object_store
+ end
+ end
+
+ describe '#object_store=' do
+ it "calls artifacts_file_store= on object" do
+ expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
+
+ uploader.object_store = described_class::REMOTE_STORE
+ end
+ end
+
+ context 'when using ArtifactsUploader' do
+ let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+ let(:uploader) { job.artifacts_file }
+
+ context 'checking described_class' do
+ let(:store) { described_class::LOCAL_STORE }
+
+      it "is an instance of the described class" do
+ expect(uploader).to be_a(described_class)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ let(:store) { described_class::LOCAL_STORE }
+
+ it "calls a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::REMOTE_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "calls a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+ let(:uploader) { job.artifacts_file }
+ let(:store) { described_class::LOCAL_STORE }
+
+ subject { uploader.migrate!(new_store) }
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+        it "does not migrate the storage" do
+ subject
+
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::REMOTE_STORE }
+ let(:new_store) { described_class::LOCAL_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::REMOTE_STORE }
+ let!(:current_path) { uploader.path }
+
+ it "file does exist" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+          it "raises an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ it "does delete original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when subject save fails' do
+ before do
+ expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
+ end
+
+            it "propagates the error" do
+ expect { subject }.to raise_error(/exception/)
+ end
+
+ it "original file is not removed" do
+ begin
+ subject
+ rescue
+ end
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ uploader_class.storage_options double(
+ object_store: double(remote_directory: remote_directory))
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { 'connection' }
+
+ before do
+ uploader_class.storage_options double(
+ object_store: double(connection: connection))
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(connection) }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#verify_license!' do
+ subject { uploader.verify_license!(nil) }
+
+ context 'when using local storage' do
+ before do
+ expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when using remote storage' do
+ let(:project) { double }
+
+ before do
+ uploader_class.storage_options double(
+ object_store: double(enabled: true))
+ expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
+ expect(object).to receive(:project) { project }
+ end
+
+ context 'feature is not available' do
+ before do
+ expect(project).to receive(:feature_available?).with(:object_storage) { false }
+ end
+
+        it "raises an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'feature is available' do
+ before do
+ expect(project).to receive(:feature_available?).with(:object_storage) { true }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end