-rw-r--r--  app/controllers/concerns/send_file_upload.rb                             |  14
-rw-r--r--  app/controllers/projects/artifacts_controller.rb                         |   7
-rw-r--r--  app/controllers/projects/lfs_storage_controller.rb                       |   3
-rw-r--r--  app/controllers/projects/raw_controller.rb                               |   3
-rw-r--r--  app/models/ci/build.rb                                                   |  21
-rw-r--r--  app/models/lfs_object.rb                                                 |   2
-rw-r--r--  app/services/projects/update_pages_service.rb                            |  30
-rw-r--r--  app/uploaders/artifact_uploader.rb                                       |  32
-rw-r--r--  app/uploaders/lfs_object_uploader.rb                                     |  21
-rw-r--r--  app/uploaders/object_store_uploader.rb                                   | 197
-rw-r--r--  app/views/projects/jobs/_sidebar.html.haml                               |   6
-rw-r--r--  app/workers/object_storage_upload_worker.rb                              |  19
-rw-r--r--  changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml  |   4
-rw-r--r--  changelogs/unreleased-ee/jej-lfs-object-storage.yml                      |   5
-rw-r--r--  changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml           |   5
-rw-r--r--  config/gitlab.yml.example                                                |  44
-rw-r--r--  config/initializers/1_settings.rb                                        |  14
-rw-r--r--  config/sidekiq_queues.yml                                                |   1
-rw-r--r--  db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb             |  17
-rw-r--r--  db/migrate/20170825015534_add_file_store_to_lfs_objects.rb               |  35
-rw-r--r--  db/schema.rb                                                             |   3
-rw-r--r--  doc/administration/job_artifacts.md                                      |  37
-rw-r--r--  lib/api/helpers.rb                                                       |   2
-rw-r--r--  lib/backup/artifacts.rb                                                  |   2
-rw-r--r--  lib/tasks/gitlab/artifacts.rake                                          |  25
-rw-r--r--  lib/tasks/gitlab/lfs.rake                                                |  22
-rw-r--r--  spec/controllers/projects/artifacts_controller_spec.rb                   |  53
-rw-r--r--  spec/controllers/projects/raw_controller_spec.rb                         |  56
-rw-r--r--  spec/factories/ci/builds.rb                                              |   5
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml                  |   2
-rw-r--r--  spec/models/ci/build_spec.rb                                             |  44
-rw-r--r--  spec/requests/api/jobs_spec.rb                                           |  69
-rw-r--r--  spec/requests/api/runner_spec.rb                                         |  29
-rw-r--r--  spec/requests/api/v3/builds_spec.rb                                      |  57
-rw-r--r--  spec/requests/lfs_http_spec.rb                                           |  68
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb                             |   3
-rw-r--r--  spec/support/stub_object_storage.rb                                      |  32
-rw-r--r--  spec/tasks/gitlab/lfs_rake_spec.rb                                       |  37
-rw-r--r--  spec/uploaders/artifact_uploader_spec.rb                                 |  31
-rw-r--r--  spec/uploaders/lfs_object_uploader_spec.rb                               |  71
-rw-r--r--  spec/uploaders/object_store_uploader_spec.rb                             | 228
-rw-r--r--  spec/workers/object_storage_upload_worker_spec.rb                        |  85
42 files changed, 1284 insertions(+), 157 deletions(-)
diff --git a/app/controllers/concerns/send_file_upload.rb b/app/controllers/concerns/send_file_upload.rb
new file mode 100644
index 00000000000..d4de4cf1fda
--- /dev/null
+++ b/app/controllers/concerns/send_file_upload.rb
@@ -0,0 +1,14 @@
+module SendFileUpload
+ def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil)
+ if attachment
+ redirect_params[:query] = { "response-content-disposition" => "attachment;filename=#{attachment.inspect}" }
+ send_params.merge!(filename: attachment, disposition: 'attachment')
+ end
+
+ if file_upload.file_storage?
+ send_file file_upload.path, send_params
+ else
+ redirect_to file_upload.url(**redirect_params)
+ end
+ end
+end
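
For orientation, a minimal sketch of how this concern is meant to be used from a controller; the controller and uploader names below are illustrative, not part of this change. When the upload lives on local disk, `send_upload` falls back to `send_file`; otherwise it redirects to the object-store URL, and the `attachment:` option forces a download via the `response-content-disposition` query parameter.

```ruby
# Hypothetical controller using SendFileUpload -- names are examples only.
class Projects::ExportsController < Projects::ApplicationController
  include SendFileUpload

  def download
    # export_file is assumed to be a CarrierWave uploader that responds to
    # #file_storage?, #path and #url, like ArtifactUploader in this change.
    send_upload(export_file, attachment: export_file.filename)
  end

  private

  def export_file
    project.export_file
  end
end
```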
diff --git a/app/controllers/projects/artifacts_controller.rb b/app/controllers/projects/artifacts_controller.rb
index 0837451cc49..3995a2fc37a 100644
--- a/app/controllers/projects/artifacts_controller.rb
+++ b/app/controllers/projects/artifacts_controller.rb
@@ -1,6 +1,7 @@
class Projects::ArtifactsController < Projects::ApplicationController
include ExtractsPath
include RendersBlob
+ include SendFileUpload
layout 'project'
before_action :authorize_read_build!
@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
before_action :entry, only: [:file]
def download
- if artifacts_file.file_storage?
- send_file artifacts_file.path, disposition: 'attachment'
- else
- redirect_to artifacts_file.url
- end
+ send_upload(artifacts_file, attachment: artifacts_file.filename)
end
def browse
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 32759672b6c..134892b5d7b 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -1,6 +1,7 @@
class Projects::LfsStorageController < Projects::GitHttpClientController
include LfsRequest
include WorkhorseRequest
+ include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
@@ -11,7 +12,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
return
end
- send_file lfs_object.file.path, content_type: "application/octet-stream"
+ send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
end
def upload_authorize
diff --git a/app/controllers/projects/raw_controller.rb b/app/controllers/projects/raw_controller.rb
index a02cc477e08..9bc774b7636 100644
--- a/app/controllers/projects/raw_controller.rb
+++ b/app/controllers/projects/raw_controller.rb
@@ -2,6 +2,7 @@
class Projects::RawController < Projects::ApplicationController
include ExtractsPath
include BlobHelper
+ include SendFileUpload
before_action :require_non_empty_project
before_action :assign_ref_vars
@@ -31,7 +32,7 @@ class Projects::RawController < Projects::ApplicationController
lfs_object = find_lfs_object
if lfs_object && lfs_object.project_allowed_access?(@project)
- send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
+ send_upload(lfs_object.file, attachment: @blob.name)
else
render_404
end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 6ca46ae89c1..ebeab87d8fc 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -34,6 +34,7 @@ module Ci
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
+ scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
@@ -324,17 +325,27 @@ module Ci
!artifacts_expired? && artifacts_file.exists?
end
+ def browsable_artifacts?
+ artifacts_metadata?
+ end
+
+ def downloadable_single_artifacts_file?
+ artifacts_metadata? && artifacts_file.file_storage?
+ end
+
def artifacts_metadata?
artifacts? && artifacts_metadata.exists?
end
def artifacts_metadata_entry(path, **options)
- metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
- path,
- **options)
+ artifacts_metadata.use_file do |metadata_path|
+ metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+ metadata_path,
+ path,
+ **options)
- metadata.to_entry
+ metadata.to_entry
+ end
end
def erase_artifacts!
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index b7cf96abe83..0056b0f80f4 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -4,6 +4,8 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
+ scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
+
mount_uploader :file, LfsObjectUploader
def storage_project(project)
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index d34903c9989..78cf195d3b3 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -69,9 +69,9 @@ module Projects
end
def extract_archive!(temp_path)
- if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
+ if artifacts_filename.ends_with?('.tar.gz') || artifacts_filename.ends_with?('.tgz')
extract_tar_archive!(temp_path)
- elsif artifacts.ends_with?('.zip')
+ elsif artifacts_filename.ends_with?('.zip')
extract_zip_archive!(temp_path)
else
raise 'unsupported artifacts format'
@@ -79,11 +79,13 @@ module Projects
end
def extract_tar_archive!(temp_path)
- results = Open3.pipeline(%W(gunzip -c #{artifacts}),
- %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
- %W(tar -x -C #{temp_path} #{SITE_PATH}),
- err: '/dev/null')
- raise 'pages failed to extract' unless results.compact.all?(&:success?)
+ build.artifacts_file.use_file do |artifacts_path|
+ results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
+ %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+ %W(tar -x -C #{temp_path} #{SITE_PATH}),
+ err: '/dev/null')
+ raise 'pages failed to extract' unless results.compact.all?(&:success?)
+ end
end
def extract_zip_archive!(temp_path)
@@ -101,8 +103,10 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
- unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
- raise 'pages failed to extract'
+ build.artifacts_file.use_file do |artifacts_path|
+ unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+ raise 'pages failed to extract'
+ end
end
end
@@ -133,6 +137,10 @@ module Projects
1 + max_size / BLOCK_SIZE
end
+ def artifacts_filename
+ build.artifacts_file.filename
+ end
+
def max_size
max_pages_size = current_application_settings.max_pages_size.megabytes
@@ -161,10 +169,6 @@ module Projects
build.ref
end
- def artifacts
- build.artifacts_file.path
- end
-
def latest_sha
project.commit(build.ref).try(:sha).to_s
end
diff --git a/app/uploaders/artifact_uploader.rb b/app/uploaders/artifact_uploader.rb
index 14addb6cf14..f6e32aae2fd 100644
--- a/app/uploaders/artifact_uploader.rb
+++ b/app/uploaders/artifact_uploader.rb
@@ -1,39 +1,17 @@
-class ArtifactUploader < GitlabUploader
- storage :file
+class ArtifactUploader < ObjectStoreUploader
+ storage_options Gitlab.config.artifacts
- attr_reader :job, :field
-
- def self.local_artifacts_store
+ def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
- File.join(self.local_artifacts_store, 'tmp/uploads/')
- end
-
- def initialize(job, field)
- @job, @field = job, field
- end
-
- def store_dir
- default_local_path
- end
-
- def cache_dir
- File.join(self.class.local_artifacts_store, 'tmp/cache')
- end
-
- def work_dir
- File.join(self.class.local_artifacts_store, 'tmp/work')
+ File.join(self.local_store_path, 'tmp/uploads/')
end
private
- def default_local_path
- File.join(self.class.local_artifacts_store, default_path)
- end
-
def default_path
- File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
+ File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
end
end
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index d11ebf0f9ca..8a5f599c1d3 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,19 +1,18 @@
-class LfsObjectUploader < GitlabUploader
- storage :file
+class LfsObjectUploader < ObjectStoreUploader
+ storage_options Gitlab.config.lfs
+ after :store, :schedule_migration_to_object_storage
- def store_dir
- "#{Gitlab.config.lfs.storage_path}/#{model.oid[0, 2]}/#{model.oid[2, 2]}"
- end
-
- def cache_dir
- "#{Gitlab.config.lfs.storage_path}/tmp/cache"
+ def self.local_store_path
+ Gitlab.config.lfs.storage_path
end
def filename
- model.oid[4..-1]
+ subject.oid[4..-1]
end
- def work_dir
- File.join(Gitlab.config.lfs.storage_path, 'tmp', 'work')
+ private
+
+ def default_path
+ "#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
end
end
diff --git a/app/uploaders/object_store_uploader.rb b/app/uploaders/object_store_uploader.rb
new file mode 100644
index 00000000000..3a742d4f715
--- /dev/null
+++ b/app/uploaders/object_store_uploader.rb
@@ -0,0 +1,197 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+class ObjectStoreUploader < GitlabUploader
+ before :store, :set_default_local_store
+ before :store, :verify_license!
+
+ LOCAL_STORE = 1
+ REMOTE_STORE = 2
+
+ class << self
+ def storage_options(options)
+ @storage_options = options
+ end
+
+ def object_store_options
+ @storage_options&.object_store
+ end
+
+ def object_store_enabled?
+ object_store_options&.enabled
+ end
+
+ def background_upload_enabled?
+ object_store_options&.background_upload
+ end
+
+ def object_store_credentials
+ @object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
+ end
+
+ def object_store_directory
+ object_store_options&.remote_directory
+ end
+
+ def local_store_path
+ raise NotImplementedError
+ end
+ end
+
+ attr_reader :subject, :field
+
+ def initialize(subject, field)
+ @subject = subject
+ @field = field
+ end
+
+ def object_store
+ subject.public_send(:"#{field}_store")
+ end
+
+ def object_store=(value)
+ @storage = nil
+ subject.public_send(:"#{field}_store=", value)
+ end
+
+ def store_dir
+ if file_storage?
+ default_local_path
+ else
+ default_path
+ end
+ end
+
+ def use_file
+ if file_storage?
+ return yield path
+ end
+
+ begin
+ cache_stored_file!
+ yield cache_path
+ ensure
+ cache_storage.delete_dir!(cache_path(nil))
+ end
+ end
+
+ def filename
+ super || file&.filename
+ end
+
+ def migrate!(new_store)
+ raise 'Undefined new store' unless new_store
+
+ return unless object_store != new_store
+ return unless file
+
+ old_file = file
+ old_store = object_store
+
+ # for moving remote file we need to first store it locally
+ cache_stored_file! unless file_storage?
+
+ # change storage
+ self.object_store = new_store
+
+ storage.store!(file).tap do |new_file|
+ # since we change storage store the new storage
+ # in case of failure delete new file
+ begin
+ subject.save!
+ rescue => e
+ new_file.delete
+ self.object_store = old_store
+ raise e
+ end
+
+ old_file.delete
+ end
+ end
+
+ def schedule_migration_to_object_storage(new_file)
+ if self.class.object_store_enabled? && licensed? && file_storage?
+ ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
+ end
+ end
+
+ def fog_directory
+ self.class.object_store_options.remote_directory
+ end
+
+ def fog_credentials
+ self.class.object_store_options.connection
+ end
+
+ def fog_public
+ false
+ end
+
+ def move_to_store
+ file.try(:storage) == storage
+ end
+
+ def move_to_cache
+ file.try(:storage) == cache_storage
+ end
+
+ # We block storing artifacts on Object Storage, not receiving
+ def verify_license!(new_file)
+ return if file_storage?
+
+ raise 'Object Storage feature is missing' unless licensed?
+ end
+
+ def exists?
+ file.try(:exists?)
+ end
+
+ def cache_dir
+ File.join(self.class.local_store_path, 'tmp/cache')
+ end
+
+ # Override this if you don't want to save local files by default to the Rails.root directory
+ def work_dir
+ # Default path set by CarrierWave:
+ # https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
+ # CarrierWave.tmp_path
+ File.join(self.class.local_store_path, 'tmp/work')
+ end
+
+ def licensed?
+ License.feature_available?(:object_storage)
+ end
+
+ private
+
+ def set_default_local_store(new_file)
+ self.object_store = LOCAL_STORE unless self.object_store
+ end
+
+ def default_local_path
+ File.join(self.class.local_store_path, default_path)
+ end
+
+ def default_path
+ raise NotImplementedError
+ end
+
+ def storage
+ @storage ||=
+ if object_store == REMOTE_STORE
+ remote_storage
+ else
+ local_storage
+ end
+ end
+
+ def remote_storage
+ raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+ CarrierWave::Storage::Fog.new(self)
+ end
+
+ def local_storage
+ CarrierWave::Storage::File.new(self)
+ end
+end
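
A rough usage sketch of the uploader API introduced above, assuming object storage is configured and the feature is licensed: `migrate!` moves a stored file between `LOCAL_STORE` and `REMOTE_STORE` and persists the new `*_store` value on the model, while `use_file` always yields a local path, caching remote files first and cleaning up afterwards.

```ruby
# Sketch only: assumes a Ci::Build with locally stored artifacts and
# object storage enabled for ArtifactUploader.
build = Ci::Build.with_artifacts_stored_locally.first

# Push the artifacts file to the remote store; on failure the uploader
# deletes the new copy, restores the previous store value and re-raises.
build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)

# Work with the file through a local path; for a remote file the uploader
# caches it locally and removes the cache directory when the block returns.
build.artifacts_file.use_file do |path|
  puts File.size(path)
end
```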
diff --git a/app/views/projects/jobs/_sidebar.html.haml b/app/views/projects/jobs/_sidebar.html.haml
index b5067367802..5a12607afa4 100644
--- a/app/views/projects/jobs/_sidebar.html.haml
+++ b/app/views/projects/jobs/_sidebar.html.haml
@@ -35,9 +35,9 @@
= link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
Download
- - if @build.artifacts_metadata?
- = link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
- Browse
+ - if @build.browsable_artifacts?
+ = link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
+ Browse
- if @build.trigger_request
.build-widget.block
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
new file mode 100644
index 00000000000..0a374c4323f
--- /dev/null
+++ b/app/workers/object_storage_upload_worker.rb
@@ -0,0 +1,19 @@
+class ObjectStorageUploadWorker
+ include Sidekiq::Worker
+ include DedicatedSidekiqQueue
+
+ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
+ uploader_class = uploader_class_name.constantize
+ subject_class = subject_class_name.constantize
+
+ return unless uploader_class.object_store_enabled?
+ return unless uploader_class.background_upload_enabled?
+
+ subject = subject_class.find(subject_id)
+ file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
+
+ return unless file.licensed?
+
+ file.migrate!(uploader_class::REMOTE_STORE)
+ end
+end
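
The worker is generic over uploader class, model class and mounted field. As a hedged illustration, the LFS `after :store` hook above ends up enqueueing something equivalent to the following, and the job itself reduces to a `migrate!` call once the enabled, background-upload and licence checks pass.

```ruby
# Roughly what schedule_migration_to_object_storage enqueues for LFS objects:
ObjectStorageUploadWorker.perform_async('LfsObjectUploader', 'LfsObject', :file, lfs_object.id)

# Which the worker then performs as, approximately:
#   LfsObject.find(lfs_object.id).file.migrate!(LfsObjectUploader::REMOTE_STORE)
```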
diff --git a/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml b/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
new file mode 100644
index 00000000000..3f1499a3ffe
--- /dev/null
+++ b/changelogs/unreleased-ee/allow-to-store-artifacts-on-object-storage.yml
@@ -0,0 +1,4 @@
+---
+title: Allow to Store Artifacts on Object Storage
+merge_request:
+author:
diff --git a/changelogs/unreleased-ee/jej-lfs-object-storage.yml b/changelogs/unreleased-ee/jej-lfs-object-storage.yml
new file mode 100644
index 00000000000..c7a6388afb1
--- /dev/null
+++ b/changelogs/unreleased-ee/jej-lfs-object-storage.yml
@@ -0,0 +1,5 @@
+---
+title: LFS files can be stored in remote object storage such as S3
+merge_request: 2760
+author:
+type: added
diff --git a/changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml b/changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml
new file mode 100644
index 00000000000..70ffaa45bfd
--- /dev/null
+++ b/changelogs/unreleased-ee/zj-improve-object-store-rake-task.yml
@@ -0,0 +1,5 @@
+---
+title: Use a logger for the artifacts migration rake task
+merge_request:
+author:
+type: changed
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index 7547ba4a8fa..dfc69e358cb 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -145,12 +145,34 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
+ # object_store:
+ # enabled: false
+ # remote_directory: artifacts # The bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # connection:
+ # provider: AWS # Only AWS supported at the moment
+ # aws_access_key_id: AWS_ACCESS_KEY_ID
+ # aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ # region: eu-central-1
## Git LFS
lfs:
enabled: true
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # Bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ connection:
+ provider: AWS
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ # Use the following options to configure an AWS compatible host
+ # host: 'localhost' # default: s3.amazonaws.com
+ # endpoint: 'http://127.0.0.1:9000' # default: nil
+ # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
@@ -639,6 +661,28 @@ test:
enabled: true
lfs:
enabled: false
+ # The location where LFS objects are stored (default: shared/lfs-objects).
+ # storage_path: shared/lfs-objects
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ artifacts:
+ enabled: true
+ # The location where build artifacts are stored (default: shared/artifacts).
+ # path: shared/artifacts
+ object_store:
+ enabled: false
+ remote_directory: artifacts # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
gitlab:
host: localhost
port: 80
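
For reference, the `connection` hash in the examples above is handed to Fog essentially as-is (symbolized in `ObjectStoreUploader.object_store_credentials`). A minimal sketch of the equivalent Fog call, assuming AWS credentials in the environment and an existing bucket named after `remote_directory`:

```ruby
require 'fog/aws'

# Roughly the connection ObjectStoreUploader#remote_storage builds on top of.
connection = Fog::Storage.new(
  provider:              'AWS',
  aws_access_key_id:     ENV['AWS_ACCESS_KEY_ID'],
  aws_secret_access_key: ENV['AWS_SECRET_ACCESS_KEY'],
  region:                'eu-central-1'
)

# remote_directory maps to the bucket name.
bucket = connection.directories.get('artifacts')
```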
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index d1156b0c8a8..224ae5aa56b 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -301,6 +301,13 @@ Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['object_store'] ||= Settingslogic.new({})
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
+
#
# Registry
#
@@ -336,6 +343,13 @@ Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
+Settings.lfs['object_store'] ||= Settingslogic.new({})
+Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
+Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.lfs['object_store']['connection']&.deep_stringify_keys!
+
#
# Mattermost
#
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index e2bb766ee47..41b78bad8cc 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -62,6 +62,7 @@
- [update_user_activity, 1]
- [propagate_service_template, 1]
- [background_migration, 1]
+ - [object_storage_upload, 1]
- [gcp_cluster, 1]
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
diff --git a/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
new file mode 100644
index 00000000000..deba890a478
--- /dev/null
+++ b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
@@ -0,0 +1,17 @@
+class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_column_with_default(:ci_builds, :artifacts_file_store, :integer, default: 1)
+ add_column_with_default(:ci_builds, :artifacts_metadata_store, :integer, default: 1)
+ end
+
+ def down
+ remove_column(:ci_builds, :artifacts_file_store)
+ remove_column(:ci_builds, :artifacts_metadata_store)
+ end
+end
diff --git a/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
new file mode 100644
index 00000000000..4d459ccab2c
--- /dev/null
+++ b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
@@ -0,0 +1,35 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddFileStoreToLfsObjects < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ # When a migration requires downtime you **must** uncomment the following
+ # constant and define a short and easy to understand explanation as to why the
+ # migration requires downtime.
+ # DOWNTIME_REASON = ''
+
+ # When using the methods "add_concurrent_index", "remove_concurrent_index" or
+ # "add_column_with_default" you must disable the use of transactions
+ # as these methods can not run in an existing transaction.
+ # When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
+ # that either of them is the _only_ method called in the migration,
+ # any other changes should go in a separate migration.
+ # This ensures that upon failure _only_ the index creation or removing fails
+ # and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ disable_ddl_transaction!
+
+ def up
+ add_column(:lfs_objects, :file_store, :integer)
+ end
+
+ def down
+ remove_column(:lfs_objects, :file_store)
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index c60cb729b75..68942944ae8 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -273,6 +273,8 @@ ActiveRecord::Schema.define(version: 20171106101200) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
+ t.integer "artifacts_file_store", default: 1, null: false
+ t.integer "artifacts_metadata_store", default: 1, null: false
t.boolean "protected"
t.integer "failure_reason"
end
@@ -917,6 +919,7 @@ ActiveRecord::Schema.define(version: 20171106101200) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
+ t.integer "file_store"
end
add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index 86b436d89dd..59305774c4a 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -85,12 +85,41 @@ _The artifacts are stored by default in
1. Save the file and [restart GitLab][] for the changes to take effect.
-### Using object storage
+---
+
+**Using Object Store**
+
+The previously mentioned methods use the local disk to store artifacts. However,
+you can also use an object store such as AWS S3. To do so, configure the
+`object_store` section in your `gitlab.yml`. This requires valid AWS
+credentials to be configured already.
-In [GitLab Enterprise Edition Premium][eep] you can use an object storage like
-AWS S3 to store the artifacts.
+ ```yaml
+ artifacts:
+ enabled: true
+ path: /mnt/storage/artifacts
+ object_store:
+ enabled: true
+ remote_directory: my-bucket-name
+ connection:
+ provider: AWS
+ aws_access_key_id: S3_KEY_ID
+ aws_secret_access_key: S3_SECRET_ACCESS_KEY
+ region: eu-central-1
+ ```
+
+This allows you to migrate existing artifacts to the object store,
+but all new artifacts will still be stored on the local disk.
+In the future you will be able to define a default storage backend
+for all new artifacts. Currently the migration has to be run manually:
+
+ ```bash
+ gitlab-rake gitlab:artifacts:migrate
+ ```
-[Learn how to use the object storage option.][ee-os]
+Please note that enabling this feature means artifacts are _not_ browsable
+through the web interface anymore. This limitation will be removed in one of
+the upcoming releases.
## Expiring artifacts
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index 5f9b94cc89c..6e58022a265 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -378,7 +378,7 @@ module API
if artifacts_file.file_storage?
present_file!(artifacts_file.path, artifacts_file.filename)
else
- redirect_to(artifacts_file.url)
+ redirect(artifacts_file.url)
end
end
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 1f4bda6f588..d9436e1d5e5 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
- super('artifacts', ArtifactUploader.local_artifacts_store)
+ super('artifacts', ArtifactUploader.local_store_path)
end
def create_files_dir
diff --git a/lib/tasks/gitlab/artifacts.rake b/lib/tasks/gitlab/artifacts.rake
new file mode 100644
index 00000000000..29d8a145be8
--- /dev/null
+++ b/lib/tasks/gitlab/artifacts.rake
@@ -0,0 +1,25 @@
+require 'logger'
+require 'resolv-replace'
+
+desc "GitLab | Migrate files for artifacts to comply with new storage format"
+namespace :gitlab do
+ namespace :artifacts do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of artifacts')
+
+ Ci::Build.joins(:project)
+ .with_artifacts_stored_locally
+ .find_each(batch_size: 10) do |build|
+ begin
+ build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
+ build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
+
+ logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
+ rescue => e
+ logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
+ end
+ end
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/lfs.rake b/lib/tasks/gitlab/lfs.rake
new file mode 100644
index 00000000000..c17c05f8589
--- /dev/null
+++ b/lib/tasks/gitlab/lfs.rake
@@ -0,0 +1,22 @@
+require 'logger'
+
+desc "GitLab | Migrate LFS objects to remote storage"
+namespace :gitlab do
+ namespace :lfs do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of LFS files to object storage')
+
+ LfsObject.with_files_stored_locally
+ .find_each(batch_size: 10) do |lfs_object|
+ begin
+ lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+
+ logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
+ rescue => e
+ logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index d1051741430..bc3d277fc8e 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -22,7 +22,7 @@ describe Projects::ArtifactsController do
describe 'GET download' do
it 'sends the artifacts file' do
- expect(controller).to receive(:send_file).with(job.artifacts_file.path, disposition: 'attachment').and_call_original
+ expect(controller).to receive(:send_file).with(job.artifacts_file.path, hash_including(disposition: 'attachment')).and_call_original
get :download, namespace_id: project.namespace, project_id: project, job_id: job
end
@@ -114,19 +114,52 @@ describe Projects::ArtifactsController do
describe 'GET raw' do
context 'when the file exists' do
- it 'serves the file using workhorse' do
- get :raw, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
+ let(:path) { 'ci_artifacts.txt' }
+ let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
- send_data = response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
+ shared_examples 'a valid file' do
+ it 'serves the file using workhorse' do
+ subject
- expect(send_data).to start_with('artifacts-entry:')
+ expect(send_data).to start_with('artifacts-entry:')
- base64_params = send_data.sub(/\Aartifacts\-entry:/, '')
- params = JSON.parse(Base64.urlsafe_decode64(base64_params))
+ expect(params.keys).to eq(%w(Archive Entry))
+ expect(params['Archive']).to start_with(archive_path)
+ # On object storage, the URL can end with a query string
+ expect(params['Archive']).to match(/build_artifacts.zip(\?[^?]+)?$/)
+ expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt'))
+ end
+
+ def send_data
+ response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
+ end
- expect(params.keys).to eq(%w(Archive Entry))
- expect(params['Archive']).to end_with('build_artifacts.zip')
- expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt'))
+ def params
+ @params ||= begin
+ base64_params = send_data.sub(/\Aartifacts\-entry:/, '')
+ JSON.parse(Base64.urlsafe_decode64(base64_params))
+ end
+ end
+ end
+
+ context 'when using local file storage' do
+ it_behaves_like 'a valid file' do
+ let(:store) { ObjectStoreUploader::LOCAL_STORE }
+ let(:archive_path) { ArtifactUploader.local_store_path }
+ end
+ end
+
+ context 'when using remote file storage' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it_behaves_like 'a valid file' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+ let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+ let(:store) { ObjectStorage::Store::REMOTE }
+ let(:archive_path) { 'https://' }
+ end
end
end
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index 3a0c3faa7b4..e4310a4847b 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -8,10 +8,7 @@ describe Projects::RawController do
let(:id) { 'master/README.md' }
it 'delivers ASCII file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -25,10 +22,7 @@ describe Projects::RawController do
let(:id) { 'master/files/images/6049019_460s.jpg' }
it 'sets image content type header' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('image/jpeg')
@@ -54,21 +48,40 @@ describe Projects::RawController do
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
end
+
+ context 'and lfs uses object storage' do
+ before do
+ lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
+ lfs_object.save!
+ stub_lfs_object_storage
+ lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ end
+
+ it 'responds with redirect to file' do
+ get_show(public_project, id)
+
+ expect(response).to have_gitlab_http_status(302)
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+
+ it 'sets content disposition' do
+ get_show(public_project, id)
+
+ file_uri = URI.parse(response.location)
+ params = CGI.parse(file_uri.query)
+
+ expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
+ end
+ end
end
context 'when project does not have access' do
it 'does not serve the file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(404)
end
@@ -81,10 +94,7 @@ describe Projects::RawController do
end
it 'delivers ASCII file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -95,4 +105,10 @@ describe Projects::RawController do
end
end
end
+
+ def get_show(project, id)
+ get(:show, namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: id)
+ end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index cf38066dedc..4c485461dc7 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -168,6 +168,11 @@ FactoryGirl.define do
end
end
+ trait :remote_store do
+ artifacts_file_store ArtifactUploader::REMOTE_STORE
+ artifacts_metadata_store ArtifactUploader::REMOTE_STORE
+ end
+
trait :artifacts_expired do
after(:create) do |build, _|
build.artifacts_file =
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 4e36af18aa7..4f97f2017ca 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -266,7 +266,9 @@ CommitStatus:
- target_url
- description
- artifacts_file
+- artifacts_file_store
- artifacts_metadata
+- artifacts_metadata_store
- erased_by_id
- erased_at
- artifacts_expire_at
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 5ed2e1ca99a..99a669464e0 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -162,6 +162,50 @@ describe Ci::Build do
end
end
+ describe '#browsable_artifacts?' do
+ subject { build.browsable_artifacts? }
+
+ context 'artifacts metadata does not exist' do
+ before do
+ build.update_attributes(artifacts_metadata: nil)
+ end
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'artifacts metadata does exists' do
+ let(:build) { create(:ci_build, :artifacts) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#downloadable_single_artifacts_file?' do
+ let(:build) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+
+ subject { build.downloadable_single_artifacts_file? }
+
+ before do
+ expect_any_instance_of(Ci::Build).to receive(:artifacts_metadata?).and_call_original
+ end
+
+ context 'artifacts are stored locally' do
+ let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'artifacts are stored remotely' do
+ let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#artifacts_expired?' do
subject { build.artifacts_expired? }
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 1765907c1b4..8bb3d5ffb03 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -11,14 +11,18 @@ describe API::Jobs do
ref: project.default_branch)
end
- let!(:job) { create(:ci_build, pipeline: pipeline) }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
let(:user) { create(:user) }
let(:api_user) { user }
let(:reporter) { create(:project_member, :reporter, project: project).user }
let(:guest) { create(:project_member, :guest, project: project).user }
+ let(:cross_project_pipeline_enabled) { true }
+ let(:object_storage_enabled) { true }
before do
+ stub_licensed_features(cross_project_pipelines: cross_project_pipeline_enabled,
+ object_storage: object_storage_enabled)
project.add_developer(user)
end
@@ -26,6 +30,7 @@ describe API::Jobs do
let(:query) { Hash.new }
before do
+ job
get api("/projects/#{project.id}/jobs", api_user), query
end
@@ -89,6 +94,7 @@ describe API::Jobs do
let(:query) { Hash.new }
before do
+ job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
@@ -278,31 +284,41 @@ describe API::Jobs do
describe 'GET /projects/:id/jobs/:job_id/artifacts' do
before do
+ stub_artifacts_object_storage
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'job with artifacts' do
- let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'when artifacts are stored locally' do
+ let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+ context 'authorized user' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ it 'returns specific job artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(job.artifacts_file.file.file)
+ end
end
- it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(job.artifacts_file.file.file)
+ context 'unauthorized user' do
+ let(:api_user) { nil }
+
+ it 'does not return specific job artifacts' do
+ expect(response).to have_http_status(401)
+ end
end
end
- context 'when anonymous user is accessing private artifacts' do
- let(:api_user) { nil }
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
- it 'hides artifacts and rejects request' do
- expect(project).to be_private
- expect(response).to have_gitlab_http_status(404)
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
end
end
end
@@ -317,6 +333,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
+ stub_artifacts_object_storage(licensed: :skip)
job.success
end
@@ -373,14 +390,24 @@ describe API::Jobs do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{job.artifacts_file.filename}" }
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{job.artifacts_file.filename}" }
+ end
+
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
end
- it { expect(response).to have_gitlab_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
+ end
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 47f4ccd4887..671b988ec91 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -190,7 +190,7 @@ describe API::Runner do
let(:project) { create(:project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) }
- let!(:job) do
+ let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end
@@ -205,6 +205,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
+ job
stub_container_registry_config(enabled: false)
end
@@ -863,6 +864,7 @@ describe API::Runner do
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before do
+ stub_artifacts_object_storage
job.run!
end
@@ -1164,15 +1166,26 @@ describe API::Runner do
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts) }
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
- end
context 'when using job token' do
- it 'download artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include download_headers
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'download artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include download_headers
+ end
+ end
+
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, :artifacts, :remote_store) }
+
+ it 'download artifacts' do
+ expect(response).to have_http_status(302)
+ end
end
end
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 3f58b7ef384..3b7d99b84b0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -14,6 +14,7 @@ describe API::V3::Builds do
let(:query) { '' }
before do
+ build
create(:ci_build, :skipped, pipeline: pipeline)
get v3_api("/projects/#{project.id}/builds?#{query}", api_user)
@@ -87,6 +88,10 @@ describe API::V3::Builds do
end
describe 'GET /projects/:id/repository/commits/:sha/builds' do
+ before do
+ build
+ end
+
context 'when commit does not exist in repository' do
before do
get v3_api("/projects/#{project.id}/repository/commits/1a271fd1/builds", api_user)
@@ -187,22 +192,33 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
+ stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
context 'job with artifacts' do
- let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ context 'when artifacts are stored locally' do
+ let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
- context 'authorized user' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ context 'authorized user' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ it 'returns specific job artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include(download_headers)
+ expect(response.body).to match_file(build.artifacts_file.file.file)
+ end
end
+ end
- it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include(download_headers)
- expect(response.body).to match_file(build.artifacts_file.file.file)
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
end
end
@@ -225,6 +241,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
+ stub_artifacts_object_storage
build.success
end
@@ -280,14 +297,24 @@ describe API::V3::Builds do
context 'find proper job' do
shared_examples 'a valid file' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' =>
- "attachment; filename=#{build.artifacts_file.filename}" }
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' =>
+ "attachment; filename=#{build.artifacts_file.filename}" }
+ end
+
+ it { expect(response).to have_http_status(200) }
+ it { expect(response.headers).to include(download_headers) }
end
- it { expect(response).to have_gitlab_http_status(200) }
- it { expect(response.headers).to include(download_headers) }
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
+ end
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 52e93e157f1..e3dfecd8898 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do
describe 'when fetching lfs object' do
let(:project) { create(:project) }
let(:update_permissions) { }
+ let(:before_get) { }
before do
enable_lfs
update_permissions
+ before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end
@@ -239,6 +241,21 @@ describe 'Git LFS API and storage' do
end
it_behaves_like 'responds with a file'
+
+ context 'when LFS uses object storage' do
+ let(:before_get) do
+ stub_lfs_object_storage
+ lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
+ end
+
+ it 'responds with redirect' do
+ expect(response).to have_gitlab_http_status(302)
+ end
+
+ it 'responds with the file location' do
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+ end
end
end
@@ -973,6 +990,46 @@ describe 'Git LFS API and storage' do
end
end
+ context 'and workhorse requests upload finalize for a new lfs object' do
+ before do
+ allow_any_instance_of(LfsObjectUploader).to receive(:exists?) { false }
+ end
+
+ context 'with object storage disabled' do
+ it "doesn't attempt to migrate file to object storage" do
+ expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+ put_finalize(with_tempfile: true)
+ end
+ end
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'schedules migration of file to object storage' do
+ expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
+
+ put_finalize(with_tempfile: true)
+ end
+ end
+ end
+
+ context 'and project has limit enabled but will stay under the limit' do
+ before do
+ allow_any_instance_of(EE::Project).to receive_messages(
+ actual_size_limit: 200,
+ size_limit_enabled?: true)
+
+ put_finalize
+ end
+
+ it 'responds with status 200' do
+ expect(response).to have_gitlab_http_status(200)
+ end
+ end
+
context 'invalid tempfiles' do
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('foo/bar')
@@ -1172,7 +1229,9 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
- def put_finalize(lfs_tmp = lfs_tmp_file)
+ def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
+ setup_tempfile(lfs_tmp) if with_tempfile
+
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
end
@@ -1180,6 +1239,13 @@ describe 'Git LFS API and storage' do
def lfs_tmp_file
"#{sample_oid}012345678"
end
+
+ def setup_tempfile(lfs_tmp)
+ upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
+
+ FileUtils.mkdir_p(upload_path)
+ FileUtils.touch(File.join(upload_path, lfs_tmp))
+ end
end
def enable_lfs
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index b61d1cb765e..c1ba021fcba 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -23,7 +23,8 @@ describe Ci::RetryBuildService do
%i[type lock_version target_url base_tags trace_sections
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
- user_id auto_canceled_by_id retried failure_reason].freeze
+ user_id auto_canceled_by_id retried failure_reason
+ artifacts_file_store artifacts_metadata_store].freeze
shared_examples 'build duplication' do
let(:stage) do
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
new file mode 100644
index 00000000000..df7e05585d2
--- /dev/null
+++ b/spec/support/stub_object_storage.rb
@@ -0,0 +1,32 @@
+module StubConfiguration
+ def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled: true, licensed: true)
+ Fog.mock!
+
+ allow(config).to receive(:enabled) { enabled }
+
+ stub_licensed_features(object_storage: licensed) unless licensed == :skip
+
+ return unless enabled
+
+ ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
+ begin
+ connection.directories.create(key: remote_directory)
+ rescue Excon::Error::Conflict
+ end
+ end
+ end
+
+ def stub_artifacts_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
+ uploader: ArtifactUploader,
+ remote_directory: 'artifacts',
+ **params)
+ end
+
+ def stub_lfs_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
+ uploader: LfsObjectUploader,
+ remote_directory: 'lfs-objects',
+ **params)
+ end
+end
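
A hedged example of how these helpers would typically be used from a spec; the describe block and expectations are illustrative and assume the helpers and factories from this change are mixed in via `spec/support`.

```ruby
describe 'artifacts on object storage' do
  before do
    stub_artifacts_object_storage                     # Fog mocked, enabled and licensed
    # stub_artifacts_object_storage(enabled: false)   # simulate the store being disabled
    # stub_artifacts_object_storage(licensed: false)  # simulate a missing licence
  end

  it 'migrates a locally stored artifact' do
    build = create(:ci_build, :artifacts)

    build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)

    expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
  end
end
```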
diff --git a/spec/tasks/gitlab/lfs_rake_spec.rb b/spec/tasks/gitlab/lfs_rake_spec.rb
new file mode 100644
index 00000000000..faed24f2010
--- /dev/null
+++ b/spec/tasks/gitlab/lfs_rake_spec.rb
@@ -0,0 +1,37 @@
+require 'rake_helper'
+
+describe 'gitlab:lfs namespace rake task' do
+ before :all do
+ Rake.application.rake_require 'tasks/gitlab/lfs'
+ end
+
+ describe 'migrate' do
+ let(:local) { ObjectStoreUploader::LOCAL_STORE }
+ let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+ let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+
+ def lfs_migrate
+ run_rake_task('gitlab:lfs:migrate')
+ end
+
+ context 'object storage disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it "doesn't migrate files" do
+ expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+
+ context 'object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'migrates local file to object storage' do
+ expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+ end
+ end
+ end
+end
diff --git a/spec/uploaders/artifact_uploader_spec.rb b/spec/uploaders/artifact_uploader_spec.rb
index 2a3bd0e3bb2..88f394b2938 100644
--- a/spec/uploaders/artifact_uploader_spec.rb
+++ b/spec/uploaders/artifact_uploader_spec.rb
@@ -1,12 +1,13 @@
require 'rails_helper'
describe ArtifactUploader do
- let(:job) { create(:ci_build) }
+ let(:store) { described_class::LOCAL_STORE }
+ let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :artifacts_file) }
- let(:path) { Gitlab.config.artifacts.path }
+ let(:local_path) { Gitlab.config.artifacts.path }
- describe '.local_artifacts_store' do
- subject { described_class.local_artifacts_store }
+ describe '.local_store_path' do
+ subject { described_class.local_store_path }
it "delegate to artifacts path" do
expect(Gitlab.config.artifacts).to receive(:path)
@@ -17,16 +18,30 @@ describe ArtifactUploader do
describe '.artifacts_upload_path' do
subject { described_class.artifacts_upload_path }
-
- it { is_expected.to start_with(path) }
+
+ it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
end
describe '#store_dir' do
subject { uploader.store_dir }
- it { is_expected.to start_with(path) }
- it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
+ let(:path) { "#{job.created_at.utc.strftime('%Y_%m')}/#{job.project_id}/#{job.id}" }
+
+ context 'when using local storage' do
+ it { is_expected.to start_with(local_path) }
+ it { is_expected.to end_with(path) }
+ end
+
+ context 'when using remote storage' do
+ let(:store) { described_class::REMOTE_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it { is_expected.to eq(path) }
+ end
end
describe '#cache_dir' do
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 7088bc23334..1e09958d369 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe LfsObjectUploader do
let(:lfs_object) { create(:lfs_object, :with_file) }
- let(:uploader) { described_class.new(lfs_object) }
+ let(:uploader) { described_class.new(lfs_object, :file) }
let(:path) { Gitlab.config.lfs.storage_path }
describe '#move_to_cache' do
@@ -37,4 +37,73 @@ describe LfsObjectUploader do
it { is_expected.to start_with(path) }
it { is_expected.to end_with('/tmp/work') }
end
+
+ describe 'migration to object storage' do
+ context 'with object storage disabled' do
+ it "is skipped" do
+ expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+ lfs_object
+ end
+ end
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'is scheduled to run after creation' do
+ expect(ObjectStorageUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
+
+ lfs_object
+ end
+ end
+
+ context 'with object storage unlicenced' do
+ before do
+ stub_lfs_object_storage(licensed: false)
+ end
+
+ it 'is skipped' do
+ expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+ lfs_object
+ end
+ end
+ end
+
+ describe 'remote file' do
+ let(:remote) { described_class::REMOTE_STORE }
+ let(:lfs_object) { create(:lfs_object, file_store: remote) }
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'can store file remotely' do
+ allow(ObjectStorageUploadWorker).to receive(:perform_async)
+
+ store_file(lfs_object)
+
+ expect(lfs_object.file_store).to eq remote
+ expect(lfs_object.file.path).not_to be_blank
+ end
+ end
+
+ context 'with object storage unlicensed' do
+ before do
+ stub_lfs_object_storage(licensed: false)
+ end
+
+ it 'can not store file remotely' do
+ expect { store_file(lfs_object) }.to raise_error('Object Storage feature is missing')
+ end
+ end
+ end
+
+ def store_file(lfs_object)
+ lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ lfs_object.save!
+ end
end
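# Illustrative sketch (not part of the diff): the scheduling behaviour exercised
# above -- after an LFS object is created, a background upload job is queued only
# when object storage is enabled and the license allows it. The callback and
# guard-clause names are assumptions.
class LfsObject < ActiveRecord::Base
  mount_uploader :file, LfsObjectUploader

  after_create :schedule_migration_to_object_storage

  private

  def schedule_migration_to_object_storage
    return unless Gitlab.config.lfs.object_store.enabled
    return unless License.feature_available?(:object_storage)

    ObjectStorageUploadWorker.perform_async(LfsObjectUploader.name, self.class.name, :file, id)
  end
end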
diff --git a/spec/uploaders/object_store_uploader_spec.rb b/spec/uploaders/object_store_uploader_spec.rb
new file mode 100644
index 00000000000..c5554502980
--- /dev/null
+++ b/spec/uploaders/object_store_uploader_spec.rb
@@ -0,0 +1,228 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
+describe ObjectStoreUploader do
+ let(:uploader_class) { Class.new(described_class) }
+ let(:object) { double }
+ let(:uploader) { uploader_class.new(object, :artifacts_file) }
+
+ describe '#object_store' do
+ it "calls artifacts_file_store on object" do
+ expect(object).to receive(:artifacts_file_store)
+
+ uploader.object_store
+ end
+ end
+
+ describe '#object_store=' do
+ it "calls artifacts_file_store= on object" do
+ expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
+
+ uploader.object_store = described_class::REMOTE_STORE
+ end
+ end
+
+ context 'when using ArtifactsUploader' do
+ let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+ let(:uploader) { job.artifacts_file }
+
+ context 'checking described_class' do
+ let(:store) { described_class::LOCAL_STORE }
+
+ it "uploader is of a described_class" do
+ expect(uploader).to be_a(described_class)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ let(:store) { described_class::LOCAL_STORE }
+
+ it "calls a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(/tmp\/cache/)
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::REMOTE_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "calls a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(/tmp\/cache/)
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
+ let(:uploader) { job.artifacts_file }
+ let(:store) { described_class::LOCAL_STORE }
+
+ subject { uploader.migrate!(new_store) }
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+ it "to not migrate the storage" do
+ subject
+
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::REMOTE_STORE }
+ let(:new_store) { described_class::LOCAL_STORE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::REMOTE_STORE }
+ let!(:current_path) { uploader.path }
+
+ it "file does exist" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it "to raise an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ it "does delete original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when subject save fails' do
+ before do
+ expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
+ end
+
+ it "does catch an error" do
+ expect { subject }.to raise_error(/exception/)
+ end
+
+ it "original file is not removed" do
+ begin
+ subject
+ rescue
+ end
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ uploader_class.storage_options double(
+ object_store: double(remote_directory: remote_directory))
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { 'connection' }
+
+ before do
+ uploader_class.storage_options double(
+ object_store: double(connection: connection))
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(connection) }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe '#verify_license!' do
+ subject { uploader.verify_license!(nil) }
+
+ context 'when using local storage' do
+ before do
+ expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when using remote storage' do
+ before do
+ uploader_class.storage_options double(
+ object_store: double(enabled: true))
+ expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
+ end
+
+ context 'feature is not available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage) { false }
+ end
+
+ it "does raise an error" do
+ expect { subject }.to raise_error(/Object Storage feature is missing/)
+ end
+ end
+
+ context 'feature is available' do
+ before do
+ expect(License).to receive(:feature_available?).with(:object_storage) { true }
+ end
+
+ it "does not raise an error" do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end
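# Illustrative sketch (not part of the diff): the migrate! contract checked above --
# a no-op when the target store matches the current one, an error when remote
# storage is disabled or unlicensed, and removal of the original file only after
# the model saves successfully. This is an assumed simplification of the method,
# not the actual implementation; object_store_enabled? is a hypothetical helper.
def migrate!(new_store)
  return if object_store == new_store
  return unless file

  if new_store == REMOTE_STORE
    raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
    verify_license!(new_store)
  end

  old_file = file
  old_store = object_store

  begin
    self.object_store = new_store   # e.g. sets job.artifacts_file_store
    storage.store!(old_file)        # copy the file into the target backend
    model.save!                     # persist the new store; may raise
  rescue
    self.object_store = old_store   # restore the in-memory store on failure
    raise
  end

  old_file.delete                   # delete the original only after a successful save
end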
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb
new file mode 100644
index 00000000000..8a8f7a065a0
--- /dev/null
+++ b/spec/workers/object_storage_upload_worker_spec.rb
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe ObjectStorageUploadWorker do
+ let(:local) { ObjectStoreUploader::LOCAL_STORE }
+ let(:remote) { ObjectStoreUploader::REMOTE_STORE }
+
+ def perform
+ described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
+ end
+
+ context 'for LFS' do
+ let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+ let(:uploader_class) { LfsObjectUploader }
+ let(:subject_class) { LfsObject }
+ let(:file_field) { :file }
+ let(:subject_id) { lfs_object.id }
+
+ context 'when object storage is enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'uploads object to storage' do
+ expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
+ end
+
+ it 'is skipped' do
+ expect { perform }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it "doesn't migrate files" do
+ perform
+
+ expect(lfs_object.reload.file_store).to eq(local)
+ end
+ end
+ end
+
+ context 'for artifacts' do
+ let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+ let(:uploader_class) { ArtifactUploader }
+ let(:subject_class) { Ci::Build }
+ let(:file_field) { :artifacts_file }
+ let(:subject_id) { job.id }
+
+ context 'when local storage is used' do
+ let(:store) { local }
+
+ context 'and remote storage is defined' do
+ before do
+ stub_artifacts_object_storage
+ job
+ end
+
+ it "migrates file to remote storage" do
+ perform
+
+ expect(job.reload.artifacts_file_store).to eq(remote)
+ end
+
+ context 'for artifacts_metadata' do
+ let(:file_field) { :artifacts_metadata }
+
+ it 'migrates metadata to remote storage' do
+ perform
+
+ expect(job.reload.artifacts_metadata_store).to eq(remote)
+ end
+ end
+ end
+ end
+ end
+end
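# Illustrative sketch (not part of the diff): the worker behaviour exercised above --
# constantize the uploader and record classes, skip when background upload is not
# wanted, then delegate to the mounted uploader's migrate!. Looking up the config
# through an object_store_options accessor on the uploader class is an assumption.
class ObjectStorageUploadWorker
  include Sidekiq::Worker

  def perform(uploader_class_name, subject_class_name, file_field, subject_id)
    uploader_class = uploader_class_name.constantize
    subject_class = subject_class_name.constantize

    object_store = uploader_class.object_store_options # assumed accessor for the config section
    return unless object_store.enabled
    return unless object_store.background_upload

    subject = subject_class.find(subject_id)
    file = subject.public_send(file_field) # e.g. job.artifacts_file or job.artifacts_metadata
    file.migrate!(uploader_class::REMOTE_STORE)
  end
end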