author    Sean McGivern <sean@mcgivern.me.uk>  2018-03-27 08:20:03 +0000
committer Sean McGivern <sean@mcgivern.me.uk>  2018-03-27 08:20:03 +0000
commit    835fe4d324b5a89c09f06d74d5ddc73a2c41a38b (patch)
tree      471a633221bf9d44bf4af94ff3137a94a2bf1407
parent    ab8f13c3ef6e07eb8d44805dc9eef4b008e1bbe9 (diff)
parent    6d63a098f9a40f87ad922fa4c0ddcfb754e60023 (diff)
Merge branch '40781-os-to-ce' into 'master'
Bring Object Storage to CE

Closes #4171, #4163, #3370, #2841, and #29203

See merge request gitlab-org/gitlab-ce!17358
-rw-r--r--  app/controllers/concerns/send_file_upload.rb | 17
-rw-r--r--  app/controllers/concerns/uploads_actions.rb | 30
-rw-r--r--  app/controllers/projects/artifacts_controller.rb | 12
-rw-r--r--  app/controllers/projects/jobs_controller.rb | 32
-rw-r--r--  app/controllers/projects/lfs_storage_controller.rb | 8
-rw-r--r--  app/controllers/projects/raw_controller.rb | 3
-rw-r--r--  app/models/appearance.rb | 2
-rw-r--r--  app/models/ci/build.rb | 18
-rw-r--r--  app/models/ci/job_artifact.rb | 10
-rw-r--r--  app/models/concerns/avatarable.rb | 1
-rw-r--r--  app/models/lfs_object.rb | 9
-rw-r--r--  app/models/upload.rb | 19
-rw-r--r--  app/services/projects/update_pages_service.rb | 18
-rw-r--r--  app/uploaders/attachment_uploader.rb | 6
-rw-r--r--  app/uploaders/avatar_uploader.rb | 8
-rw-r--r--  app/uploaders/file_mover.rb | 9
-rw-r--r--  app/uploaders/file_uploader.rb | 56
-rw-r--r--  app/uploaders/gitlab_uploader.rb | 10
-rw-r--r--  app/uploaders/job_artifact_uploader.rb | 9
-rw-r--r--  app/uploaders/legacy_artifact_uploader.rb | 1
-rw-r--r--  app/uploaders/lfs_object_uploader.rb | 6
-rw-r--r--  app/uploaders/namespace_file_uploader.rb | 11
-rw-r--r--  app/uploaders/object_storage.rb | 335
-rw-r--r--  app/uploaders/personal_file_uploader.rb | 17
-rw-r--r--  app/uploaders/records_uploads.rb | 3
-rw-r--r--  app/views/projects/jobs/_sidebar.html.haml | 2
-rw-r--r--  app/workers/all_queues.yml | 4
-rw-r--r--  app/workers/concerns/object_storage_queue.rb | 8
-rw-r--r--  app/workers/object_storage/background_move_worker.rb | 29
-rw-r--r--  app/workers/object_storage/migrate_uploads_worker.rb | 202
-rw-r--r--  app/workers/object_storage_upload_worker.rb | 21
-rw-r--r--  changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml | 6
-rw-r--r--  changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml | 5
-rw-r--r--  changelogs/unreleased-ee/bvl-external-policy-classification.yml | 5
-rw-r--r--  changelogs/unreleased/40781-os-to-ce.yml | 5
-rw-r--r--  changelogs/unreleased/poc-upload-hashing-path.yml | 5
-rw-r--r--  config/gitlab.yml.example | 66
-rw-r--r--  config/initializers/1_settings.rb | 21
-rw-r--r--  config/initializers/fog_google_https_private_urls.rb | 20
-rw-r--r--  config/sidekiq_queues.yml | 2
-rw-r--r--  db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb | 10
-rw-r--r--  db/migrate/20170825015534_add_file_store_to_lfs_objects.rb | 31
-rw-r--r--  db/migrate/20170918072949_add_file_store_job_artifacts.rb | 10
-rw-r--r--  db/migrate/20171214144320_add_store_column_to_uploads.rb | 12
-rw-r--r--  db/schema.rb | 5
-rw-r--r--  doc/administration/job_artifacts.md | 122
-rw-r--r--  doc/administration/raketasks/uploads/migrate.md | 74
-rw-r--r--  doc/administration/uploads.md | 209
-rw-r--r--  doc/raketasks/README.md | 1
-rw-r--r--  doc/workflow/lfs/lfs_administration.md | 115
-rw-r--r--  lib/api/helpers.rb | 16
-rw-r--r--  lib/api/job_artifacts.rb | 4
-rw-r--r--  lib/api/jobs.rb | 2
-rw-r--r--  lib/api/project_export.rb | 2
-rw-r--r--  lib/api/runner.rb | 3
-rw-r--r--  lib/api/v3/builds.rb | 6
-rw-r--r--  lib/gitlab/ci/trace/http_io.rb | 187
-rw-r--r--  lib/gitlab/ci/trace/stream.rb | 2
-rw-r--r--  lib/gitlab/verify/lfs_objects.rb | 2
-rw-r--r--  lib/gitlab/verify/uploads.rb | 2
-rw-r--r--  lib/tasks/gitlab/artifacts/migrate.rake | 25
-rw-r--r--  lib/tasks/gitlab/lfs/migrate.rake | 22
-rw-r--r--  lib/tasks/gitlab/uploads/migrate.rake | 33
-rw-r--r--  spec/controllers/concerns/send_file_upload_spec.rb | 89
-rw-r--r--  spec/controllers/projects/artifacts_controller_spec.rb | 14
-rw-r--r--  spec/controllers/projects/jobs_controller_spec.rb | 63
-rw-r--r--  spec/controllers/projects/raw_controller_spec.rb | 56
-rw-r--r--  spec/factories/appearances.rb | 17
-rw-r--r--  spec/factories/ci/job_artifacts.rb | 4
-rw-r--r--  spec/factories/lfs_objects.rb | 4
-rw-r--r--  spec/factories/uploads.rb | 5
-rw-r--r--  spec/initializers/fog_google_https_private_urls_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/ci/trace/http_io_spec.rb | 315
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r--  spec/lib/gitlab/verify/lfs_objects_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/verify/uploads_spec.rb | 16
-rw-r--r--  spec/migrations/remove_empty_fork_networks_spec.rb | 4
-rw-r--r--  spec/models/ci/build_spec.rb | 30
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 44
-rw-r--r--  spec/models/lfs_object_spec.rb | 85
-rw-r--r--  spec/requests/api/jobs_spec.rb | 84
-rw-r--r--  spec/requests/api/runner_spec.rb | 70
-rw-r--r--  spec/requests/api/v3/builds_spec.rb | 32
-rw-r--r--  spec/requests/lfs_http_spec.rb | 54
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 1
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 3
-rw-r--r--  spec/services/issues/move_service_spec.rb | 2
-rw-r--r--  spec/support/http_io/http_io_helpers.rb | 64
-rw-r--r--  spec/support/shared_examples/uploaders/object_storage_shared_examples.rb | 126
-rw-r--r--  spec/support/stub_object_storage.rb | 43
-rw-r--r--  spec/tasks/gitlab/artifacts/migrate_rake_spec.rb | 118
-rw-r--r--  spec/tasks/gitlab/lfs/migrate_rake_spec.rb | 37
-rw-r--r--  spec/tasks/gitlab/uploads/migrate_rake_spec.rb | 28
-rw-r--r--  spec/uploaders/attachment_uploader_spec.rb | 22
-rw-r--r--  spec/uploaders/avatar_uploader_spec.rb | 27
-rw-r--r--  spec/uploaders/file_mover_spec.rb | 6
-rw-r--r--  spec/uploaders/file_uploader_spec.rb | 61
-rw-r--r--  spec/uploaders/job_artifact_uploader_spec.rb | 35
-rw-r--r--  spec/uploaders/legacy_artifact_uploader_spec.rb | 14
-rw-r--r--  spec/uploaders/lfs_object_uploader_spec.rb | 58
-rw-r--r--  spec/uploaders/namespace_file_uploader_spec.rb | 22
-rw-r--r--  spec/uploaders/object_storage_spec.rb | 326
-rw-r--r--  spec/uploaders/personal_file_uploader_spec.rb | 22
-rw-r--r--  spec/workers/object_storage_upload_worker_spec.rb | 108
104 files changed, 3867 insertions(+), 230 deletions(-)
diff --git a/app/controllers/concerns/send_file_upload.rb b/app/controllers/concerns/send_file_upload.rb
new file mode 100644
index 00000000000..55011c89886
--- /dev/null
+++ b/app/controllers/concerns/send_file_upload.rb
@@ -0,0 +1,17 @@
+module SendFileUpload
+ def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil, disposition: 'attachment')
+ if attachment
+ redirect_params[:query] = { "response-content-disposition" => "#{disposition};filename=#{attachment.inspect}" }
+ send_params.merge!(filename: attachment, disposition: disposition)
+ end
+
+ if file_upload.file_storage?
+ send_file file_upload.path, send_params
+ elsif file_upload.class.proxy_download_enabled?
+ headers.store(*Gitlab::Workhorse.send_url(file_upload.url(**redirect_params)))
+ head :ok
+ else
+ redirect_to file_upload.url(**redirect_params)
+ end
+ end
+end
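
The concern resolves to one of three delivery paths: `send_file` for locally stored files, a Workhorse `send_url` header for proxied downloads, or a redirect straight to the object store. A minimal usage sketch, assuming a hypothetical controller whose `package.file` is an ObjectStorage-aware uploader:

    class PackagesController < ApplicationController
      include SendFileUpload

      def download
        package = Package.find(params[:id])

        # Local disk => send_file; proxy_download enabled => Workhorse send_url;
        # otherwise => redirect to the object-store URL.
        send_upload(package.file, attachment: package.filename)
      end
    end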
diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index 3dbfabcae8a..b9b9b6e4e88 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -1,5 +1,6 @@
module UploadsActions
include Gitlab::Utils::StrongMemoize
+ include SendFileUpload
UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
@@ -26,14 +27,11 @@ module UploadsActions
def show
return render_404 unless uploader&.exists?
- if uploader.file_storage?
- disposition = uploader.image_or_video? ? 'inline' : 'attachment'
- expires_in 0.seconds, must_revalidate: true, private: true
+ expires_in 0.seconds, must_revalidate: true, private: true
- send_file uploader.file.path, disposition: disposition
- else
- redirect_to uploader.url
- end
+ disposition = uploader.image_or_video? ? 'inline' : 'attachment'
+
+ send_upload(uploader, attachment: uploader.filename, disposition: disposition)
end
private
@@ -62,19 +60,27 @@ module UploadsActions
end
def build_uploader_from_upload
- return nil unless params[:secret] && params[:filename]
+ return unless uploader = build_uploader
- upload_path = uploader_class.upload_path(params[:secret], params[:filename])
- upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
+ upload_paths = uploader.upload_paths(params[:filename])
+ upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_paths)
upload&.build_uploader
end
def build_uploader_from_params
+ return unless uploader = build_uploader
+
+ uploader.retrieve_from_store!(params[:filename])
+ uploader
+ end
+
+ def build_uploader
+ return unless params[:secret] && params[:filename]
+
uploader = uploader_class.new(model, secret: params[:secret])
- return nil unless uploader.model_valid?
+ return unless uploader.model_valid?
- uploader.retrieve_from_store!(params[:filename])
uploader
end
diff --git a/app/controllers/projects/artifacts_controller.rb b/app/controllers/projects/artifacts_controller.rb
index 0837451cc49..abc283d7aa9 100644
--- a/app/controllers/projects/artifacts_controller.rb
+++ b/app/controllers/projects/artifacts_controller.rb
@@ -1,6 +1,7 @@
class Projects::ArtifactsController < Projects::ApplicationController
include ExtractsPath
include RendersBlob
+ include SendFileUpload
layout 'project'
before_action :authorize_read_build!
@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
before_action :entry, only: [:file]
def download
- if artifacts_file.file_storage?
- send_file artifacts_file.path, disposition: 'attachment'
- else
- redirect_to artifacts_file.url
- end
+ send_upload(artifacts_file, attachment: artifacts_file.filename)
end
def browse
@@ -45,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def raw
- path = Gitlab::Ci::Build::Artifacts::Path
- .new(params[:path])
+ path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
send_artifacts_entry(build, path)
end
@@ -75,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def validate_artifacts!
- render_404 unless build && build.artifacts?
+ render_404 unless build&.artifacts?
end
def build
diff --git a/app/controllers/projects/jobs_controller.rb b/app/controllers/projects/jobs_controller.rb
index 8b54ba3ad7c..85e972d9731 100644
--- a/app/controllers/projects/jobs_controller.rb
+++ b/app/controllers/projects/jobs_controller.rb
@@ -1,4 +1,6 @@
class Projects::JobsController < Projects::ApplicationController
+ include SendFileUpload
+
before_action :build, except: [:index, :cancel_all]
before_action :authorize_read_build!,
@@ -117,11 +119,17 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
- build.trace.read do |stream|
- if stream.file?
- send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
- else
- render_404
+ if trace_artifact_file
+ send_upload(trace_artifact_file,
+ send_params: raw_send_params,
+ redirect_params: raw_redirect_params)
+ else
+ build.trace.read do |stream|
+ if stream.file?
+ send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
+ else
+ render_404
+ end
end
end
end
@@ -136,9 +144,21 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :erase_build, build)
end
+ def raw_send_params
+ { type: 'text/plain; charset=utf-8', disposition: 'inline' }
+ end
+
+ def raw_redirect_params
+ { query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
+ end
+
+ def trace_artifact_file
+ @trace_artifact_file ||= build.job_artifacts_trace&.file
+ end
+
def build
@build ||= project.builds.find(params[:id])
- .present(current_user: current_user)
+ .present(current_user: current_user)
end
def build_path(build)
diff --git a/app/controllers/projects/lfs_storage_controller.rb b/app/controllers/projects/lfs_storage_controller.rb
index 941638db427..6b16f1ccbbb 100644
--- a/app/controllers/projects/lfs_storage_controller.rb
+++ b/app/controllers/projects/lfs_storage_controller.rb
@@ -1,6 +1,7 @@
class Projects::LfsStorageController < Projects::GitHttpClientController
include LfsRequest
include WorkhorseRequest
+ include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
@@ -11,7 +12,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
return
end
- send_file lfs_object.file.path, content_type: "application/octet-stream"
+ send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
end
def upload_authorize
@@ -70,10 +71,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
end
def move_tmp_file_to_storage(object, path)
- File.open(path) do |f|
- object.file = f
- end
-
+ object.file = File.open(path)
object.file.store!
object.save
end
diff --git a/app/controllers/projects/raw_controller.rb b/app/controllers/projects/raw_controller.rb
index a02cc477e08..9bc774b7636 100644
--- a/app/controllers/projects/raw_controller.rb
+++ b/app/controllers/projects/raw_controller.rb
@@ -2,6 +2,7 @@
class Projects::RawController < Projects::ApplicationController
include ExtractsPath
include BlobHelper
+ include SendFileUpload
before_action :require_non_empty_project
before_action :assign_ref_vars
@@ -31,7 +32,7 @@ class Projects::RawController < Projects::ApplicationController
lfs_object = find_lfs_object
if lfs_object && lfs_object.project_allowed_access?(@project)
- send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
+ send_upload(lfs_object.file, attachment: @blob.name)
else
render_404
end
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index dcd14c08f3c..2a6406d63c7 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -1,5 +1,7 @@
class Appearance < ActiveRecord::Base
include CacheMarkdownField
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 1e066b69c6e..08bb5915d10 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -3,6 +3,7 @@ module Ci
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
include Presentable
include Importable
@@ -45,6 +46,7 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
+ scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
@@ -365,13 +367,19 @@ module Ci
project.running_or_pending_build_count(force: true)
end
+ def browsable_artifacts?
+ artifacts_metadata?
+ end
+
def artifacts_metadata_entry(path, **options)
- metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
- path,
- **options)
+ artifacts_metadata.use_file do |metadata_path|
+ metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+ metadata_path,
+ path,
+ **options)
- metadata.to_entry
+ metadata.to_entry
+ end
end
def erase_artifacts!
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 0a599f72bc7..df57b4f65e3 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -1,5 +1,7 @@
module Ci
class JobArtifact < ActiveRecord::Base
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model
belongs_to :project
@@ -7,9 +9,11 @@ module Ci
before_save :set_size, if: :file_changed?
+ scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
+
mount_uploader :file, JobArtifactUploader
- delegate :open, :exists?, to: :file
+ delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
@@ -21,6 +25,10 @@ module Ci
self.where(project: project).sum(:size)
end
+ def local_store?
+ [nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
+ end
+
def set_size
self.size = file.size
end
diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb
index 318df11727e..7677891b9ce 100644
--- a/app/models/concerns/avatarable.rb
+++ b/app/models/concerns/avatarable.rb
@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
+ include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index b444812a4cf..64e88d5a6a2 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -1,7 +1,12 @@
class LfsObject < ActiveRecord::Base
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
+
has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :lfs_objects_projects
+ scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
+
validates :oid, presence: true, uniqueness: true
mount_uploader :file, LfsObjectUploader
@@ -10,6 +15,10 @@ class LfsObject < ActiveRecord::Base
projects.exists?(project.lfs_storage_project.id)
end
+ def local_store?
+ [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
+ end
+
def self.destroy_unreferenced
joins("LEFT JOIN lfs_objects_projects ON lfs_objects_projects.lfs_object_id = #{table_name}.id")
.where(lfs_objects_projects: { id: nil })
diff --git a/app/models/upload.rb b/app/models/upload.rb
index 99ad37dc892..cf71a7b76fc 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -9,6 +9,8 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
+ scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
+
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
@@ -21,6 +23,7 @@ class Upload < ActiveRecord::Base
end
def absolute_path
+ raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
@@ -30,11 +33,11 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksummable?
- self.checksum = self.class.hexdigest(absolute_path)
+ self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
- def build_uploader
- uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
+ def build_uploader(mounted_as = nil)
+ uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
@@ -51,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
+ def local?
+ return true if store.nil?
+
+ store == ObjectStorage::Store::LOCAL
+ end
+
private
def delete_file!
@@ -61,10 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
- def local?
- true
- end
-
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index 00fdd047208..5bf8208e035 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -81,11 +81,13 @@ module Projects
end
def extract_tar_archive!(temp_path)
- results = Open3.pipeline(%W(gunzip -c #{artifacts}),
- %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
- %W(tar -x -C #{temp_path} #{SITE_PATH}),
- err: '/dev/null')
- raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
+ build.artifacts_file.use_file do |artifacts_path|
+ results = Open3.pipeline(%W(gunzip -c #{artifacts_path}),
+ %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+ %W(tar -x -C #{temp_path} #{SITE_PATH}),
+ err: '/dev/null')
+ raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
+ end
end
def extract_zip_archive!(temp_path)
@@ -103,8 +105,10 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
- unless system(*%W(unzip -qq -n #{artifacts} #{site_path} -d #{temp_path}))
- raise FailedToExtractError, 'pages failed to extract'
+ build.artifacts_file.use_file do |artifacts_path|
+ unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+ raise FailedToExtractError, 'pages failed to extract'
+ end
end
end
diff --git a/app/uploaders/attachment_uploader.rb b/app/uploaders/attachment_uploader.rb
index 4930fb2fca7..cd819dc9bff 100644
--- a/app/uploaders/attachment_uploader.rb
+++ b/app/uploaders/attachment_uploader.rb
@@ -1,8 +1,8 @@
class AttachmentUploader < GitlabUploader
- include UploaderHelper
include RecordsUploads::Concern
-
- storage :file
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
+ include UploaderHelper
private
diff --git a/app/uploaders/avatar_uploader.rb b/app/uploaders/avatar_uploader.rb
index 5c8e1cea62e..5848e6c6994 100644
--- a/app/uploaders/avatar_uploader.rb
+++ b/app/uploaders/avatar_uploader.rb
@@ -1,18 +1,18 @@
class AvatarUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
-
- storage :file
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
def exists?
model.avatar.file && model.avatar.file.present?
end
- def move_to_cache
+ def move_to_store
false
end
- def move_to_store
+ def move_to_cache
false
end
diff --git a/app/uploaders/file_mover.rb b/app/uploaders/file_mover.rb
index 8f56f09c9f7..bd7736ad74e 100644
--- a/app/uploaders/file_mover.rb
+++ b/app/uploaders/file_mover.rb
@@ -10,7 +10,11 @@ class FileMover
def execute
move
- uploader.record_upload if update_markdown
+
+ if update_markdown
+ uploader.record_upload
+ uploader.schedule_background_upload
+ end
end
private
@@ -24,11 +28,8 @@ class FileMover
updated_text = model.read_attribute(update_field)
.gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text)
-
- true
rescue
revert
-
false
end
diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb
index bde1161dfa8..133fdf6684d 100644
--- a/app/uploaders/file_uploader.rb
+++ b/app/uploaders/file_uploader.rb
@@ -9,14 +9,18 @@
class FileUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
+ include ObjectStorage::Concern
+ prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
- storage :file
-
after :remove, :prune_store_dir
+ # FileUploader does not run in a model transaction, so we can simply
+ # enqueue a job after the :store hook.
+ after :store, :schedule_background_upload
+
def self.root
File.join(options.storage_path, 'uploads')
end
@@ -28,8 +32,11 @@ class FileUploader < GitlabUploader
)
end
- def self.base_dir(model)
- model_path_segment(model)
+ def self.base_dir(model, store = Store::LOCAL)
+ decorated_model = model
+ decorated_model = Storage::HashedProject.new(model) if store == Store::REMOTE
+
+ model_path_segment(decorated_model)
end
# used in migrations and import/exports
@@ -47,21 +54,24 @@ class FileUploader < GitlabUploader
#
# Returns a String without a trailing slash
def self.model_path_segment(model)
- if model.hashed_storage?(:attachments)
- model.disk_path
+ case model
+ when Storage::HashedProject then model.disk_path
else
- model.full_path
+ model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
end
end
- def self.upload_path(secret, identifier)
- File.join(secret, identifier)
- end
-
def self.generate_secret
SecureRandom.hex
end
+ def upload_paths(filename)
+ [
+ File.join(secret, filename),
+ File.join(base_dir(Store::REMOTE), secret, filename)
+ ]
+ end
+
attr_accessor :model
def initialize(model, mounted_as = nil, **uploader_context)
@@ -71,8 +81,10 @@ class FileUploader < GitlabUploader
apply_context!(uploader_context)
end
- def base_dir
- self.class.base_dir(@model)
+ # enforce the usage of hashed storage when storing to the
+ # remote store, as the FileMover doesn't support object storage
+ def base_dir(store = nil)
+ self.class.base_dir(@model, store || object_store)
end
# we don't need to know the actual path, an uploader instance should be
@@ -82,15 +94,19 @@ class FileUploader < GitlabUploader
end
def upload_path
- self.class.upload_path(dynamic_segment, identifier)
- end
-
- def model_path_segment
- self.class.model_path_segment(@model)
+ if file_storage?
+ # Legacy path relative to project.full_path
+ File.join(dynamic_segment, identifier)
+ else
+ File.join(store_dir, identifier)
+ end
end
- def store_dir
- File.join(base_dir, dynamic_segment)
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
+ }
end
def markdown_link
diff --git a/app/uploaders/gitlab_uploader.rb b/app/uploaders/gitlab_uploader.rb
index 010100f2da1..f12f0466a1d 100644
--- a/app/uploaders/gitlab_uploader.rb
+++ b/app/uploaders/gitlab_uploader.rb
@@ -37,12 +37,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
cache_storage.is_a?(CarrierWave::Storage::File)
end
- # Reduce disk IO
def move_to_cache
file_storage?
end
- # Reduce disk IO
def move_to_store
file_storage?
end
@@ -51,10 +49,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
file.present?
end
- def store_dir
- File.join(base_dir, dynamic_segment)
- end
-
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
@@ -76,6 +70,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
+ #
+ # For example, `FileUploader` builds the storage path based on the associated
+ # project model's `path_with_namespace` value, which can change when the
+ # project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
diff --git a/app/uploaders/job_artifact_uploader.rb b/app/uploaders/job_artifact_uploader.rb
index ad5385f45a4..ef0f8acefd6 100644
--- a/app/uploaders/job_artifact_uploader.rb
+++ b/app/uploaders/job_artifact_uploader.rb
@@ -1,5 +1,6 @@
class JobArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
+ include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
@@ -14,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
end
def open
- raise 'Only File System is supported' unless file_storage?
-
- File.open(path, "rb") if path
+ if file_storage?
+ File.open(path, "rb") if path
+ else
+ ::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
+ end
end
private
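
With this change `open` works for both stores: it returns a local `File` handle when the artifact is on disk, or a `Gitlab::Ci::Trace::HttpIO` that reads the remote object over HTTP. A hedged sketch, assuming `artifact` is a persisted `Ci::JobArtifact`:

    io = artifact.file.open  # File (local store) or Gitlab::Ci::Trace::HttpIO (remote store)
    head = io.read(512)      # both expose IO-like sequential reads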
diff --git a/app/uploaders/legacy_artifact_uploader.rb b/app/uploaders/legacy_artifact_uploader.rb
index 28c458d3ff1..b726b053493 100644
--- a/app/uploaders/legacy_artifact_uploader.rb
+++ b/app/uploaders/legacy_artifact_uploader.rb
@@ -1,5 +1,6 @@
class LegacyArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
+ include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
diff --git a/app/uploaders/lfs_object_uploader.rb b/app/uploaders/lfs_object_uploader.rb
index e04c97ce179..eb521a22ebc 100644
--- a/app/uploaders/lfs_object_uploader.rb
+++ b/app/uploaders/lfs_object_uploader.rb
@@ -1,10 +1,6 @@
class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
-
- # LfsObject are in `tmp/upload` instead of `tmp/uploads`
- def self.workhorse_upload_path
- File.join(root, 'tmp/upload')
- end
+ include ObjectStorage::Concern
storage_options Gitlab.config.lfs
diff --git a/app/uploaders/namespace_file_uploader.rb b/app/uploaders/namespace_file_uploader.rb
index 993e85fbc13..1085ecb1700 100644
--- a/app/uploaders/namespace_file_uploader.rb
+++ b/app/uploaders/namespace_file_uploader.rb
@@ -4,7 +4,7 @@ class NamespaceFileUploader < FileUploader
options.storage_path
end
- def self.base_dir(model)
+ def self.base_dir(model, _store = nil)
File.join(options.base_dir, 'namespace', model_path_segment(model))
end
@@ -14,6 +14,13 @@ class NamespaceFileUploader < FileUploader
# Re-Override
def store_dir
- File.join(base_dir, dynamic_segment)
+ store_dirs[object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join('namespace', self.class.model_path_segment(model), dynamic_segment)
+ }
end
end
diff --git a/app/uploaders/object_storage.rb b/app/uploaders/object_storage.rb
new file mode 100644
index 00000000000..7218cb0a0fc
--- /dev/null
+++ b/app/uploaders/object_storage.rb
@@ -0,0 +1,335 @@
+require 'fog/aws'
+require 'carrierwave/storage/fog'
+
+#
+# This concern adds object storage support
+# to the GitlabUploader class
+#
+module ObjectStorage
+ RemoteStoreError = Class.new(StandardError)
+ UnknownStoreError = Class.new(StandardError)
+ ObjectStorageUnavailable = Class.new(StandardError)
+
+ module Store
+ LOCAL = 1
+ REMOTE = 2
+ end
+
+ module Extension
+ # this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
+ module RecordsUploads
+ extend ActiveSupport::Concern
+
+ def prepended(base)
+ raise "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
+
+ base.include(RecordsUploads::Concern)
+ end
+
+ def retrieve_from_store!(identifier)
+ paths = store_dirs.map { |store, path| File.join(path, identifier) }
+
+ unless current_upload_satisfies?(paths, model)
+ # the upload we already have isn't right, find the correct one
+ self.upload = uploads.find_by(model: model, path: paths)
+ end
+
+ super
+ end
+
+ def build_upload
+ super.tap do |upload|
+ upload.store = object_store
+ end
+ end
+
+ def upload=(upload)
+ return unless upload
+
+ self.object_store = upload.store
+ super
+ end
+
+ def schedule_background_upload(*args)
+ return unless schedule_background_upload?
+ return unless upload
+
+ ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
+ upload.class.to_s,
+ mounted_as,
+ upload.id)
+ end
+
+ private
+
+ def current_upload_satisfies?(paths, model)
+ return false unless upload
+ return false unless model
+
+ paths.include?(upload.path) &&
+ upload.model_id == model.id &&
+ upload.model_type == model.class.base_class.sti_name
+ end
+ end
+ end
+
+ # Add support for automatic background uploading after the file is stored.
+ #
+ module BackgroundMove
+ extend ActiveSupport::Concern
+
+ def background_upload(mount_points = [])
+ return unless mount_points.any?
+
+ run_after_commit do
+ mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+
+ def changed_mounts
+ self.class.uploaders.select do |mount, uploader_class|
+ mounted_as = uploader_class.serialization_column(self.class, mount)
+ uploader = send(:"#{mounted_as}") # rubocop:disable GitlabSecurity/PublicSend
+
+ next unless uploader
+ next unless uploader.exists?
+ next unless send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
+
+ mount
+ end.keys
+ end
+
+ included do
+ after_save on: [:create, :update] do
+ background_upload(changed_mounts)
+ end
+ end
+ end
+
+ module Concern
+ extend ActiveSupport::Concern
+
+ included do |base|
+ base.include(ObjectStorage)
+
+ after :migrate, :delete_migrated_file
+ end
+
+ class_methods do
+ def object_store_options
+ options.object_store
+ end
+
+ def object_store_enabled?
+ object_store_options.enabled
+ end
+
+ def background_upload_enabled?
+ object_store_options.background_upload
+ end
+
+ def proxy_download_enabled?
+ object_store_options.proxy_download
+ end
+
+ def direct_download_enabled?
+ !proxy_download_enabled?
+ end
+
+ def object_store_credentials
+ object_store_options.connection.to_hash.deep_symbolize_keys
+ end
+
+ def remote_store_path
+ object_store_options.remote_directory
+ end
+
+ def serialization_column(model_class, mount_point)
+ model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
+ end
+ end
+
+ def file_storage?
+ storage.is_a?(CarrierWave::Storage::File)
+ end
+
+ def file_cache_storage?
+ cache_storage.is_a?(CarrierWave::Storage::File)
+ end
+
+ def object_store
+ @object_store ||= model.try(store_serialization_column) || Store::LOCAL
+ end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def object_store=(value)
+ @object_store = value || Store::LOCAL
+ @storage = storage_for(object_store)
+ end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+ # Returns true if the current file is part of the model (i.e. is mounted in the model)
+ #
+ def persist_object_store?
+ model.respond_to?(:"#{store_serialization_column}=")
+ end
+
+ # Save the current @object_store to the model <mounted_as>_store column
+ def persist_object_store!
+ return unless persist_object_store?
+
+ updated = model.update_column(store_serialization_column, object_store)
+ raise 'Failed to update object store' unless updated
+ end
+
+ def use_file
+ if file_storage?
+ return yield path
+ end
+
+ begin
+ cache_stored_file!
+ yield cache_path
+ ensure
+ cache_storage.delete_dir!(cache_path(nil))
+ end
+ end
+
+ def filename
+ super || file&.filename
+ end
+
+ #
+ # Move the file to another store
+ #
+ # new_store: Enum (Store::LOCAL, Store::REMOTE)
+ #
+ def migrate!(new_store)
+ uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
+ raise 'Already running' unless uuid
+
+ unsafe_migrate!(new_store)
+ ensure
+ Gitlab::ExclusiveLease.cancel(exclusive_lease_key, uuid)
+ end
+
+ def schedule_background_upload(*args)
+ return unless schedule_background_upload?
+
+ ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
+ model.class.name,
+ mounted_as,
+ model.id)
+ end
+
+ def fog_directory
+ self.class.remote_store_path
+ end
+
+ def fog_credentials
+ self.class.object_store_credentials
+ end
+
+ def fog_public
+ false
+ end
+
+ def delete_migrated_file(migrated_file)
+ migrated_file.delete if exists?
+ end
+
+ def exists?
+ file.present?
+ end
+
+ def store_dir(store = nil)
+ store_dirs[store || object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(dynamic_segment)
+ }
+ end
+
+ private
+
+ def schedule_background_upload?
+ self.class.object_store_enabled? &&
+ self.class.background_upload_enabled? &&
+ self.file_storage?
+ end
+
+ # this is a hack around CarrierWave. The #migrate method needs to be
+ # able to force the current file to the migrated file upon success.
+ def file=(file)
+ @file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def serialization_column
+ self.class.serialization_column(model.class, mounted_as)
+ end
+
+ # Returns the column where the 'store' is saved
+ # defaults to 'store'
+ def store_serialization_column
+ [serialization_column, 'store'].compact.join('_').to_sym
+ end
+
+ def storage
+ @storage ||= storage_for(object_store)
+ end
+
+ def storage_for(store)
+ case store
+ when Store::REMOTE
+ raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
+
+ CarrierWave::Storage::Fog.new(self)
+ when Store::LOCAL
+ CarrierWave::Storage::File.new(self)
+ else
+ raise UnknownStoreError
+ end
+ end
+
+ def exclusive_lease_key
+ "object_storage_migrate:#{model.class}:#{model.id}"
+ end
+
+ #
+ # Move the file to another store
+ #
+ # new_store: Enum (Store::LOCAL, Store::REMOTE)
+ #
+ def unsafe_migrate!(new_store)
+ return unless object_store != new_store
+ return unless file
+
+ new_file = nil
+ file_to_delete = file
+ from_object_store = object_store
+ self.object_store = new_store # changes the storage and file
+
+ cache_stored_file! if file_storage?
+
+ with_callbacks(:migrate, file_to_delete) do
+ with_callbacks(:store, file_to_delete) do # for #store_versions!
+ new_file = storage.store!(file)
+ persist_object_store!
+ self.file = new_file
+ end
+ end
+
+ file
+ rescue => e
+ # in case of failure delete new file
+ new_file.delete unless new_file.nil?
+ # revert back to the old file
+ self.object_store = from_object_store
+ self.file = file_to_delete
+ raise e
+ end
+ end
+end
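
An uploader opts in by including `ObjectStorage::Concern`; `object_store` then selects between `Store::LOCAL` and `Store::REMOTE`, and `migrate!` moves an already-stored file under an exclusive lease. A minimal sketch with a hypothetical uploader (the class name, mount, and reuse of the artifacts settings are assumptions):

    class ReportUploader < GitlabUploader
      include ObjectStorage::Concern

      storage_options Gitlab.config.artifacts  # reuse the artifacts object_store config

      def dynamic_segment
        model.id.to_s  # GitlabUploader requires this to build store_dir
      end
    end

    uploader = ReportUploader.new(report, :file)
    uploader.store!(tempfile)
    uploader.migrate!(ObjectStorage::Store::REMOTE) if ReportUploader.object_store_enabled?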
diff --git a/app/uploaders/personal_file_uploader.rb b/app/uploaders/personal_file_uploader.rb
index f2ad0badd53..e3898b07730 100644
--- a/app/uploaders/personal_file_uploader.rb
+++ b/app/uploaders/personal_file_uploader.rb
@@ -4,7 +4,7 @@ class PersonalFileUploader < FileUploader
options.storage_path
end
- def self.base_dir(model)
+ def self.base_dir(model, _store = nil)
File.join(options.base_dir, model_path_segment(model))
end
@@ -14,6 +14,12 @@ class PersonalFileUploader < FileUploader
File.join(model.class.to_s.underscore, model.id.to_s)
end
+ def object_store
+ return Store::LOCAL unless model
+
+ super
+ end
+
# model_path_segment does not require a model to be passed, so we can always
# generate a path, even when there's no model.
def model_valid?
@@ -22,7 +28,14 @@ class PersonalFileUploader < FileUploader
# Revert-Override
def store_dir
- File.join(base_dir, dynamic_segment)
+ store_dirs[object_store]
+ end
+
+ def store_dirs
+ {
+ Store::LOCAL => File.join(base_dir, dynamic_segment),
+ Store::REMOTE => File.join(self.class.model_path_segment(model), dynamic_segment)
+ }
end
private
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index 458928bc067..89c74a78835 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -24,8 +24,7 @@ module RecordsUploads
uploads.where(path: upload_path).delete_all
upload.destroy! if upload
- self.upload = build_upload
- upload.save!
+ self.upload = build_upload.tap(&:save!)
end
end
diff --git a/app/views/projects/jobs/_sidebar.html.haml b/app/views/projects/jobs/_sidebar.html.haml
index e779473c239..ecf186e3dc8 100644
--- a/app/views/projects/jobs/_sidebar.html.haml
+++ b/app/views/projects/jobs/_sidebar.html.haml
@@ -35,7 +35,7 @@
= link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
Download
- - if @build.artifacts_metadata?
+ - if @build.browsable_artifacts?
= link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
Browse
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index f65e8385ac8..9a11cdb121e 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -39,6 +39,10 @@
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
+- object_storage_upload
+- object_storage:object_storage_background_move
+- object_storage:object_storage_migrate_uploads
+
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
diff --git a/app/workers/concerns/object_storage_queue.rb b/app/workers/concerns/object_storage_queue.rb
new file mode 100644
index 00000000000..a80f473a6d4
--- /dev/null
+++ b/app/workers/concerns/object_storage_queue.rb
@@ -0,0 +1,8 @@
+# Concern that applies shared Sidekiq queue settings to the various GitLab ObjectStorage workers.
+module ObjectStorageQueue
+ extend ActiveSupport::Concern
+
+ included do
+ queue_namespace :object_storage
+ end
+end
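
Including the concern is enough to route a worker into the shared namespace, so the single `object_storage` entry in `sidekiq_queues.yml` covers it. A sketch with a hypothetical worker:

    module ObjectStorage
      class ExampleWorker
        include ApplicationWorker
        include ObjectStorageQueue  # queue becomes object_storage:object_storage_example

        def perform(id)
          # ...
        end
      end
    end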
diff --git a/app/workers/object_storage/background_move_worker.rb b/app/workers/object_storage/background_move_worker.rb
new file mode 100644
index 00000000000..9c4d72e0ecf
--- /dev/null
+++ b/app/workers/object_storage/background_move_worker.rb
@@ -0,0 +1,29 @@
+module ObjectStorage
+ class BackgroundMoveWorker
+ include ApplicationWorker
+ include ObjectStorageQueue
+
+ sidekiq_options retry: 5
+
+ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
+ uploader_class = uploader_class_name.constantize
+ subject_class = subject_class_name.constantize
+
+ return unless uploader_class < ObjectStorage::Concern
+ return unless uploader_class.object_store_enabled?
+ return unless uploader_class.background_upload_enabled?
+
+ subject = subject_class.find(subject_id)
+ uploader = build_uploader(subject, file_field&.to_sym)
+ uploader.migrate!(ObjectStorage::Store::REMOTE)
+ end
+
+ def build_uploader(subject, mount_point)
+ case subject
+ when Upload then subject.build_uploader(mount_point)
+ else
+ subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+end
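
The worker takes class names and an id rather than live objects so that its arguments stay Sidekiq-serializable. A hedged enqueue example mirroring the signature of `perform` (the `avatar` mount on `User` exists in GitLab, but treat the call as illustrative):

    ObjectStorage::BackgroundMoveWorker.perform_async(
      'AvatarUploader', # uploader_class_name
      'User',           # subject_class_name
      'avatar',         # file_field (mount point)
      user.id)          # subject_id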
diff --git a/app/workers/object_storage/migrate_uploads_worker.rb b/app/workers/object_storage/migrate_uploads_worker.rb
new file mode 100644
index 00000000000..01ed123e6c8
--- /dev/null
+++ b/app/workers/object_storage/migrate_uploads_worker.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
+module ObjectStorage
+ class MigrateUploadsWorker
+ include ApplicationWorker
+ include ObjectStorageQueue
+
+ SanityCheckError = Class.new(StandardError)
+
+ class Upload < ActiveRecord::Base
+ # Upper limit for foreground checksum processing
+ CHECKSUM_THRESHOLD = 100.megabytes
+
+ belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
+
+ validates :size, presence: true
+ validates :path, presence: true
+ validates :model, presence: true
+ validates :uploader, presence: true
+
+ before_save :calculate_checksum!, if: :foreground_checksummable?
+ after_commit :schedule_checksum, if: :checksummable?
+
+ scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
+ scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
+
+ def self.hexdigest(path)
+ Digest::SHA256.file(path).hexdigest
+ end
+
+ def absolute_path
+ raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
+ return path unless relative_path?
+
+ uploader_class.absolute_path(self)
+ end
+
+ def calculate_checksum!
+ self.checksum = nil
+ return unless checksummable?
+
+ self.checksum = self.class.hexdigest(absolute_path)
+ end
+
+ def build_uploader(mounted_as = nil)
+ uploader_class.new(model, mounted_as).tap do |uploader|
+ uploader.upload = self
+ uploader.retrieve_from_store!(identifier)
+ end
+ end
+
+ def exist?
+ File.exist?(absolute_path)
+ end
+
+ def local?
+ return true if store.nil?
+
+ store == ObjectStorage::Store::LOCAL
+ end
+
+ private
+
+ def checksummable?
+ checksum.nil? && local? && exist?
+ end
+
+ def foreground_checksummable?
+ checksummable? && size <= CHECKSUM_THRESHOLD
+ end
+
+ def schedule_checksum
+ UploadChecksumWorker.perform_async(id)
+ end
+
+ def relative_path?
+ !path.start_with?('/')
+ end
+
+ def identifier
+ File.basename(path)
+ end
+
+ def uploader_class
+ Object.const_get(uploader)
+ end
+ end
+
+ class MigrationResult
+ attr_reader :upload
+ attr_accessor :error
+
+ def initialize(upload, error = nil)
+ @upload, @error = upload, error
+ end
+
+ def success?
+ error.nil?
+ end
+
+ def to_s
+ success? ? "Migration successful." : "Error while migrating #{upload.id}: #{error.message}"
+ end
+ end
+
+ module Report
+ class MigrationFailures < StandardError
+ attr_reader :errors
+
+ def initialize(errors)
+ @errors = errors
+ end
+
+ def message
+ errors.map(&:message).join("\n")
+ end
+ end
+
+ def report!(results)
+ success, failures = results.partition(&:success?)
+
+ Rails.logger.info header(success, failures)
+ Rails.logger.warn failures(failures)
+
+ raise MigrationFailures.new(failures.map(&:error)) if failures.any?
+ end
+
+ def header(success, failures)
+ "Migrated #{success.count}/#{success.count + failures.count} files."
+ end
+
+ def failures(failures)
+ failures.map { |f| "\t#{f}" }.join("\n")
+ end
+ end
+
+ include Report
+
+ def self.enqueue!(uploads, mounted_as, to_store)
+ sanity_check!(uploads, mounted_as)
+
+ perform_async(uploads.ids, mounted_as, to_store)
+ end
+
+ # We need to be sure all the uploads are for the same uploader and model type
+ # and that the mount point exists if provided.
+ #
+ def self.sanity_check!(uploads, mounted_as)
+ upload = uploads.first
+
+ uploader_class = upload.uploader.constantize
+ model_class = uploads.first.model_type.constantize
+
+ uploader_types = uploads.map(&:uploader).uniq
+ model_types = uploads.map(&:model_type).uniq
+ model_has_mount = mounted_as.nil? || model_class.uploaders[mounted_as] == uploader_class
+
+ raise(SanityCheckError, "Multiple uploaders found: #{uploader_types}") unless uploader_types.count == 1
+ raise(SanityCheckError, "Multiple model types found: #{model_types}") unless model_types.count == 1
+ raise(SanityCheckError, "Mount point #{mounted_as} not found in #{model_class}.") unless model_has_mount
+ end
+
+ def perform(ids, mounted_as, to_store)
+ @mounted_as = mounted_as&.to_sym
+ @to_store = to_store
+
+ uploads = Upload.preload(:model).where(id: ids)
+
+ sanity_check!(uploads)
+ results = migrate(uploads)
+
+ report!(results)
+ rescue SanityCheckError => e
+ # do not retry: the job is insane
+ Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
+ end
+
+ def sanity_check!(uploads)
+ self.class.sanity_check!(uploads, @mounted_as)
+ end
+
+ def build_uploaders(uploads)
+ uploads.map { |upload| upload.build_uploader(@mounted_as) }
+ end
+
+ def migrate(uploads)
+ build_uploaders(uploads).map(&method(:process_uploader))
+ end
+
+ def process_uploader(uploader)
+ MigrationResult.new(uploader.upload).tap do |result|
+ begin
+ uploader.migrate!(@to_store)
+ rescue => e
+ result.error = e
+ end
+ end
+ end
+ end
+end
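
`enqueue!` runs the sanity check up front, so an inconsistent batch fails in the caller instead of burning Sidekiq retries. A sketch of a bulk migration of locally stored user avatars (the batch shown is an assumption, not a prescribed invocation):

    batch = ObjectStorage::MigrateUploadsWorker::Upload
              .stored_locally
              .where(uploader: 'AvatarUploader', model_type: 'User')

    ObjectStorage::MigrateUploadsWorker.enqueue!(batch, :avatar, ObjectStorage::Store::REMOTE)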
diff --git a/app/workers/object_storage_upload_worker.rb b/app/workers/object_storage_upload_worker.rb
new file mode 100644
index 00000000000..5c80f34069c
--- /dev/null
+++ b/app/workers/object_storage_upload_worker.rb
@@ -0,0 +1,21 @@
+# @Deprecated - remove once the `object_storage_upload` queue is empty
+# The queue has been renamed `object_storage:object_storage_background_move`
+#
+class ObjectStorageUploadWorker
+ include ApplicationWorker
+
+ sidekiq_options retry: 5
+
+ def perform(uploader_class_name, subject_class_name, file_field, subject_id)
+ uploader_class = uploader_class_name.constantize
+ subject_class = subject_class_name.constantize
+
+ return unless uploader_class < ObjectStorage::Concern
+ return unless uploader_class.object_store_enabled?
+ return unless uploader_class.background_upload_enabled?
+
+ subject = subject_class.find(subject_id)
+ uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
+ uploader.migrate!(ObjectStorage::Store::REMOTE)
+ end
+end
diff --git a/changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml b/changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml
deleted file mode 100644
index a38b447e345..00000000000
--- a/changelogs/unreleased-ee/39118-dynamic-pipeline-variables-fe.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: Update CI/CD secret variables list to be dynamic and save without reloading
- the page
-merge_request: 4110
-author:
-type: added
diff --git a/changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml b/changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml
deleted file mode 100644
index bbb6cbd05be..00000000000
--- a/changelogs/unreleased-ee/4378-fix-cluster-js-not-running-on-update-page.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Fix JavaScript bundle running on Cluster update/destroy pages
-merge_request:
-author:
-type: fixed
diff --git a/changelogs/unreleased-ee/bvl-external-policy-classification.yml b/changelogs/unreleased-ee/bvl-external-policy-classification.yml
deleted file mode 100644
index 074629c8c12..00000000000
--- a/changelogs/unreleased-ee/bvl-external-policy-classification.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Authorize project access with an external service
-merge_request: 4675
-author:
-type: added
diff --git a/changelogs/unreleased/40781-os-to-ce.yml b/changelogs/unreleased/40781-os-to-ce.yml
new file mode 100644
index 00000000000..4a364292c60
--- /dev/null
+++ b/changelogs/unreleased/40781-os-to-ce.yml
@@ -0,0 +1,5 @@
+---
+title: Add object storage support for LFS objects, CI artifacts, and uploads.
+merge_request: 17358
+author:
+type: added
diff --git a/changelogs/unreleased/poc-upload-hashing-path.yml b/changelogs/unreleased/poc-upload-hashing-path.yml
new file mode 100644
index 00000000000..7970405bea1
--- /dev/null
+++ b/changelogs/unreleased/poc-upload-hashing-path.yml
@@ -0,0 +1,5 @@
+---
+title: File uploads in remote storage now support project renaming.
+merge_request: 4597
+author:
+type: fixed
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index bd696a7f2c5..05299adfa93 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -145,18 +145,55 @@ production: &base
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
+ # object_store:
+ # enabled: false
+ # remote_directory: artifacts # The bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
+ # connection:
+ # provider: AWS # Only AWS supported at the moment
+ # aws_access_key_id: AWS_ACCESS_KEY_ID
+ # aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ # region: eu-central-1
## Git LFS
lfs:
enabled: true
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # Bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
+ connection:
+ provider: AWS
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ # Use the following options to configure an AWS compatible host
+ # host: 'localhost' # default: s3.amazonaws.com
+ # endpoint: 'http://127.0.0.1:9000' # default: nil
+ # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## Uploads (attachments, avatars, etc...)
uploads:
# The location where uploads objects are stored (default: public/).
# storage_path: public/
# base_dir: uploads/-/system
+ object_store:
+ enabled: false
+ # remote_directory: uploads # Bucket name
+ # background_upload: false # Temporary option to limit automatic upload (Default: true)
+ # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
+ # connection:
+ # provider: AWS
+ # aws_access_key_id: AWS_ACCESS_KEY_ID
+ # aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ # region: eu-central-1
+ # host: 'localhost' # default: s3.amazonaws.com
+ # endpoint: 'http://127.0.0.1:9000' # default: nil
+ # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
@@ -655,10 +692,39 @@ test:
enabled: true
lfs:
enabled: false
+ # The location where LFS objects are stored (default: shared/lfs-objects).
+ # storage_path: shared/lfs-objects
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
artifacts:
path: tmp/tests/artifacts
+ enabled: true
+ # The location where build artifacts are stored (default: shared/artifacts).
+ # path: shared/artifacts
+ object_store:
+ enabled: false
+ remote_directory: artifacts # The bucket name
+ background_upload: false
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
uploads:
storage_path: tmp/tests/public
+ object_store:
+ enabled: false
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACCESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
gitlab:
host: localhost
port: 80
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 53cf0010d8e..906ae8b6180 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -305,6 +305,13 @@ Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values
# Settings.artifact['path'] is deprecated, use `storage_path` instead
Settings.artifacts['path'] = Settings.artifacts['storage_path']
Settings.artifacts['max_size'] ||= 100 # in megabytes
+Settings.artifacts['object_store'] ||= Settingslogic.new({})
+Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
+Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Registry
@@ -340,6 +347,13 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
+Settings.lfs['object_store'] ||= Settingslogic.new({})
+Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
+Settings.lfs['object_store']['remote_directory'] ||= nil
+Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
# Uploads
@@ -347,6 +361,13 @@ Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] ||
Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
+Settings.uploads['object_store'] ||= Settingslogic.new({})
+Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
+Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
+Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
+Settings.uploads['object_store']['proxy_download'] = false if Settings.uploads['object_store']['proxy_download'].nil?
+# Convert upload connection settings to use string keys, to make Fog happy
+Settings.uploads['object_store']['connection']&.deep_stringify_keys!
#
# Mattermost
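
With these defaults applied, downstream code can read the nested keys without nil checks, for example:

    Settings.artifacts['object_store']['enabled']        # => false unless configured
    Settings.lfs['object_store']['background_upload']    # => true by default
    Settings.uploads['object_store']['remote_directory'] # => 'uploads' by default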
diff --git a/config/initializers/fog_google_https_private_urls.rb b/config/initializers/fog_google_https_private_urls.rb
new file mode 100644
index 00000000000..f92e623a5d2
--- /dev/null
+++ b/config/initializers/fog_google_https_private_urls.rb
@@ -0,0 +1,20 @@
+#
+# Monkey patching the https support for private urls
+# See https://gitlab.com/gitlab-org/gitlab-ee/issues/4879
+#
+module Fog
+ module Storage
+ class GoogleXML
+ class File < Fog::Model
+ module MonkeyPatch
+ def url(expires)
+ requires :key
+ collection.get_https_url(key, expires)
+ end
+ end
+
+ prepend MonkeyPatch
+ end
+ end
+ end
+end
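
Once the patch is prepended, expiring URLs from fog-google's XML storage API come back over HTTPS. A hedged sketch using standard Fog calls (credentials, bucket, and key are placeholders):

    connection = Fog::Storage.new(
      provider:                         'Google',
      google_storage_access_key_id:     'ACCESS_KEY',
      google_storage_secret_access_key: 'SECRET')

    file = connection.directories.get('my-bucket').files.get('path/to/object')
    file.url(Time.now + 3600)  # now built by get_https_url, so it is https://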
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index 554502c5d83..c811034b29d 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -68,5 +68,7 @@
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
- [pages_domain_verification, 1]
+ - [object_storage_upload, 1]
+ - [object_storage, 1]
- [plugin, 1]
- [pipeline_background, 1]
diff --git a/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
new file mode 100644
index 00000000000..e82109190a7
--- /dev/null
+++ b/db/migrate/20170601163708_add_artifacts_store_to_ci_build.rb
@@ -0,0 +1,10 @@
+class AddArtifactsStoreToCiBuild < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column(:ci_builds, :artifacts_file_store, :integer)
+ add_column(:ci_builds, :artifacts_metadata_store, :integer)
+ end
+end
diff --git a/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
new file mode 100644
index 00000000000..41bb031014f
--- /dev/null
+++ b/db/migrate/20170825015534_add_file_store_to_lfs_objects.rb
@@ -0,0 +1,31 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddFileStoreToLfsObjects < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ # When a migration requires downtime you **must** uncomment the following
+ # constant and define a short and easy to understand explanation as to why the
+ # migration requires downtime.
+ # DOWNTIME_REASON = ''
+
+ # When using the methods "add_concurrent_index", "remove_concurrent_index" or
+ # "add_column_with_default" you must disable the use of transactions
+ # as these methods can not run in an existing transaction.
+ # When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
+ # that either of them is the _only_ method called in the migration,
+ # any other changes should go in a separate migration.
+ # This ensures that upon failure _only_ the index creation or removing fails
+ # and can be retried or reverted easily.
+ #
+ # To disable transactions uncomment the following line and remove these
+ # comments:
+ # disable_ddl_transaction!
+
+ def change
+ add_column(:lfs_objects, :file_store, :integer)
+ end
+end
diff --git a/db/migrate/20170918072949_add_file_store_job_artifacts.rb b/db/migrate/20170918072949_add_file_store_job_artifacts.rb
new file mode 100644
index 00000000000..b1f1bea6deb
--- /dev/null
+++ b/db/migrate/20170918072949_add_file_store_job_artifacts.rb
@@ -0,0 +1,10 @@
+class AddFileStoreJobArtifacts < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+ DOWNTIME = false
+
+ def change
+ add_column(:ci_job_artifacts, :file_store, :integer)
+ end
+end
diff --git a/db/migrate/20171214144320_add_store_column_to_uploads.rb b/db/migrate/20171214144320_add_store_column_to_uploads.rb
new file mode 100644
index 00000000000..e35798e2c41
--- /dev/null
+++ b/db/migrate/20171214144320_add_store_column_to_uploads.rb
@@ -0,0 +1,12 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddStoreColumnToUploads < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column(:uploads, :store, :integer)
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index b6adc3fe1f4..b3b2d5b0da9 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -307,6 +307,8 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "auto_canceled_by_id"
t.boolean "retried"
t.integer "stage_id"
+ t.integer "artifacts_file_store"
+ t.integer "artifacts_metadata_store"
t.boolean "protected"
t.integer "failure_reason"
end
@@ -345,6 +347,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.integer "project_id", null: false
t.integer "job_id", null: false
t.integer "file_type", null: false
+ t.integer "file_store"
t.integer "size", limit: 8
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -1009,6 +1012,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
+ t.integer "file_store"
end
add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
@@ -1824,6 +1828,7 @@ ActiveRecord::Schema.define(version: 20180323150945) do
t.datetime "created_at", null: false
t.string "mount_point"
t.string "secret"
+ t.integer "store"
end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index d86a54daadd..ac3a12930c3 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -87,10 +87,124 @@ _The artifacts are stored by default in
### Using object storage
+>**Notes:**
+- [Introduced][ee-1762] in [GitLab Premium][eep] 9.4.
+- Since version 9.5, artifacts are [browsable] when object storage is enabled;
+  9.4 lacks this feature.
> Available in [GitLab Premium](https://about.gitlab.com/products/) and
[GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
+> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
+
+If you don't want to use the local disk where GitLab is installed to store the
+artifacts, you can use an [object storage option][os] like AWS S3 instead.
+This configuration relies on valid AWS credentials already being configured.
+
+### Object Storage Settings
+
+For source installations the following settings are nested under `artifacts:` and then `object_store:`. On omnibus installs they are prefixed by `artifacts_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where artifacts will be stored | |
+| `background_upload` | Set to `false` to disable automatic remote upload. This option may be removed once uploads go directly to S3 | `true` |
+| `proxy_download` | Set to `true` to proxy all files served through GitLab. When left as `false`, clients download directly from remote storage instead, reducing egress traffic | `false` |
+| `connection` | Various connection options described below | |
+
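+For example, on Omnibus installs the two boolean settings above map to
+`gitlab_rails` entries in `/etc/gitlab/gitlab.rb`, using the
+`artifacts_object_store_` prefix described earlier (a sketch; the values shown
+are the defaults):
+
+```ruby
+gitlab_rails['artifacts_object_store_background_upload'] = true
+gitlab_rails['artifacts_object_store_proxy_download'] = false
+```
+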
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3 compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
+**In Omnibus installations:**
+
+_The artifacts are stored by default in
+`/var/opt/gitlab/gitlab-rails/shared/artifacts`._
+
+1. Edit `/etc/gitlab/gitlab.rb` and add the following lines by replacing with
+ the values you want:
+
+ ```ruby
+ gitlab_rails['artifacts_enabled'] = true
+ gitlab_rails['artifacts_object_store_enabled'] = true
+ gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
+ gitlab_rails['artifacts_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
+ 'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
+ }
+ ```
+
+   NOTE: For GitLab 9.4+, if you are using AWS IAM profiles, be sure to omit the
+   AWS access key and secret access key/value pairs. For example:
+
+ ```ruby
+ gitlab_rails['artifacts_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'use_iam_profile' => true
+ }
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+1. Migrate any existing local artifacts to the object storage:
+
+ ```bash
+ gitlab-rake gitlab:artifacts:migrate
+ ```
+
+   Currently this has to be executed manually; it migrates existing
+   artifacts to object storage, but all new artifacts are still stored on
+   the local disk. In the future you will be given an option to define a
+   default storage location for all new files.
+
+---
+
+**In installations from source:**
+
+_The artifacts are stored by default in
+`/home/git/gitlab/shared/artifacts`._
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
+ lines:
+
+ ```yaml
+ artifacts:
+ enabled: true
+ object_store:
+ enabled: true
+ remote_directory: "artifacts" # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+1. Migrate any existing local artifacts to the object storage:
+
+ ```bash
+ sudo -u git -H bundle exec rake gitlab:artifacts:migrate RAILS_ENV=production
+ ```
-Use an [Object storage option][ee-os] like AWS S3 to store job artifacts.
+   Currently this has to be executed manually; it migrates existing
+   artifacts to object storage, but all new artifacts are still stored on
+   the local disk. In the future you will be given an option to define a
+   default storage location for all new files.
## Expiring artifacts
@@ -194,7 +308,7 @@ When clicking on a specific file, [GitLab Workhorse] extracts it
from the archive and the download begins. This implementation saves space,
memory and disk I/O.
-[reconfigure gitlab]: restart_gitlab.md "How to restart GitLab"
-[restart gitlab]: restart_gitlab.md "How to restart GitLab"
+[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
[gitlab workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse "GitLab Workhorse repository"
-[ee-os]: https://docs.gitlab.com/ee/administration/job_artifacts.html#using-object-storage
+[os]: https://docs.gitlab.com/administration/job_artifacts.html#using-object-storage
diff --git a/doc/administration/raketasks/uploads/migrate.md b/doc/administration/raketasks/uploads/migrate.md
new file mode 100644
index 00000000000..0cd33ffc122
--- /dev/null
+++ b/doc/administration/raketasks/uploads/migrate.md
@@ -0,0 +1,74 @@
+# Uploads Migrate Rake Task
+
+## Migrate to Object Storage
+
+After [configuring the object storage](../../uploads.md#using-object-storage) for GitLab's uploads, you may use this task to migrate existing uploads from the local storage to the remote storage.
+
+>**Note:**
+All of the processing will be done in a background worker and requires **no downtime**.
+
+This task uses three parameters to find the uploads to migrate.
+
+>**Note:**
+These parameters are mainly internal to GitLab's structure; you may want to refer to the task list below instead.
+
+Parameter | Type | Description
+--------- | ---- | -----------
+`uploader_class` | string | Type of the uploader to migrate from
+`model_class` | string | Type of the model to migrate from
+`mount_point` | string/symbol | Name of the model's column on which the uploader is mounted.
+
+This task also accepts some environment variables which you can use to override
+certain values:
+
+Variable | Type | Description
+-------- | ---- | -----------
+`BATCH` | integer | Specifies the size of the batch. Defaults to 200.
+
+**Omnibus Installation**
+
+```bash
+# gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
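+#
+# To override the batch size (200 by default), set BATCH, e.g.:
+#   BATCH=100 gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"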
+
+# Avatars
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+# Attachments
+gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+# Markdown
+gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
+gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+```
+
+**Source Installation**
+
+>**Note:**
+Use `RAILS_ENV=production` for every task.
+
+```bash
+# sudo -u git -H bundle exec rake gitlab:uploads:migrate
+
+# Avatars
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+# Attachments
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+# Markdown
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+
+```
diff --git a/doc/administration/uploads.md b/doc/administration/uploads.md
new file mode 100644
index 00000000000..a82735cc72c
--- /dev/null
+++ b/doc/administration/uploads.md
@@ -0,0 +1,209 @@
+# Uploads administration
+
+>**Note:**
+Uploads represent all user data that may be sent to GitLab as a single file. For example, avatars and note attachments are uploads. Uploads are integral to GitLab functionality and therefore cannot be disabled.
+
+### Using local storage
+
+>**Note:**
+This is the default configuration.
+
+To change the location where the uploads are stored locally, follow the steps
+below.
+
+---
+
+**In Omnibus installations:**
+
+>**Note:**
+For historical reasons, uploads are stored in a base directory, which by default is `uploads/-/system`. Changing this configuration option on an existing GitLab installation is strongly discouraged.
+
+_The uploads are stored by default in `/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
+
+1. To change the storage path for example to `/mnt/storage/uploads`, edit
+ `/etc/gitlab/gitlab.rb` and add the following line:
+
+ ```ruby
+ gitlab_rails['uploads_storage_path'] = "/mnt/storage/"
+ gitlab_rails['uploads_base_dir'] = "uploads"
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+
+---
+
+**In installations from source:**
+
+_The uploads are stored by default in
+`/home/git/gitlab/public/uploads/-/system`._
+
+1. To change the storage path for example to `/mnt/storage/uploads`, edit
+ `/home/git/gitlab/config/gitlab.yml` and add or amend the following lines:
+
+ ```yaml
+ uploads:
+ storage_path: /mnt/storage
+ base_dir: uploads
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+
+### Using object storage
+
+>**Note:**
+[Introduced][ee-3867] in [GitLab Enterprise Edition Premium][eep] 10.5.
+
+If you don't want to use the local disk where GitLab is installed to store the
+uploads, you can use an object storage provider like AWS S3 instead.
+This configuration relies on valid AWS credentials already being configured.
+
+### Object Storage Settings
+
+For source installations the following settings are nested under `uploads:` and then `object_store:`. On omnibus installs they are prefixed by `uploads_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where uploads will be stored | |
+| `background_upload` | Set to `false` to disable automatic remote upload. This option may be removed once uploads go directly to S3 | `true` |
+| `proxy_download` | Set to `true` to proxy all files served through GitLab. When left as `false`, clients download directly from remote storage instead, reducing egress traffic | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3 compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
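+For example, when using an S3 compatible service such as [Minio](https://www.minio.io),
+the Omnibus connection hash could look like the following (a sketch; the endpoint,
+region, and credentials are illustrative assumptions):
+
+```ruby
+gitlab_rails['uploads_object_store_connection'] = {
+  'provider' => 'AWS',
+  'region' => 'us-east-1',
+  'aws_access_key_id' => 'MINIO_ACCESS_KEY',
+  'aws_secret_access_key' => 'MINIO_SECRET_KEY',
+  'host' => 'localhost',
+  'endpoint' => 'http://127.0.0.1:9000',
+  'path_style' => true
+}
+```
+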
+**In Omnibus installations:**
+
+_The uploads are stored by default in
+`/var/opt/gitlab/gitlab-rails/public/uploads/-/system`._
+
+1. Edit `/etc/gitlab/gitlab.rb` and add the following lines by replacing with
+ the values you want:
+
+ ```ruby
+ gitlab_rails['uploads_object_store_enabled'] = true
+ gitlab_rails['uploads_object_store_remote_directory'] = "uploads"
+ gitlab_rails['uploads_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
+ 'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
+ }
+ ```
+
+>**Note:**
+If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
+
+ ```ruby
+ gitlab_rails['uploads_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'use_iam_profile' => true
+ }
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+1. Migrate any existing local uploads to the object storage:
+
+>**Note:**
+This task respects the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing is done in a background worker and requires **no downtime**.
+
+ ```bash
+ # gitlab-rake gitlab:uploads:migrate[uploader_class, model_class, mount_point]
+
+ # Avatars
+ gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+ gitlab-rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+ gitlab-rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+ # Attachments
+ gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+ gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+ gitlab-rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+ # Markdown
+ gitlab-rake "gitlab:uploads:migrate[FileUploader, Project]"
+ gitlab-rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+ gitlab-rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+ gitlab-rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+ ```
+
+   Currently this has to be executed manually; it migrates existing
+   uploads to object storage, but all new uploads are still stored on
+   the local disk. In the future you will be given an option to define a
+   default storage location for all new files.
+
+---
+
+**In installations from source:**
+
+_The uploads are stored by default in
+`/home/git/gitlab/public/uploads/-/system`._
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
+ lines:
+
+ ```yaml
+ uploads:
+ object_store:
+ enabled: true
+ remote_directory: "uploads" # The bucket name
+ connection:
+ provider: AWS # Only AWS supported at the moment
+ aws_access_key_id: AWS_ACESS_KEY_ID
+ aws_secret_access_key: AWS_SECRET_ACCESS_KEY
+ region: eu-central-1
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+1. Migrate any existing local uploads to the object storage:
+
+>**Notes:**
+
+- This task respects the `BATCH` environment variable to process uploads in batches (200 by default). All of the processing is done in a background worker and requires **no downtime**.
+
+- To migrate in production, use the `RAILS_ENV=production` environment variable.
+
+ ```bash
+ # sudo -u git -H bundle exec rake gitlab:uploads:migrate
+
+ # Avatars
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Project, :avatar]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, Group, :avatar]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AvatarUploader, User, :avatar]"
+
+ # Attachments
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Note, :attachment]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :logo]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[AttachmentUploader, Appearance, :header_logo]"
+
+ # Markdown
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, Project]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[PersonalFileUploader, Snippet]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[NamespaceFileUploader, Snippet]"
+ sudo -u git -H bundle exec rake "gitlab:uploads:migrate[FileUploader, MergeRequest]"
+
+ ```
+
+   Currently this has to be executed manually; it migrates existing
+   uploads to object storage, but all new uploads are still stored on
+   the local disk. In the future you will be given an option to define a
+   default storage location for all new files.
+
+[reconfigure gitlab]: restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: restart_gitlab.md#installations-from-source "How to restart GitLab"
+[eep]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition Premium"
+[ee-3867]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3867
diff --git a/doc/raketasks/README.md b/doc/raketasks/README.md
index 2f916f5dea7..90187617c41 100644
--- a/doc/raketasks/README.md
+++ b/doc/raketasks/README.md
@@ -14,3 +14,4 @@ comments: false
- [Webhooks](web_hooks.md)
- [Import](import.md) of git repositories in bulk
- [Rebuild authorized_keys file](http://docs.gitlab.com/ce/raketasks/maintenance.html#rebuild-authorized_keys-file) task for administrators
+- [Migrate Uploads](../administration/raketasks/uploads/migrate.md)
diff --git a/doc/workflow/lfs/lfs_administration.md b/doc/workflow/lfs/lfs_administration.md
index d768b73286d..ca28e0a3304 100644
--- a/doc/workflow/lfs/lfs_administration.md
+++ b/doc/workflow/lfs/lfs_administration.md
@@ -5,6 +5,7 @@ Documentation on how to use Git LFS are under [Managing large binary files with
## Requirements
* Git LFS is supported in GitLab starting with version 8.2.
+* Support for object storage, such as AWS S3, was introduced in 10.0.
* Users need to install [Git LFS client](https://git-lfs.github.com) version 1.0.1 and up.
## Configuration
@@ -12,16 +13,18 @@ Documentation on how to use Git LFS are under [Managing large binary files with
Git LFS objects can be large in size. By default, they are stored on the server
GitLab is installed on.
-There are two configuration options to help GitLab server administrators:
+There are various configuration options to help GitLab server administrators:
* Enabling/disabling Git LFS support
* Changing the location of LFS object storage
+* Setting up AWS S3 compatible object storage
### Omnibus packages
In `/etc/gitlab/gitlab.rb`:
```ruby
+# Change to true to enable LFS
gitlab_rails['lfs_enabled'] = false
# Optionally, change the storage path location. Defaults to
@@ -35,11 +38,114 @@ gitlab_rails['lfs_storage_path'] = "/mnt/storage/lfs-objects"
In `config/gitlab.yml`:
```yaml
+# Change to true to enable LFS
lfs:
enabled: false
storage_path: /mnt/storage/lfs-objects
```
+## Setting up S3 compatible object storage
+
+> **Note:** [Introduced][ee-2760] in [GitLab Premium][eep] 10.0.
+> Available in [GitLab CE][ce] 10.7
+
+It is possible to store LFS objects on remote object storage instead of on a local disk.
+
+This allows you to offload storage to an external AWS S3 compatible service, freeing up disk space locally. You can also host your own S3 compatible storage decoupled from GitLab, with a service such as [Minio](https://www.minio.io/).
+
+Object storage currently transfers files first to GitLab, and then to object storage in a second stage. This can be done either by using a rake task to transfer existing objects, or in a background job after each file is received.
+
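+For example, to transfer existing objects only via the rake task described
+below and skip the per-file background job, an Omnibus install can disable
+background upload (a sketch; see the `background_upload` setting below):
+
+```ruby
+gitlab_rails['lfs_object_store_background_upload'] = false
+```
+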
+### Object Storage Settings
+
+For source installations the following settings are nested under `lfs:` and then `object_store:`. On omnibus installs they are prefixed by `lfs_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where LFS objects will be stored | |
+| `background_upload` | Set to `false` to disable automatic remote upload. This option may be removed once uploads go directly to S3 | `true` |
+| `proxy_download` | Set to `true` to proxy all files served through GitLab. When left as `false`, clients download directly from remote storage instead, reducing egress traffic | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3 compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3 compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
+### From source
+
+1. Edit `/home/git/gitlab/config/gitlab.yml` and add or amend the following
+ lines:
+
+ ```yaml
+ lfs:
+ enabled: true
+ object_store:
+ enabled: false
+ remote_directory: lfs-objects # Bucket name
+ connection:
+ provider: AWS
+ aws_access_key_id: 1ABCD2EFGHI34JKLM567N
+ aws_secret_access_key: abcdefhijklmnopQRSTUVwxyz0123456789ABCDE
+ region: eu-central-1
+ # Use the following options to configure an AWS compatible host such as Minio
+ host: 'localhost'
+ endpoint: 'http://127.0.0.1:9000'
+ path_style: true
+ ```
+
+1. Save the file and [restart GitLab][] for the changes to take effect.
+1. Migrate any existing local LFS objects to the object storage:
+
+ ```bash
+ sudo -u git -H bundle exec rake gitlab:lfs:migrate RAILS_ENV=production
+ ```
+
+ This will migrate existing LFS objects to object storage. New LFS objects
+ will be forwarded to object storage unless
+ `gitlab_rails['lfs_object_store_background_upload']` is set to false.
+
+### In Omnibus
+
+1. Edit `/etc/gitlab/gitlab.rb` and add the following lines by replacing with
+ the values you want:
+
+ ```ruby
+ gitlab_rails['lfs_object_store_enabled'] = true
+ gitlab_rails['lfs_object_store_remote_directory'] = "lfs-objects"
+ gitlab_rails['lfs_object_store_connection'] = {
+ 'provider' => 'AWS',
+ 'region' => 'eu-central-1',
+ 'aws_access_key_id' => '1ABCD2EFGHI34JKLM567N',
+ 'aws_secret_access_key' => 'abcdefhijklmnopQRSTUVwxyz0123456789ABCDE',
+ # The below options configure an S3 compatible host instead of AWS
+ 'host' => 'localhost',
+ 'endpoint' => 'http://127.0.0.1:9000',
+ 'path_style' => true
+ }
+ ```
+
+1. Save the file and [reconfigure GitLab][] for the changes to take effect.
+1. Migrate any existing local LFS objects to the object storage:
+
+ ```bash
+ gitlab-rake gitlab:lfs:migrate
+ ```
+
+ This will migrate existing LFS objects to object storage. New LFS objects
+ will be forwarded to object storage unless
+ `gitlab_rails['lfs_object_store_background_upload']` is set to false.
+
## Storage statistics
You can see the total storage used for LFS objects on groups and projects
@@ -48,10 +154,13 @@ and [projects APIs](../../api/projects.md).
## Known limitations
-* Currently, storing GitLab Git LFS objects on a non-local storage (like S3 buckets)
- is not supported
* Support for removing unreferenced LFS objects was added in 8.14 onwards.
* LFS authentications via SSH was added with GitLab 8.12
* Only compatible with the GitLFS client versions 1.1.0 and up, or 1.0.2.
* The storage statistics currently count each LFS object multiple times for
every project linking to it
+
+[reconfigure gitlab]: ../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure "How to reconfigure Omnibus GitLab"
+[restart gitlab]: ../../administration/restart_gitlab.md#installations-from-source "How to restart GitLab"
+[eep]: https://about.gitlab.com/products/ "GitLab Premium"
+[ee-2760]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/2760
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index e4fca77ab5d..e59e8a45908 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -410,7 +410,7 @@ module API
)
end
- def present_file!(path, filename, content_type = 'application/octet-stream')
+ def present_disk_file!(path, filename, content_type = 'application/octet-stream')
filename ||= File.basename(path)
header['Content-Disposition'] = "attachment; filename=#{filename}"
header['Content-Transfer-Encoding'] = 'binary'
@@ -426,13 +426,17 @@ module API
end
end
- def present_artifacts!(artifacts_file)
- return not_found! unless artifacts_file.exists?
+ def present_carrierwave_file!(file, supports_direct_download: true)
+ return not_found! unless file.exists?
- if artifacts_file.file_storage?
- present_file!(artifacts_file.path, artifacts_file.filename)
+ if file.file_storage?
+ present_disk_file!(file.path, file.filename)
+ elsif supports_direct_download && file.class.direct_download_enabled?
+ redirect(file.url)
else
- redirect_to(artifacts_file.url)
+ header(*Gitlab::Workhorse.send_url(file.url))
+ status :ok
+ body
end
end
diff --git a/lib/api/job_artifacts.rb b/lib/api/job_artifacts.rb
index 47e5eeab31d..b1adef49d46 100644
--- a/lib/api/job_artifacts.rb
+++ b/lib/api/job_artifacts.rb
@@ -28,7 +28,7 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
- present_artifacts!(latest_build.artifacts_file)
+ present_carrierwave_file!(latest_build.artifacts_file)
end
desc 'Download the artifacts archive from a job' do
@@ -43,7 +43,7 @@ module API
build = find_build!(params[:job_id])
- present_artifacts!(build.artifacts_file)
+ present_carrierwave_file!(build.artifacts_file)
end
desc 'Download a specific file from artifacts archive' do
diff --git a/lib/api/jobs.rb b/lib/api/jobs.rb
index 9c205514b3a..60911c8d733 100644
--- a/lib/api/jobs.rb
+++ b/lib/api/jobs.rb
@@ -72,7 +72,7 @@ module API
present build, with: Entities::Job
end
- # TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
+ # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific job of a project'
diff --git a/lib/api/project_export.rb b/lib/api/project_export.rb
index b0a7fd6f4ab..efc4a33ae1b 100644
--- a/lib/api/project_export.rb
+++ b/lib/api/project_export.rb
@@ -25,7 +25,7 @@ module API
render_api_error!('404 Not found or has expired', 404) unless path
- present_file!(path, File.basename(path), 'application/gzip')
+ present_disk_file!(path, File.basename(path), 'application/gzip')
end
desc 'Start export' do
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 7e6c33ec33d..8da97a97754 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -244,11 +244,12 @@ module API
params do
requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token)
+ optional :direct_download, default: false, type: Boolean, desc: %q(Perform direct download from remote storage instead of proxying artifacts)
end
get '/:id/artifacts' do
job = authenticate_job!
- present_artifacts!(job.artifacts_file)
+ present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
end
end
end
diff --git a/lib/api/v3/builds.rb b/lib/api/v3/builds.rb
index ac76fece931..683b9c993cb 100644
--- a/lib/api/v3/builds.rb
+++ b/lib/api/v3/builds.rb
@@ -85,7 +85,7 @@ module API
build = get_build!(params[:build_id])
- present_artifacts!(build.artifacts_file)
+ present_carrierwave_file!(build.artifacts_file)
end
desc 'Download the artifacts file from build' do
@@ -102,10 +102,10 @@ module API
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
- present_artifacts!(latest_build.artifacts_file)
+ present_carrierwave_file!(latest_build.artifacts_file)
end
- # TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
+ # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
# is saved in the DB instead of file). But before that, we need to consider how to replace the value of
# `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
desc 'Get a trace of a specific build of a project'
diff --git a/lib/gitlab/ci/trace/http_io.rb b/lib/gitlab/ci/trace/http_io.rb
new file mode 100644
index 00000000000..ac4308f4e2c
--- /dev/null
+++ b/lib/gitlab/ci/trace/http_io.rb
@@ -0,0 +1,187 @@
+##
+# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
+# source: https://gitlab.com/snippets/1685610
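+#
+# Usage sketch (the URL and size below are illustrative assumptions):
+#
+#   io = Gitlab::Ci::Trace::HttpIO.new('https://storage.example.com/trace.log', 1024)
+#   io.each_line { |line| puts line }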
+module Gitlab
+ module Ci
+ class Trace
+ class HttpIO
+ BUFFER_SIZE = 128.kilobytes
+
+ InvalidURLError = Class.new(StandardError)
+ FailedToGetChunkError = Class.new(StandardError)
+
+ attr_reader :uri, :size
+ attr_reader :tell
+ attr_reader :chunk, :chunk_range
+
+ alias_method :pos, :tell
+
+ def initialize(url, size)
+ raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
+
+ @uri = URI(url)
+ @size = size
+ @tell = 0
+ end
+
+ def close
+ # no-op
+ end
+
+ def binmode
+ # no-op
+ end
+
+ def binmode?
+ true
+ end
+
+ def path
+ nil
+ end
+
+ def url
+ @uri.to_s
+ end
+
+ def seek(pos, where = IO::SEEK_SET)
+ new_pos =
+ case where
+ when IO::SEEK_END
+ size + pos
+ when IO::SEEK_SET
+ pos
+ when IO::SEEK_CUR
+ tell + pos
+ else
+ -1
+ end
+
+ raise 'new position is outside of file' if new_pos < 0 || new_pos > size
+
+ @tell = new_pos
+ end
+
+ def eof?
+ tell == size
+ end
+
+ def each_line
+ until eof?
+ line = readline
+ break if line.nil?
+
+ yield(line)
+ end
+ end
+
+ def read(length = nil)
+ out = ""
+
+ until eof? || (length && out.length >= length)
+ data = get_chunk
+ break if data.empty?
+
+ out << data
+ @tell += data.bytesize
+ end
+
+ out = out[0, length] if length && out.length > length
+
+ out
+ end
+
+ def readline
+ out = ""
+
+ until eof?
+ data = get_chunk
+ new_line = data.index("\n")
+
+ if !new_line.nil?
+ out << data[0..new_line]
+ @tell += new_line + 1
+ break
+ else
+ out << data
+ @tell += data.bytesize
+ end
+ end
+
+ out
+ end
+
+ def write(data)
+ raise NotImplementedError
+ end
+
+ def truncate(offset)
+ raise NotImplementedError
+ end
+
+ def flush
+ raise NotImplementedError
+ end
+
+ def present?
+ true
+ end
+
+ private
+
+ ##
+ # The below methods are not implemented in IO class
+ #
+ def in_range?
+ @chunk_range&.include?(tell)
+ end
+
+ def get_chunk
+ unless in_range?
+ response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
+ http.request(request)
+ end
+
+ raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
+
+ @chunk = response.body.force_encoding(Encoding::BINARY)
+ @chunk_range = response.content_range
+
+          ##
+          # Note: If the provider does not return a Content-Range header, we derive
+          # the range from what we requested:
+          # - minio and AWS include a Content-Range header in Net::HTTPPartialContent 206
+          #   responses, whether the file is larger or smaller than the requested range.
+          # - GCS responds with Net::HTTPPartialContent 206 and a Content-Range header when
+          #   the file is larger than the requested range, but with Net::HTTPOK 200 (and no
+          #   Content-Range header) when it is smaller.
+ @chunk_range ||= (chunk_start...(chunk_start + @chunk.length))
+ end
+
+ @chunk[chunk_offset..BUFFER_SIZE]
+ end
+
+ def request
+ Net::HTTP::Get.new(uri).tap do |request|
+ request.set_range(chunk_start, BUFFER_SIZE)
+ end
+ end
+
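+      # Byte offset of the current position within its BUFFER_SIZE-aligned chunk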
+ def chunk_offset
+ tell % BUFFER_SIZE
+ end
+
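+      # Start byte of the BUFFER_SIZE-aligned chunk containing the current position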
+ def chunk_start
+ (tell / BUFFER_SIZE) * BUFFER_SIZE
+ end
+
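+      # End byte (exclusive) of the current chunk, capped at the file size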
+ def chunk_end
+ [chunk_start + BUFFER_SIZE, size].min
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb
index d52194f688b..b3fe3ef1c4d 100644
--- a/lib/gitlab/ci/trace/stream.rb
+++ b/lib/gitlab/ci/trace/stream.rb
@@ -8,7 +8,7 @@ module Gitlab
attr_reader :stream
- delegate :close, :tell, :seek, :size, :path, :truncate, to: :stream, allow_nil: true
+ delegate :close, :tell, :seek, :size, :path, :url, :truncate, to: :stream, allow_nil: true
delegate :valid?, to: :stream, as: :present?, allow_nil: true
diff --git a/lib/gitlab/verify/lfs_objects.rb b/lib/gitlab/verify/lfs_objects.rb
index fe51edbdeeb..970e2a7b718 100644
--- a/lib/gitlab/verify/lfs_objects.rb
+++ b/lib/gitlab/verify/lfs_objects.rb
@@ -12,7 +12,7 @@ module Gitlab
private
def relation
- LfsObject.all
+ LfsObject.with_files_stored_locally
end
def expected_checksum(lfs_object)
diff --git a/lib/gitlab/verify/uploads.rb b/lib/gitlab/verify/uploads.rb
index 6972e517ea5..0ffa71a6d72 100644
--- a/lib/gitlab/verify/uploads.rb
+++ b/lib/gitlab/verify/uploads.rb
@@ -12,7 +12,7 @@ module Gitlab
private
def relation
- Upload.all
+ Upload.with_files_stored_locally
end
def expected_checksum(upload)
diff --git a/lib/tasks/gitlab/artifacts/migrate.rake b/lib/tasks/gitlab/artifacts/migrate.rake
new file mode 100644
index 00000000000..bfca4bfb3f7
--- /dev/null
+++ b/lib/tasks/gitlab/artifacts/migrate.rake
@@ -0,0 +1,25 @@
+require 'logger'
+require 'resolv-replace'
+
+desc "GitLab | Migrate files for artifacts to comply with new storage format"
+namespace :gitlab do
+ namespace :artifacts do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of artifacts')
+
+ Ci::Build.joins(:project)
+ .with_artifacts_stored_locally
+ .find_each(batch_size: 10) do |build|
+ begin
+ build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
+ build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
+
+          logger.info("Transferred artifacts of build #{build.id} (#{build.artifacts_size} bytes) to object storage")
+ rescue => e
+ logger.error("Failed to transfer artifacts of #{build.id} with error: #{e.message}")
+ end
+ end
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/lfs/migrate.rake b/lib/tasks/gitlab/lfs/migrate.rake
new file mode 100644
index 00000000000..a45e5ca91e0
--- /dev/null
+++ b/lib/tasks/gitlab/lfs/migrate.rake
@@ -0,0 +1,22 @@
+require 'logger'
+
+desc "GitLab | Migrate LFS objects to remote storage"
+namespace :gitlab do
+ namespace :lfs do
+ task migrate: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Starting transfer of LFS files to object storage')
+
+ LfsObject.with_files_stored_locally
+ .find_each(batch_size: 10) do |lfs_object|
+ begin
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+
+ logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
+ rescue => e
+ logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
+ end
+ end
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/uploads/migrate.rake b/lib/tasks/gitlab/uploads/migrate.rake
new file mode 100644
index 00000000000..c26c3ccb3be
--- /dev/null
+++ b/lib/tasks/gitlab/uploads/migrate.rake
@@ -0,0 +1,33 @@
+namespace :gitlab do
+ namespace :uploads do
+ desc 'GitLab | Uploads | Migrate the uploaded files to object storage'
+ task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
+ batch_size = ENV.fetch('BATCH', 200).to_i
+ @to_store = ObjectStorage::Store::REMOTE
+ @mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
+ @uploader_class = args.uploader_class.constantize
+ @model_class = args.model_class.constantize
+
+ uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
+ end
+
+ def enqueue_batch(batch, index)
+ job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
+ @mounted_as,
+ @to_store)
+ puts "Enqueued job ##{index}: #{job}"
+ rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
+ # continue for the next batch
+ puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
+ end
+
+ def uploads
+ Upload.class_eval { include EachBatch } unless Upload < EachBatch
+
+ Upload
+ .where.not(store: @to_store)
+ .where(uploader: @uploader_class.to_s,
+ model_type: @model_class.base_class.sti_name)
+ end
+ end
+end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
new file mode 100644
index 00000000000..f4c99ea4064
--- /dev/null
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe SendFileUpload do
+ let(:uploader_class) do
+ Class.new(GitlabUploader) do
+ include ObjectStorage::Concern
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+ end
+ end
+
+ let(:controller_class) do
+ Class.new do
+ include SendFileUpload
+ end
+ end
+
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ describe '#send_upload' do
+ let(:controller) { controller_class.new }
+ let(:temp_file) { Tempfile.new('test') }
+
+ subject { controller.send_upload(uploader) }
+
+ before do
+ FileUtils.touch(temp_file)
+ end
+
+ after do
+ FileUtils.rm_f(temp_file)
+ end
+
+ context 'when local file is used' do
+ before do
+ uploader.store!(temp_file)
+ end
+
+ it 'sends a file' do
+ expect(controller).to receive(:send_file).with(uploader.path, anything)
+
+ subject
+ end
+ end
+
+ context 'when remote file is used' do
+ before do
+ stub_uploads_object_storage(uploader: uploader_class)
+ uploader.object_store = ObjectStorage::Store::REMOTE
+ uploader.store!(temp_file)
+ end
+
+ context 'and proxying is enabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
+ end
+
+ it 'sends a file' do
+ headers = double
+ expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
+ expect(controller).to receive(:headers) { headers }
+ expect(controller).to receive(:head).with(:ok)
+
+ subject
+ end
+ end
+
+ context 'and proxying is disabled' do
+ before do
+ allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
+ end
+
+ it 'sends a file' do
+ expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 25a2e13fe1a..4ea6f869aa3 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -145,9 +145,23 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
+ let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.root }
end
end
+
+ context 'when using remote file storage' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it_behaves_like 'a valid file' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+ let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+ let(:store) { ObjectStorage::Store::REMOTE }
+ let(:archive_path) { 'https://' }
+ end
+ end
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index f3e303bb0fe..31046c202e6 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1,7 +1,9 @@
+# coding: utf-8
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
+ include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -203,6 +205,41 @@ describe Projects::JobsController do
end
end
+ context 'when trace artifact is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+ allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+ end
+
+ context 'when there are no network issues' do
+ before do
+ stub_remote_trace_206
+
+ get_trace
+ end
+
+ it 'returns a trace' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq job.id
+ expect(json_response['status']).to eq job.status
+ expect(json_response['html']).to eq(job.trace.html)
+ end
+ end
+
+ context 'when there is a network issue' do
+ before do
+ stub_remote_trace_500
+ end
+
+ it 'returns a trace' do
+      it 'raises FailedToGetChunkError' do
+ end
+ end
+ end
+
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
@@ -446,14 +483,18 @@ describe Projects::JobsController do
end
describe 'GET raw' do
- before do
- get_raw
+ subject do
+ post :raw, namespace_id: project.namespace,
+ project_id: project,
+ id: job.id
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read
@@ -464,6 +505,8 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'send a trace file' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'BUILD TRACE'
@@ -474,14 +517,22 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do
+ response = subject
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
- def get_raw
- post :raw, namespace_id: project.namespace,
- project_id: project,
- id: job.id
+ context 'when the trace artifact is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ end
+
+      it 'redirects to the trace file url' do
+ expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
+ end
end
end
end
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index b7df42168e0..08e2ccf893a 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -8,10 +8,7 @@ describe Projects::RawController do
let(:id) { 'master/README.md' }
it 'delivers ASCII file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -25,10 +22,7 @@ describe Projects::RawController do
let(:id) { 'master/files/images/6049019_460s.jpg' }
it 'sets image content type header' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('image/jpeg')
@@ -54,21 +48,40 @@ describe Projects::RawController do
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
end
+
+ context 'and lfs uses object storage' do
+ before do
+        lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ lfs_object.save!
+ stub_lfs_object_storage
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'responds with redirect to file' do
+ get_show(public_project, id)
+
+ expect(response).to have_gitlab_http_status(302)
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+
+ it 'sets content disposition' do
+ get_show(public_project, id)
+
+ file_uri = URI.parse(response.location)
+ params = CGI.parse(file_uri.query)
+
+ expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
+ end
+ end
end
context 'when project does not have access' do
it 'does not serve the file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(404)
end
@@ -81,10 +94,7 @@ describe Projects::RawController do
end
it 'delivers ASCII file' do
- get(:show,
- namespace_id: public_project.namespace.to_param,
- project_id: public_project,
- id: id)
+ get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
@@ -95,4 +105,10 @@ describe Projects::RawController do
end
end
end
+
+ def get_show(project, id)
+ get(:show, namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: id)
+ end
end
diff --git a/spec/factories/appearances.rb b/spec/factories/appearances.rb
index 5f9c57c0c8d..18c7453bd1b 100644
--- a/spec/factories/appearances.rb
+++ b/spec/factories/appearances.rb
@@ -2,8 +2,21 @@
FactoryBot.define do
factory :appearance do
- title "MepMep"
- description "This is my Community Edition instance"
+ title "GitLab Community Edition"
+ description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines"
end
+
+ trait :with_logo do
+ logo { fixture_file_upload('spec/fixtures/dk.png') }
+ end
+
+ trait :with_header_logo do
+ header_logo { fixture_file_upload('spec/fixtures/dk.png') }
+ end
+
+ trait :with_logos do
+ with_logo
+ with_header_logo
+ end
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 8544d54ccaa..3d3287d8168 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -5,6 +5,10 @@ FactoryBot.define do
job factory: :ci_build
file_type :archive
+ trait :remote_store do
+ file_store JobArtifactUploader::Store::REMOTE
+ end
+
after :build do |artifact|
artifact.project ||= artifact.job.project
end
diff --git a/spec/factories/lfs_objects.rb b/spec/factories/lfs_objects.rb
index caaed4d5246..eaf3a4ed497 100644
--- a/spec/factories/lfs_objects.rb
+++ b/spec/factories/lfs_objects.rb
@@ -15,4 +15,8 @@ FactoryBot.define do
trait :correct_oid do
oid 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'
end
+
+ trait :object_storage do
+ file_store { LfsObjectUploader::Store::REMOTE }
+ end
end
diff --git a/spec/factories/uploads.rb b/spec/factories/uploads.rb
index ff3a2a76acc..b45f6f30e40 100644
--- a/spec/factories/uploads.rb
+++ b/spec/factories/uploads.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
uploader "AvatarUploader"
mount_point :avatar
secret nil
+ store ObjectStorage::Store::LOCAL
# we should build a mount agnostic upload by default
transient do
@@ -27,6 +28,10 @@ FactoryBot.define do
secret SecureRandom.hex
end
+ trait :object_storage do
+ store ObjectStorage::Store::REMOTE
+ end
+
trait :namespace_upload do
model { build(:group) }
path { File.join(secret, filename) }
diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb b/spec/initializers/fog_google_https_private_urls_spec.rb
new file mode 100644
index 00000000000..de3c157ab7b
--- /dev/null
+++ b/spec/initializers/fog_google_https_private_urls_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+describe 'Fog::Storage::GoogleXML::File' do
+ let(:storage) do
+ Fog.mock!
+ Fog::Storage.new({
+ google_storage_access_key_id: "asdf",
+ google_storage_secret_access_key: "asdf",
+ provider: "Google"
+ })
+ end
+
+ let(:file) do
+ directory = storage.directories.create(key: 'data')
+ directory.files.create(
+ body: 'Hello World!',
+ key: 'hello_world.txt'
+ )
+ end
+
+ it 'delegates to #get_https_url' do
+ expect(file.url(Time.now)).to start_with("https://")
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/http_io_spec.rb b/spec/lib/gitlab/ci/trace/http_io_spec.rb
new file mode 100644
index 00000000000..5474e2f518c
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/http_io_spec.rb
@@ -0,0 +1,315 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Trace::HttpIO do
+ include HttpIOHelpers
+
+ let(:http_io) { described_class.new(url, size) }
+ let(:url) { remote_trace_url }
+ let(:size) { remote_trace_size }
+
+ describe '#close' do
+ subject { http_io.close }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#binmode' do
+ subject { http_io.binmode }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#binmode?' do
+ subject { http_io.binmode? }
+
+ it { is_expected.to be_truthy }
+ end
+
+ describe '#path' do
+ subject { http_io.path }
+
+ it { is_expected.to be_nil }
+ end
+
+ describe '#url' do
+ subject { http_io.url }
+
+ it { is_expected.to eq(url) }
+ end
+
+ describe '#seek' do
+ subject { http_io.seek(pos, where) }
+
+ context 'when moves pos to end of the file' do
+ let(:pos) { 0 }
+ let(:where) { IO::SEEK_END }
+
+ it { is_expected.to eq(size) }
+ end
+
+ context 'when moves pos to middle of the file' do
+ let(:pos) { size / 2 }
+ let(:where) { IO::SEEK_SET }
+
+ it { is_expected.to eq(size / 2) }
+ end
+
+ context 'when moves pos around' do
+ it 'matches the result' do
+ expect(http_io.seek(0)).to eq(0)
+ expect(http_io.seek(100, IO::SEEK_CUR)).to eq(100)
+ expect { http_io.seek(size + 1, IO::SEEK_CUR) }.to raise_error('new position is outside of file')
+ end
+ end
+ end
+
+ describe '#eof?' do
+ subject { http_io.eof? }
+
+ context 'when current pos is at end of the file' do
+ before do
+ http_io.seek(size, IO::SEEK_SET)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when current pos is not at end of the file' do
+ before do
+ http_io.seek(0, IO::SEEK_SET)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#each_line' do
+ subject { http_io.each_line }
+
+ let(:string_io) { StringIO.new(remote_trace_body) }
+
+ before do
+ stub_remote_trace_206
+ end
+
+ it 'yields lines' do
+ expect { |b| http_io.each_line(&b) }.to yield_successive_args(*string_io.each_line.to_a)
+ end
+
+ context 'when the bucket is on GCS' do
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ stub_remote_trace_200
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'calls get_chunk only once' do
+ expect_any_instance_of(Net::HTTP).to receive(:request).once.and_call_original
+
+ http_io.each_line { |line| }
+ end
+ end
+ end
+ end
+
+ describe '#read' do
+ subject { http_io.read(length) }
+
+ context 'when there is no network issue' do
+ before do
+ stub_remote_trace_206
+ end
+
+ context 'when reading the whole file' do
+ let(:length) { nil }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+ end
+
+ context 'when reading only the first 100 bytes' do
+ let(:length) { 100 }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body[0, length])
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body[0, length])
+ end
+ end
+ end
+
+ context 'when trying to read past the end of the file' do
+ let(:length) { size + 1000 }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'reads a trace' do
+ is_expected.to eq(remote_trace_body)
+ end
+ end
+ end
+
+ context 'when trying to read 0 bytes' do
+ let(:length) { 0 }
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it 'returns an empty string' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it 'returns an empty string' do
+ is_expected.to be_empty
+ end
+ end
+ end
+ end
+
+ context 'when there is a network issue' do
+ let(:length) { nil }
+
+ before do
+ stub_remote_trace_500
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+ end
+ end
+ end
+
+ describe '#readline' do
+ subject { http_io.readline }
+
+ let(:string_io) { StringIO.new(remote_trace_body) }
+
+ before do
+ stub_remote_trace_206
+ end
+
+ shared_examples 'all line matching' do
+ it 'reads a line' do
+ (0...remote_trace_body.lines.count).each do
+ expect(http_io.readline).to eq(string_io.readline)
+ end
+ end
+ end
+
+ context 'when there is a network issue' do
+ let(:length) { nil }
+
+ before do
+ stub_remote_trace_500
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
+ end
+ end
+
+ context 'when BUFFER_SIZE is smaller than file size' do
+ before do
+ set_smaller_buffer_size_than(size)
+ end
+
+ it_behaves_like 'all line matching'
+ end
+
+ context 'when BUFFER_SIZE is larger than file size' do
+ before do
+ set_larger_buffer_size_than(size)
+ end
+
+ it_behaves_like 'all line matching'
+ end
+
+ context 'when pos is at middle of the file' do
+ before do
+ set_smaller_buffer_size_than(size)
+
+ http_io.seek(size / 2)
+ string_io.seek(size / 2)
+ end
+
+ it 'reads from pos' do
+ expect(http_io.readline).to eq(string_io.readline)
+ end
+ end
+ end
+
+ describe '#write' do
+ subject { http_io.write(nil) }
+
+ it { expect { subject }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#truncate' do
+ subject { http_io.truncate(nil) }
+
+ it { expect { subject }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#flush' do
+ subject { http_io.flush }
+
+ it { expect { subject }.to raise_error(NotImplementedError) }
+ end
+
+ describe '#present?' do
+ subject { http_io.present? }
+
+ it { is_expected.to be_truthy }
+ end
+end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 44e4c6ff94b..0716852f57f 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -265,7 +265,9 @@ CommitStatus:
- target_url
- description
- artifacts_file
+- artifacts_file_store
- artifacts_metadata
+- artifacts_metadata_store
- erased_by_id
- erased_at
- artifacts_expire_at
diff --git a/spec/lib/gitlab/verify/lfs_objects_spec.rb b/spec/lib/gitlab/verify/lfs_objects_spec.rb
index 64f3a9660e0..0f890e2c7ce 100644
--- a/spec/lib/gitlab/verify/lfs_objects_spec.rb
+++ b/spec/lib/gitlab/verify/lfs_objects_spec.rb
@@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do
expect(failures.keys).to contain_exactly(lfs_object)
expect(failure.to_s).to include('Checksum mismatch')
end
+
+ context 'with remote files' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'skips LFS objects in object storage' do
+ local_failure = create(:lfs_object)
+ create(:lfs_object, :object_storage)
+
+ failures = {}
+ described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
+
+ expect(failures.keys).to contain_exactly(local_failure)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/verify/uploads_spec.rb b/spec/lib/gitlab/verify/uploads_spec.rb
index 6146ce61226..85768308edc 100644
--- a/spec/lib/gitlab/verify/uploads_spec.rb
+++ b/spec/lib/gitlab/verify/uploads_spec.rb
@@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do
expect(failures.keys).to contain_exactly(upload)
expect(failure.to_s).to include('Checksum missing')
end
+
+ context 'with remote files' do
+ before do
+ stub_uploads_object_storage(AvatarUploader)
+ end
+
+ it 'skips uploads in object storage' do
+ local_failure = create(:upload)
+ create(:upload, :object_storage)
+
+ failures = {}
+ described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
+
+ expect(failures.keys).to contain_exactly(local_failure)
+ end
+ end
end
end
diff --git a/spec/migrations/remove_empty_fork_networks_spec.rb b/spec/migrations/remove_empty_fork_networks_spec.rb
index 7f7ce91378b..f6d030ab25c 100644
--- a/spec/migrations/remove_empty_fork_networks_spec.rb
+++ b/spec/migrations/remove_empty_fork_networks_spec.rb
@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
+ after do
+ Upload.reset_column_information
+ end
+
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 30a352fd090..7d935cf8d76 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -198,6 +198,16 @@ describe Ci::Build do
end
context 'when legacy artifacts are used' do
+ let(:build) { create(:ci_build, :legacy_artifacts) }
+
+ subject { build.artifacts? }
+
+ context 'is expired' do
+ let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+
+ it { is_expected.to be_falsy }
+ end
+
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
@@ -208,13 +218,25 @@ describe Ci::Build do
let(:build) { create(:ci_build, :legacy_artifacts) }
it { is_expected.to be_truthy }
+ end
+ end
+ end
- context 'is expired' do
- let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
+ describe '#browsable_artifacts?' do
+ subject { build.browsable_artifacts? }
- it { is_expected.to be_falsy }
- end
+ context 'artifacts metadata does not exist' do
+ before do
+ build.update_attributes(legacy_artifacts_metadata: nil)
end
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'artifacts metadata does exist' do
+ let(:build) { create(:ci_build, :artifacts) }
+
+ it { is_expected.to be_truthy }
end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index a2bd36537e6..1aa28434879 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -15,6 +15,50 @@ describe Ci::JobArtifact do
it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) }
+ describe 'callbacks' do
+ subject { create(:ci_job_artifact, :archive) }
+
+ describe '#schedule_background_upload' do
+ context 'when object storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it 'does not schedule the migration' do
+ expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when object storage is enabled' do
+ context 'when background upload is enabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it 'schedules the model for migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
+
+ subject
+ end
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: false)
+ end
+
+ it 'does not schedule the model for migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+ end
+ end
+
describe '#set_size' do
it 'sets the size' do
expect(artifact.size).to eq(106365)
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
new file mode 100644
index 00000000000..a182116d637
--- /dev/null
+++ b/spec/models/lfs_object_spec.rb
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe LfsObject do
+ describe '#local_store?' do
+ it 'returns true when file_store is nil' do
+ subject.file_store = nil
+
+ expect(subject.local_store?).to eq true
+ end
+
+ it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
+ subject.file_store = LfsObjectUploader::Store::LOCAL
+
+ expect(subject.local_store?).to eq true
+ end
+
+ it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
+ subject.file_store = LfsObjectUploader::Store::REMOTE
+
+ expect(subject.local_store?).to eq false
+ end
+ end
+
+ describe '#schedule_background_upload' do
+ before do
+ stub_lfs_setting(enabled: true)
+ end
+
+ subject { create(:lfs_object, :with_file) }
+
+ context 'when object storage is disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it 'does not schedule the migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when object storage is enabled' do
+ context 'when background upload is enabled' do
+ context 'when licensed' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'schedules the model for migration' do
+ expect(ObjectStorage::BackgroundMoveWorker)
+ .to receive(:perform_async)
+ .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+ .once
+
+ subject
+ end
+
+ it 'schedules the model for migration once' do
+ expect(ObjectStorage::BackgroundMoveWorker)
+ .to receive(:perform_async)
+ .with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
+ .once
+
+ lfs_object = create(:lfs_object)
+ lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
+ lfs_object.save!
+ end
+ end
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ stub_lfs_object_storage(background_upload: false)
+ end
+
+ it 'does not schedule the model for migration' do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 6192bbd4abb..3ffdfdc0e9a 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe API::Jobs do
+ include HttpIOHelpers
+
set(:project) do
create(:project, :repository, public_builds: false)
end
@@ -112,6 +114,7 @@ describe API::Jobs do
let(:query) { Hash.new }
before do
+ job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
@@ -335,10 +338,55 @@ describe API::Jobs do
end
end
+ context 'when artifacts are stored remotely' do
+ let(:proxy_download) { false }
+
+ before do
+ stub_artifacts_object_storage(proxy_download: proxy_download)
+ end
+
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+ before do
+ job.reload
+
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ context 'when proxy download is enabled' do
+ let(:proxy_download) { true }
+
+ it 'responds with the workhorse send-url' do
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+ end
+ end
+
+ context 'when proxy download is disabled' do
+ it 'returns location redirect' do
+ expect(response).to have_gitlab_http_status(302)
+ end
+ end
+
+ context 'authorized user' do
+ it 'returns the file remote URL' do
+ expect(response).to redirect_to(artifact.file.url)
+ end
+ end
+
+ context 'unauthorized user' do
+ let(:api_user) { nil }
+
+ it 'does not return specific job artifacts' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
- expect(response).to have_gitlab_http_status(404)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -349,6 +397,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
+ stub_artifacts_object_storage
job.success
end
@@ -412,9 +461,24 @@ describe API::Jobs do
"attachment; filename=#{job.artifacts_file.filename}" }
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_http_status(:ok) }
it { expect(response.headers).to include(download_headers) }
end
+
+ context 'when artifacts are stored remotely' do
+ let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+ before do
+ job.reload
+
+ get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(:found)
+ end
+ end
end
context 'with regular branch' do
@@ -451,6 +515,22 @@ describe API::Jobs do
end
context 'authorized user' do
+ context 'when trace is in ObjectStorage' do
+ let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
+
+ before do
+ stub_remote_trace_206
+ allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
+ allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
+ allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
+ end
+
+ it 'returns specific job trace' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.body).to eq(job.trace.raw)
+ end
+ end
+
context 'when trace is artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 95c23726a79..f3dd121faa9 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -200,7 +200,7 @@ describe API::Runner do
let(:project) { create(:project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) }
- let!(:job) do
+ let(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end
@@ -215,6 +215,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
+ job
stub_container_registry_config(enabled: false)
end
@@ -888,6 +889,7 @@ describe API::Runner do
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }
before do
+ stub_artifacts_object_storage
job.run!
end
@@ -1179,27 +1181,67 @@ describe API::Runner do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
- before do
- download_artifact
- end
-
context 'when job has artifacts' do
- let(:job) { create(:ci_build, :artifacts) }
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ let(:job) { create(:ci_build) }
+ let(:store) { JobArtifactUploader::Store::LOCAL }
+
+ before do
+ create(:ci_job_artifact, :archive, file_store: store, job: job)
end
context 'when using job token' do
- it 'download artifacts' do
- expect(response).to have_gitlab_http_status(200)
- expect(response.headers).to include download_headers
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+ end
+
+ before do
+ download_artifact
+ end
+
+ it 'downloads artifacts' do
+ expect(response).to have_http_status(200)
+ expect(response.headers).to include download_headers
+ end
+ end
+
+ context 'when artifacts are stored remotely' do
+ let(:store) { JobArtifactUploader::Store::REMOTE }
+ let!(:job) { create(:ci_build) }
+
+ context 'when proxy download is being used' do
+ before do
+ download_artifact(direct_download: false)
+ end
+
+ it 'uses workhorse send-url' do
+ expect(response).to have_gitlab_http_status(200)
+ expect(response.headers).to include(
+ 'Gitlab-Workhorse-Send-Data' => /send-url:/)
+ end
+ end
+
+ context 'when direct download is being used' do
+ before do
+ download_artifact(direct_download: true)
+ end
+
+ it 'receives a redirect for downloading artifacts' do
+ expect(response).to have_gitlab_http_status(302)
+ expect(response.headers).to include('Location')
+ end
+ end
end
end
context 'when using runners token' do
let(:token) { job.project.runners_token }
+ before do
+ download_artifact
+ end
+
it 'responds with forbidden' do
expect(response).to have_gitlab_http_status(403)
end
@@ -1208,12 +1250,16 @@ describe API::Runner do
context 'when job does not have artifacts' do
it 'responds with not found' do
+ download_artifact
+
expect(response).to have_gitlab_http_status(404)
end
end
def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token)
+ job.reload
+
get api("/jobs/#{job.id}/artifacts"), params, request_headers
end
end
diff --git a/spec/requests/api/v3/builds_spec.rb b/spec/requests/api/v3/builds_spec.rb
index 79041c6a792..00f067889a0 100644
--- a/spec/requests/api/v3/builds_spec.rb
+++ b/spec/requests/api/v3/builds_spec.rb
@@ -216,6 +216,7 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
+ stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
@@ -230,13 +231,24 @@ describe API::V3::Builds do
end
it 'returns specific job artifacts' do
- expect(response).to have_gitlab_http_status(200)
+ expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
end
end
end
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+ it 'returns location redirect' do
+ get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+
+ expect(response).to have_gitlab_http_status(302)
+ end
+ end
+
context 'unauthorized user' do
let(:api_user) { nil }
@@ -256,6 +268,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
+ stub_artifacts_object_storage
build.success
end
@@ -318,9 +331,24 @@ describe API::V3::Builds do
"attachment; filename=#{build.artifacts_file.filename}" }
end
- it { expect(response).to have_gitlab_http_status(200) }
+ it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
end
+
+ context 'when artifacts are stored remotely' do
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+ before do
+ build.reload
+
+ get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+ end
+
+ it 'returns location redirect' do
+ expect(response).to have_http_status(302)
+ end
+ end
end
context 'with regular branch' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 971b45c411d..f7c04c19903 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -191,10 +191,12 @@ describe 'Git LFS API and storage' do
describe 'when fetching lfs object' do
let(:project) { create(:project) }
let(:update_permissions) { }
+ let(:before_get) { }
before do
enable_lfs
update_permissions
+ before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end
@@ -239,6 +241,21 @@ describe 'Git LFS API and storage' do
end
it_behaves_like 'responds with a file'
+
+ context 'when LFS uses object storage' do
+ let(:before_get) do
+ stub_lfs_object_storage
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'responds with redirect' do
+ expect(response).to have_gitlab_http_status(302)
+ end
+
+ it 'responds with the file location' do
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+ end
end
end
@@ -978,6 +995,32 @@ describe 'Git LFS API and storage' do
end
end
+ context 'and workhorse requests upload finalize for a new lfs object' do
+ before do
+ lfs_object.destroy
+ end
+
+ context 'with object storage disabled' do
+ it "doesn't attempt to migrate file to object storage" do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ put_finalize(with_tempfile: true)
+ end
+ end
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'schedules migration of file to object storage' do
+ expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
+
+ put_finalize(with_tempfile: true)
+ end
+ end
+ end
+
context 'invalid tempfiles' do
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('foo/bar')
@@ -1177,7 +1220,9 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
- def put_finalize(lfs_tmp = lfs_tmp_file)
+ def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
+ setup_tempfile(lfs_tmp) if with_tempfile
+
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
end
@@ -1185,6 +1230,13 @@ describe 'Git LFS API and storage' do
def lfs_tmp_file
"#{sample_oid}012345678"
end
+
+ def setup_tempfile(lfs_tmp)
+ upload_path = LfsObjectUploader.workhorse_upload_path
+
+ FileUtils.mkdir_p(upload_path)
+ FileUtils.touch(File.join(upload_path, lfs_tmp))
+ end
end
def enable_lfs
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index c38795ad1a1..f51c11b141f 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -117,6 +117,7 @@ describe PipelineSerializer do
shared_examples 'no N+1 queries' do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
+
expect(recorded.count).to be_within(1).of(36)
expect(recorded.cached_count).to eq(0)
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index db9c216d3f4..b86a3d72bb4 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -28,7 +28,8 @@ describe Ci::RetryBuildService do
%i[type lock_version target_url base_tags trace_sections
commit_id deployments erased_by_id last_deployment project_id
runner_id tag_taggings taggings tags trigger_request_id
- user_id auto_canceled_by_id retried failure_reason].freeze
+ user_id auto_canceled_by_id retried failure_reason
+ artifacts_file_store artifacts_metadata_store].freeze
shared_examples 'build duplication' do
let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index c148a98569b..a9aee9e100f 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -6,7 +6,7 @@ describe Issues::MoveService do
let(:title) { 'Some issue' }
let(:description) { 'Some issue description' }
let(:old_project) { create(:project) }
- let(:new_project) { create(:project, group: create(:group)) }
+ let(:new_project) { create(:project) }
let(:milestone1) { create(:milestone, project_id: old_project.id, title: 'v9.0') }
let(:old_issue) do
diff --git a/spec/support/http_io/http_io_helpers.rb b/spec/support/http_io/http_io_helpers.rb
new file mode 100644
index 00000000000..31e07e720cd
--- /dev/null
+++ b/spec/support/http_io/http_io_helpers.rb
@@ -0,0 +1,64 @@
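+# Stubs ranged HTTP GET requests against a fake remote trace URL so that
+# Gitlab::Ci::Trace::HttpIO can be exercised without real network access.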
+module HttpIOHelpers
+ def stub_remote_trace_206
+ WebMock.stub_request(:get, remote_trace_url)
+ .to_return { |request| remote_trace_response(request, 206) }
+ end
+
+ def stub_remote_trace_200
+ WebMock.stub_request(:get, remote_trace_url)
+ .to_return { |request| remote_trace_response(request, 200) }
+ end
+
+ def stub_remote_trace_500
+ WebMock.stub_request(:get, remote_trace_url)
+ .to_return(status: [500, "Internal Server Error"])
+ end
+
+ def remote_trace_url
+ "http://trace.com/trace"
+ end
+
+ def remote_trace_response(request, response_status)
+ range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
+
+ {
+ status: response_status,
+ headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
+ body: range_trace_body(range[1].to_i, range[2].to_i)
+ }
+ end
+
+ def remote_trace_response_headers(response_status, from, to)
+ headers = { 'Content-Type' => 'text/plain' }
+
+ if response_status == 206
+ headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
+ end
+
+ headers
+ end
+
+ def range_trace_body(from, to)
+ remote_trace_body[from..to]
+ end
+
+ def remote_trace_body
+ @remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
+ end
+
+ def remote_trace_size
+ remote_trace_body.length
+ end
+
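+ # Rounds the file size down to whole 128-byte blocks and halves it, so the
+ # stubbed BUFFER_SIZE is guaranteed to be smaller than the file.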
+ def set_smaller_buffer_size_than(file_size)
+ blocks = (file_size / 128)
+ new_size = (blocks / 2) * 128
+ stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
+ end
+
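+ # Doubles the block count so the stubbed BUFFER_SIZE exceeds the file size.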
+ def set_larger_buffer_size_than(file_size)
+ blocks = (file_size / 128)
+ new_size = (blocks * 2) * 128
+ stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
+ end
+end
diff --git a/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
new file mode 100644
index 00000000000..cd9974cd6e2
--- /dev/null
+++ b/spec/support/shared_examples/uploaders/object_storage_shared_examples.rb
@@ -0,0 +1,126 @@
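+# Shared examples for ObjectStorage::Concern#migrate! between the LOCAL and
+# REMOTE stores: file type, checksum integrity, cleanup of the original file,
+# exclusive-lease locking and error handling.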
+shared_context 'with storage' do |store, **stub_params|
+ before do
+ subject.object_store = store
+ end
+end
+
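+# Expects `subject` to be an uploader with a stored file; migrates it from
+# `from_store` (defaults to the current store) to `to_store`.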
+shared_examples "migrates" do |to_store:, from_store: nil|
+ let(:to) { to_store }
+ let(:from) { from_store || subject.object_store }
+
+ def migrate(to)
+ subject.migrate!(to)
+ end
+
+ def checksum
+ Digest::SHA256.hexdigest(subject.read)
+ end
+
+ before do
+ migrate(from)
+ end
+
+ it 'returns corresponding file type' do
+ expect(subject).to be_a(CarrierWave::Uploader::Base)
+ expect(subject).to be_a(ObjectStorage::Concern)
+
+ if from == described_class::Store::REMOTE
+ expect(subject.file).to be_a(CarrierWave::Storage::Fog::File)
+ elsif from == described_class::Store::LOCAL
+ expect(subject.file).to be_a(CarrierWave::SanitizedFile)
+ else
+ raise 'Unexpected file type'
+ end
+ end
+
+ it 'does nothing when migrating to the current store' do
+ expect { migrate(from) }.not_to change { subject.object_store }.from(from)
+ end
+
+ it 'migrates to the specified store' do
+ from_checksum = checksum
+
+ expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
+ expect(checksum).to eq(from_checksum)
+ end
+
+ it 'removes the original file after the migration' do
+ original_file = subject.file.path
+ migrate(to)
+
+ expect(File.exist?(original_file)).to be_falsey
+ end
+
+ it 'can access the original file during migration' do
+ file = subject.file
+
+ allow(subject).to receive(:delete_migrated_file) { } # Remove as a callback of :migrate
+ allow(subject).to receive(:record_upload) { } # Remove as a callback of :store (:record_upload)
+
+ expect(file.exists?).to be_truthy
+ expect { migrate(to) }.not_to change { file.exists? }
+ end
+
+ context 'when migrate! is not occupied by another process' do
+ it 'executes migrate!' do
+ expect(subject).to receive(:object_store=).at_least(1)
+
+ migrate(to)
+ end
+ end
+
+ context 'when migrate! is occupied by another process' do
+ let(:exclusive_lease_key) { "object_storage_migrate:#{subject.model.class}:#{subject.model.id}" }
+
+ before do
+ @uuid = Gitlab::ExclusiveLease.new(exclusive_lease_key, timeout: 1.hour.to_i).try_obtain
+ end
+
+ it 'does not execute migrate!' do
+ expect(subject).not_to receive(:unsafe_migrate!)
+
+ expect { migrate(to) }.to raise_error('Already running')
+ end
+
+ after do
+ Gitlab::ExclusiveLease.cancel(exclusive_lease_key, @uuid)
+ end
+ end
+
+ context 'when the migration is unsuccessful' do
+ shared_examples "handles gracefully" do |error:|
+ it 'does not update the object_store' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.object_store).to eq(from)
+ end
+
+ it 'does not delete the original file' do
+ expect { migrate(to) }.to raise_error(error)
+ expect(subject.exists?).to be_truthy
+ end
+ end
+
+ context 'when the store is not supported' do
+ let(:to) { -1 } # not a valid store
+
+ include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
+ end
+
+ context 'upon a fog failure' do
+ before do
+ storage_class = subject.send(:storage_for, to).class
+ expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
+ end
+
+ include_examples "handles gracefully", error: "Store failure."
+ end
+
+ context 'upon a database failure' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
+ end
+
+ include_examples "handles gracefully", error: "ActiveRecord failure."
+ end
+ end
+end
diff --git a/spec/support/stub_object_storage.rb b/spec/support/stub_object_storage.rb
new file mode 100644
index 00000000000..1a0a2feb27d
--- /dev/null
+++ b/spec/support/stub_object_storage.rb
@@ -0,0 +1,43 @@
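+# Helpers to stub object storage configuration for a given uploader and to
+# create the matching bucket on the mocked Fog connection.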
+module StubConfiguration
+ def stub_object_storage_uploader(
+ config:, uploader:, remote_directory:,
+ enabled: true,
+ proxy_download: false,
+ background_upload: false)
+ Fog.mock!
+
+ allow(config).to receive(:enabled) { enabled }
+ allow(config).to receive(:proxy_download) { proxy_download }
+ allow(config).to receive(:background_upload) { background_upload }
+
+ return unless enabled
+
+ ::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
+ begin
+ connection.directories.create(key: remote_directory)
+ rescue Excon::Error::Conflict
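+ # The bucket may already exist from an earlier stub; a conflict is harmless.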
+ end
+ end
+ end
+
+ def stub_artifacts_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ remote_directory: 'artifacts',
+ **params)
+ end
+
+ def stub_lfs_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
+ uploader: LfsObjectUploader,
+ remote_directory: 'lfs-objects',
+ **params)
+ end
+
+ def stub_uploads_object_storage(uploader = described_class, **params)
+ stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
+ uploader: uploader,
+ remote_directory: 'uploads',
+ **params)
+ end
+end
diff --git a/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
new file mode 100644
index 00000000000..8544fb62b5a
--- /dev/null
+++ b/spec/tasks/gitlab/artifacts/migrate_rake_spec.rb
@@ -0,0 +1,118 @@
+require 'rake_helper'
+
+describe 'gitlab:artifacts namespace rake task' do
+ before(:context) do
+ Rake.application.rake_require 'tasks/gitlab/artifacts/migrate'
+ end
+
+ let(:object_storage_enabled) { false }
+
+ before do
+ stub_artifacts_object_storage(enabled: object_storage_enabled)
+ end
+
+ subject { run_rake_task('gitlab:artifacts:migrate') }
+
+ context 'legacy artifacts' do
+ describe 'migrate' do
+ let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+
+ context 'when local storage is used' do
+ let(:store) { ObjectStorage::Store::LOCAL }
+
+ context 'and job does not have file store defined' do
+ let(:object_storage_enabled) { true }
+ let(:store) { nil }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is defined' do
+ let(:object_storage_enabled) { true }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is not defined' do
+ it "fails to migrate to remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
+
+ context 'when remote storage is used' do
+ let(:object_storage_enabled) { true }
+
+ let(:store) { ObjectStorage::Store::REMOTE }
+
+ it "file stays on remote storage" do
+ subject
+
+ expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+ end
+ end
+
+ context 'job artifacts' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+
+ context 'when local storage is used' do
+ let(:store) { ObjectStorage::Store::LOCAL }
+
+ context 'and job does not have file store defined' do
+ let(:object_storage_enabled) { true }
+ let(:store) { nil }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is defined' do
+ let(:object_storage_enabled) { true }
+
+ it "migrates file to remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is not defined' do
+ it "fails to migrate to remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
+
+ context 'when remote storage is used' do
+ let(:object_storage_enabled) { true }
+ let(:store) { ObjectStorage::Store::REMOTE }
+
+ it "file stays on remote storage" do
+ subject
+
+ expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/lfs/migrate_rake_spec.rb b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
new file mode 100644
index 00000000000..66d1a192a96
--- /dev/null
+++ b/spec/tasks/gitlab/lfs/migrate_rake_spec.rb
@@ -0,0 +1,37 @@
+require 'rake_helper'
+
+describe 'gitlab:lfs namespace rake task' do
+ before :all do
+ Rake.application.rake_require 'tasks/gitlab/lfs/migrate'
+ end
+
+ describe 'migrate' do
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
+ let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+
+ def lfs_migrate
+ run_rake_task('gitlab:lfs:migrate')
+ end
+
+ context 'object storage disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it "doesn't migrate files" do
+ expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+
+ context 'object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'migrates local file to object storage' do
+ expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/uploads/migrate_rake_spec.rb b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
new file mode 100644
index 00000000000..b778d26060d
--- /dev/null
+++ b/spec/tasks/gitlab/uploads/migrate_rake_spec.rb
@@ -0,0 +1,28 @@
+require 'rake_helper'
+
+describe 'gitlab:uploads:migrate rake tasks' do
+ let!(:projects) { create_list(:project, 10, :with_avatar) }
+ let(:model_class) { Project }
+ let(:uploader_class) { AvatarUploader }
+ let(:mounted_as) { :avatar }
+ let(:batch_size) { 3 }
+
+ before do
+ stub_env('BATCH', batch_size.to_s)
+ stub_uploads_object_storage(uploader_class)
+ Rake.application.rake_require 'tasks/gitlab/uploads/migrate'
+
+ allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
+ end
+
+ def run
+ args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
+ run_rake_task("gitlab:uploads:migrate", *args)
+ end
+
+ it 'enqueues jobs in batches' do
+ expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times
+
+ run
+ end
+end
diff --git a/spec/uploaders/attachment_uploader_spec.rb b/spec/uploaders/attachment_uploader_spec.rb
index 091ba824fc6..d302c14efb9 100644
--- a/spec/uploaders/attachment_uploader_spec.rb
+++ b/spec/uploaders/attachment_uploader_spec.rb
@@ -11,4 +11,26 @@ describe AttachmentUploader do
store_dir: %r[uploads/-/system/note/attachment/],
upload_path: %r[uploads/-/system/note/attachment/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
+
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[note/attachment/],
+ upload_path: %r[note/attachment/]
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index bf9028c9260..b0468bc35ff 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe AvatarUploader do
- let(:model) { create(:user, :with_avatar) }
+ let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) }
@@ -12,15 +12,28 @@ describe AvatarUploader do
upload_path: %r[uploads/-/system/user/avatar/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
- describe '#move_to_cache' do
- it 'is false' do
- expect(uploader.move_to_cache).to eq(false)
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[user/avatar/],
+ upload_path: %r[user/avatar/]
end
- describe '#move_to_store' do
- it 'is false' do
- expect(uploader.move_to_store).to eq(false)
+ context "with a file" do
+ let(:project) { create(:project, :with_avatar) }
+ let(:uploader) { project.avatar }
+ let(:upload) { uploader.upload }
+
+ before do
+ stub_uploads_object_storage
end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index bc024cd307c..68b7e24776d 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -36,6 +36,12 @@ describe FileMover do
it 'creates a new upload record' do
expect { subject }.to change { Upload.count }.by(1)
end
+
+ it 'schedules a background migration' do
+ expect_any_instance_of(PersonalFileUploader).to receive(:schedule_background_upload).once
+
+ subject
+ end
end
context 'when update_markdown fails' do
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index b42ce982b27..db2810bbe1d 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -11,32 +11,41 @@ describe FileUploader do
shared_examples 'builds correct legacy storage paths' do
include_examples 'builds correct paths',
store_dir: %r{awesome/project/\h+},
+ upload_path: %r{\h+/<filename>},
absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
- shared_examples 'uses hashed storage' do
- context 'when rolled out attachments' do
- let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
+ context 'legacy storage' do
+ it_behaves_like 'builds correct legacy storage paths'
- before do
- allow(project).to receive(:disk_path).and_return('ca/fe/fe/ed')
- end
+ context 'uses hashed storage' do
+ context 'when rolled out attachments' do
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project') }
- it_behaves_like 'builds correct paths',
- store_dir: %r{ca/fe/fe/ed/\h+},
- absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
- end
+ include_examples 'builds correct paths',
+ store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
+ upload_path: %r{\h+/<filename>}
+ end
- context 'when only repositories are rolled out' do
- let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
+ context 'when only repositories are rolled out' do
+ let(:project) { build_stubbed(:project, namespace: group, name: 'project', storage_version: Project::HASHED_STORAGE_FEATURES[:repository]) }
- it_behaves_like 'builds correct legacy storage paths'
+ it_behaves_like 'builds correct legacy storage paths'
+ end
end
end
- context 'legacy storage' do
- it_behaves_like 'builds correct legacy storage paths'
- include_examples 'uses hashed storage'
+ context 'object store is remote' do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ # always use hashed storage path for remote uploads
+ it_behaves_like 'builds correct paths',
+ store_dir: %r{@hashed/\h{2}/\h{2}/\h+},
+ upload_path: %r{@hashed/\h{2}/\h{2}/\h+/\h+/<filename>}
end
describe 'initialize' do
@@ -78,6 +87,16 @@ describe FileUploader do
end
end
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
+
describe '#upload=' do
let(:secret) { SecureRandom.hex }
let(:upload) { create(:upload, :issuable_upload, secret: secret, filename: 'file.txt') }
@@ -93,15 +112,5 @@ describe FileUploader do
uploader.upload = upload
end
-
- context 'uploader_context is empty' do
- it 'fallbacks to regex based extraction' do
- expect(upload).to receive(:uploader_context).and_return({})
-
- uploader.upload = upload
- expect(uploader.secret).to eq(secret)
- expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt')
- end
- end
end
end
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 5612ec7e661..42036d67f3d 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -1,7 +1,8 @@
require 'spec_helper'
describe JobArtifactUploader do
- let(:job_artifact) { create(:ci_job_artifact) }
+ let(:store) { described_class::Store::LOCAL }
+ let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
subject { uploader }
@@ -11,6 +12,17 @@ describe JobArtifactUploader do
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
+ context "object store is REMOTE" do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
+ end
+
describe '#open' do
subject { uploader.open }
@@ -36,6 +48,17 @@ describe JobArtifactUploader do
end
end
end
+
+ context 'when trace is stored in Object storage' do
+ before do
+ allow(uploader).to receive(:file_storage?) { false }
+ allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
+ end
+
+ it 'returns an HTTP IO stream' do
+ is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
+ end
+ end
end
context 'file is stored in valid local_path' do
@@ -55,4 +78,14 @@ describe JobArtifactUploader do
it { is_expected.to include("/#{job_artifact.job_id}/#{job_artifact.id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/trace/sample_trace')))
+ stub_artifacts_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/legacy_artifact_uploader_spec.rb b/spec/uploaders/legacy_artifact_uploader_spec.rb
index 54c6a8b869b..eeb6fd90c9d 100644
--- a/spec/uploaders/legacy_artifact_uploader_spec.rb
+++ b/spec/uploaders/legacy_artifact_uploader_spec.rb
@@ -1,7 +1,8 @@
require 'rails_helper'
describe LegacyArtifactUploader do
- let(:job) { create(:ci_build) }
+ let(:store) { described_class::Store::LOCAL }
+ let(:job) { create(:ci_build, artifacts_file_store: store) }
let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
let(:local_path) { described_class.root }
@@ -20,6 +21,17 @@ describe LegacyArtifactUploader do
cache_dir: %r[artifacts/tmp/cache],
work_dir: %r[artifacts/tmp/work]
+ context 'object store is remote' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\d{4}_\d{1,2}/\d+/\d+\z]
+ end
+
describe '#filename' do
# we need to use the uploader, as this makes it use the mounter,
# which initialises the uploader.file object
diff --git a/spec/uploaders/lfs_object_uploader_spec.rb b/spec/uploaders/lfs_object_uploader_spec.rb
index 6ebc885daa8..a2fb3886610 100644
--- a/spec/uploaders/lfs_object_uploader_spec.rb
+++ b/spec/uploaders/lfs_object_uploader_spec.rb
@@ -11,4 +11,62 @@ describe LfsObjectUploader do
store_dir: %r[\h{2}/\h{2}],
cache_dir: %r[/lfs-objects/tmp/cache],
work_dir: %r[/lfs-objects/tmp/work]
+
+ context "object store is REMOTE" do
+ before do
+ stub_lfs_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}]
+ end
+
+ describe 'migration to object storage' do
+ context 'with object storage disabled' do
+ it "is skipped" do
+ expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
+
+ lfs_object
+ end
+ end
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'is scheduled to run after creation' do
+ expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
+
+ lfs_object
+ end
+ end
+ end
+
+ describe 'remote file' do
+ let(:remote) { described_class::Store::REMOTE }
+ let(:lfs_object) { create(:lfs_object, file_store: remote) }
+
+ context 'with object storage enabled' do
+ before do
+ stub_lfs_object_storage
+ end
+
+ it 'can store file remotely' do
+ allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
+
+ store_file(lfs_object)
+
+ expect(lfs_object.file_store).to eq remote
+ expect(lfs_object.file.path).not_to be_blank
+ end
+ end
+ end
+
+ def store_file(lfs_object)
+ lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png")
+ lfs_object.save!
+ end
end
diff --git a/spec/uploaders/namespace_file_uploader_spec.rb b/spec/uploaders/namespace_file_uploader_spec.rb
index 24a2fc0f72e..a8ba01d70b8 100644
--- a/spec/uploaders/namespace_file_uploader_spec.rb
+++ b/spec/uploaders/namespace_file_uploader_spec.rb
@@ -13,4 +13,26 @@ describe NamespaceFileUploader do
store_dir: %r[uploads/-/system/namespace/\d+],
upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/namespace/\d+/#{IDENTIFIER}]
+
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[namespace/\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
new file mode 100644
index 00000000000..489b6707c6e
--- /dev/null
+++ b/spec/uploaders/object_storage_spec.rb
@@ -0,0 +1,326 @@
+require 'rails_helper'
+require 'carrierwave/storage/fog'
+
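+# Minimal uploader used to exercise ObjectStorage::Concern (together with
+# upload records) in isolation from the real uploaders.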
+class Implementation < GitlabUploader
+ include ObjectStorage::Concern
+ include ::RecordsUploads::Concern
+ prepend ::ObjectStorage::Extension::RecordsUploads
+
+ storage_options Gitlab.config.uploads
+
+ private
+
+ # user/:id
+ def dynamic_segment
+ File.join(model.class.to_s.underscore, model.id.to_s)
+ end
+end
+
+describe ObjectStorage do
+ let(:uploader_class) { Implementation }
+ let(:object) { build_stubbed(:user) }
+ let(:uploader) { uploader_class.new(object, :file) }
+
+ before do
+ allow(uploader_class).to receive(:object_store_enabled?).and_return(true)
+ end
+
+ describe '#object_store=' do
+ it "reload the local storage" do
+ uploader.object_store = described_class::Store::LOCAL
+ expect(uploader.file_storage?).to be_truthy
+ end
+
+ it "reload the REMOTE storage" do
+ uploader.object_store = described_class::Store::REMOTE
+ expect(uploader.file_storage?).to be_falsey
+ end
+ end
+
+ context 'object_store is Store::LOCAL' do
+ before do
+ uploader.object_store = described_class::Store::LOCAL
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (base_dir, dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("uploads/-/system/user/")
+ end
+ end
+ end
+
+ context 'object_store is Store::REMOTE' do
+ before do
+ uploader.object_store = described_class::Store::REMOTE
+ end
+
+ describe '#store_dir' do
+ it 'is the composition of (dynamic_segment)' do
+ expect(uploader.store_dir).to start_with("user/")
+ end
+ end
+ end
+
+ describe '#object_store' do
+ it "delegates to <mount>_store on model" do
+ expect(object).to receive(:file_store)
+
+ uploader.object_store
+ end
+
+ context 'when store is null' do
+ before do
+ expect(object).to receive(:file_store).and_return(nil)
+ end
+
+ it "returns Store::LOCAL" do
+ expect(uploader.object_store).to eq(described_class::Store::LOCAL)
+ end
+ end
+
+ context 'when value is set' do
+ before do
+ expect(object).to receive(:file_store).and_return(described_class::Store::REMOTE)
+ end
+
+ it "returns the given value" do
+ expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+ end
+ end
+ end
+
+ describe '#file_cache_storage?' do
+ context 'when file storage is used' do
+ before do
+ uploader_class.cache_storage(:file)
+ end
+
+ it { expect(uploader).to be_file_cache_storage }
+ end
+
+ context 'when is remote storage' do
+ before do
+ uploader_class.cache_storage(:fog)
+ end
+
+ it { expect(uploader).not_to be_file_cache_storage }
+ end
+ end
+
+ # this means the uploader shall include
+ # include RecordsUploads::Concern
+ # prepend ObjectStorage::Extension::RecordsUploads
+ # so that object_store persistence is delegated to the `Upload` model.
+ #
+ context 'when persist_object_store? is false' do
+ let(:object) { create(:project, :with_avatar) }
+ let(:uploader) { object.avatar }
+
+ it { expect(object).to be_a(Avatarable) }
+ it { expect(uploader.persist_object_store?).to be_falsey }
+
+ describe 'delegates the object_store logic to the `Upload` model' do
+ it 'sets @upload to the found `upload`' do
+ expect(uploader.upload).to eq(uploader.upload)
+ end
+
+ it 'sets @object_store to the `Upload` value' do
+ expect(uploader.object_store).to eq(uploader.upload.store)
+ end
+ end
+
+ describe '#migrate!' do
+ let(:new_store) { ObjectStorage::Store::REMOTE }
+
+ before do
+ stub_uploads_object_storage(uploader: AvatarUploader)
+ end
+
+ subject { uploader.migrate!(new_store) }
+
+ it 'persists @object_store to the recorded upload' do
+ subject
+
+ expect(uploader.upload.store).to eq(new_store)
+ end
+
+ context 'when saving the upload fails' do
+ it 'is handled gracefully' do
+ store = uploader.object_store
+ expect_any_instance_of(Upload).to receive(:save!).and_raise("An error")
+
+ expect { subject }.to raise_error("An error")
+ expect(uploader.exists?).to be_truthy
+ expect(uploader.upload.store).to eq(store)
+ end
+ end
+ end
+ end
+
+ # this means the model holds an <mounted_as>_store attribute directly
+ # and does not delegate the object_store persistence to the `Upload` model.
+ #
+ context 'persist_object_store? is true' do
+ context 'when using JobArtifactsUploader' do
+ let(:store) { described_class::Store::LOCAL }
+ let(:object) { create(:ci_job_artifact, :archive, file_store: store) }
+ let(:uploader) { object.file }
+
+ context 'checking described_class' do
+ it "uploader include described_class::Concern" do
+ expect(uploader).to be_a(described_class::Concern)
+ end
+ end
+
+ describe '#use_file' do
+ context 'when file is stored locally' do
+ it "calls a regular path" do
+ expect { |b| uploader.use_file(&b) }.not_to yield_with_args(%r[tmp/cache])
+ end
+ end
+
+ context 'when file is stored remotely' do
+ let(:store) { described_class::Store::REMOTE }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it "calls a cache path" do
+ expect { |b| uploader.use_file(&b) }.to yield_with_args(%r[tmp/cache])
+ end
+ end
+ end
+
+ describe '#migrate!' do
+ subject { uploader.migrate!(new_store) }
+
+ shared_examples "updates the underlying <mounted>_store" do
+ it do
+ subject
+
+ expect(object.file_store).to eq(new_store)
+ end
+ end
+
+ context 'when using the same storage' do
+ let(:new_store) { store }
+
+ it "to not migrate the storage" do
+ subject
+
+ expect(uploader).not_to receive(:store!)
+ expect(uploader.object_store).to eq(store)
+ end
+ end
+
+ context 'when migrating to local storage' do
+ let(:store) { described_class::Store::REMOTE }
+ let(:new_store) { described_class::Store::LOCAL }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "local file does not exist" do
+ expect(File.exist?(uploader.path)).to eq(false)
+ end
+
+ it "remote file exist" do
+ expect(uploader.file.exists?).to be_truthy
+ end
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ expect(File.exist?(uploader.path)).to eq(true)
+ end
+ end
+
+ context 'when migrating to remote storage' do
+ let(:new_store) { described_class::Store::REMOTE }
+ let!(:current_path) { uploader.path }
+
+ it "file does exist" do
+ expect(File.exist?(current_path)).to eq(true)
+ end
+
+ context 'when storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it "to raise an error" do
+ expect { subject }.to raise_error(/Object Storage is not enabled/)
+ end
+ end
+
+ context 'when credentials are set' do
+ before do
+ stub_artifacts_object_storage
+ end
+
+ include_examples "updates the underlying <mounted>_store"
+
+ it "does migrate the file" do
+ subject
+
+ expect(uploader.object_store).to eq(new_store)
+ end
+
+ it "does delete original file" do
+ subject
+
+ expect(File.exist?(current_path)).to eq(false)
+ end
+
+ context 'when persisting the new store fails' do
+ before do
+ expect(uploader).to receive(:persist_object_store!).and_raise(RuntimeError, "exception")
+ end
+
+ it "original file is not removed" do
+ expect { subject }.to raise_error(/exception/)
+
+ expect(File.exist?(current_path)).to eq(true)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
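+ # These fog_* readers supply the remote-store configuration that
+ # CarrierWave's fog storage expects.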
+ describe '#fog_directory' do
+ let(:remote_directory) { 'directory' }
+
+ before do
+ uploader_class.storage_options double(object_store: double(remote_directory: remote_directory))
+ end
+
+ subject { uploader.fog_directory }
+
+ it { is_expected.to eq(remote_directory) }
+ end
+
+ describe '#fog_credentials' do
+ let(:connection) { Settingslogic.new("provider" => "AWS") }
+
+ before do
+ uploader_class.storage_options double(object_store: double(connection: connection))
+ end
+
+ subject { uploader.fog_credentials }
+
+ it { is_expected.to eq(provider: 'AWS') }
+ end
+
+ describe '#fog_public' do
+ subject { uploader.fog_public }
+
+ it { is_expected.to eq(false) }
+ end
+end
diff --git a/spec/uploaders/personal_file_uploader_spec.rb b/spec/uploaders/personal_file_uploader_spec.rb
index ed1fba6edda..c70521d90dc 100644
--- a/spec/uploaders/personal_file_uploader_spec.rb
+++ b/spec/uploaders/personal_file_uploader_spec.rb
@@ -14,6 +14,18 @@ describe PersonalFileUploader do
upload_path: IDENTIFIER,
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/personal_snippet/\d+/#{IDENTIFIER}]
+ context "object_store is REMOTE" do
+ before do
+ stub_uploads_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[\d+/\h+],
+ upload_path: IDENTIFIER
+ end
+
describe '#to_h' do
before do
subject.instance_variable_set(:@secret, 'secret')
@@ -30,4 +42,14 @@ describe PersonalFileUploader do
)
end
end
+
+ describe "#migrate!" do
+ before do
+ uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
+ stub_uploads_object_storage
+ end
+
+ it_behaves_like "migrates", to_store: described_class::Store::REMOTE
+ it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
+ end
end
diff --git a/spec/workers/object_storage_upload_worker_spec.rb b/spec/workers/object_storage_upload_worker_spec.rb
new file mode 100644
index 00000000000..32ddcbe9757
--- /dev/null
+++ b/spec/workers/object_storage_upload_worker_spec.rb
@@ -0,0 +1,108 @@
+require 'spec_helper'
+
+describe ObjectStorageUploadWorker do
+ let(:local) { ObjectStorage::Store::LOCAL }
+ let(:remote) { ObjectStorage::Store::REMOTE }
+
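+ # The worker takes the uploader class name, the mounting model class, the
+ # mounted attribute, and the record id, and migrates that single upload.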
+ def perform
+ described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
+ end
+
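+ # The worker is exercised against each uploader type it serves: LFS
+ # objects, legacy build artifacts, and job artifacts.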
+ context 'for LFS' do
+ let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
+ let(:uploader_class) { LfsObjectUploader }
+ let(:subject_class) { LfsObject }
+ let(:file_field) { :file }
+ let(:subject_id) { lfs_object.id }
+
+ context 'when object storage is enabled' do
+ before do
+ stub_lfs_object_storage(background_upload: true)
+ end
+
+ it 'uploads the object to remote storage' do
+ expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
+ end
+
+ context 'when background upload is disabled' do
+ before do
+ allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
+ end
+
+ it 'is skipped' do
+ expect { perform }.not_to change { lfs_object.reload.file_store }
+ end
+ end
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ stub_lfs_object_storage(enabled: false)
+ end
+
+ it "doesn't migrate files" do
+ perform
+
+ expect(lfs_object.reload.file_store).to eq(local)
+ end
+ end
+ end
+
+ context 'for legacy artifacts' do
+ let(:build) { create(:ci_build, :legacy_artifacts) }
+ let(:uploader_class) { LegacyArtifactUploader }
+ let(:subject_class) { Ci::Build }
+ let(:file_field) { :artifacts_file }
+ let(:subject_id) { build.id }
+
+ context 'when local storage is used' do
+ let(:store) { local }
+
+ context 'and remote storage is defined' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it "migrates file to remote storage" do
+ perform
+
+ expect(build.reload.artifacts_file_store).to eq(remote)
+ end
+
+ context 'for artifacts_metadata' do
+ let(:file_field) { :artifacts_metadata }
+
+ it 'migrates metadata to remote storage' do
+ perform
+
+ expect(build.reload.artifacts_metadata_store).to eq(remote)
+ end
+ end
+ end
+ end
+ end
+
+ context 'for job artifacts' do
+ let(:artifact) { create(:ci_job_artifact, :archive) }
+ let(:uploader_class) { JobArtifactUploader }
+ let(:subject_class) { Ci::JobArtifact }
+ let(:file_field) { :file }
+ let(:subject_id) { artifact.id }
+
+ context 'when local storage is used' do
+ let(:store) { local }
+
+ context 'and remote storage is defined' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
+ end
+
+ it "migrates file to remote storage" do
+ perform
+
+ expect(artifact.reload.file_store).to eq(remote)
+ end
+ end
+ end
+ end
+end